Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions .github/workflows/benchmarks.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
name: CI Checks - Benchmarks

on: [push, pull_request]

# Cancel superseded runs of the same ref so benchmark jobs don't queue up.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  benchmark:
    runs-on: ubuntu-latest
    env:
      TOOLCHAIN: stable
    steps:
      - name: Checkout source code
        # v4 for consistency with actions/cache@v4 below; checkout@v3 runs on a
        # deprecated Node.js runner version.
        uses: actions/checkout@v4
      - name: Install Rust toolchain
        run: |
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal --default-toolchain stable
          rustup override set stable
      # Cache the downloaded bitcoind/electrs binaries keyed by OS/arch so we
      # only pay the download cost when the cache is cold.
      - name: Enable caching for bitcoind
        id: cache-bitcoind
        uses: actions/cache@v4
        with:
          path: bin/bitcoind-${{ runner.os }}-${{ runner.arch }}
          key: bitcoind-${{ runner.os }}-${{ runner.arch }}
      - name: Enable caching for electrs
        id: cache-electrs
        uses: actions/cache@v4
        with:
          path: bin/electrs-${{ runner.os }}-${{ runner.arch }}
          key: electrs-${{ runner.os }}-${{ runner.arch }}
      - name: Download bitcoind/electrs
        if: "(steps.cache-bitcoind.outputs.cache-hit != 'true' || steps.cache-electrs.outputs.cache-hit != 'true')"
        run: |
          source ./scripts/download_bitcoind_electrs.sh
          mkdir -p bin
          mv "$BITCOIND_EXE" bin/bitcoind-${{ runner.os }}-${{ runner.arch }}
          mv "$ELECTRS_EXE" bin/electrs-${{ runner.os }}-${{ runner.arch }}
      - name: Set bitcoind/electrs environment variables
        run: |
          echo "BITCOIND_EXE=$( pwd )/bin/bitcoind-${{ runner.os }}-${{ runner.arch }}" >> "$GITHUB_ENV"
          echo "ELECTRS_EXE=$( pwd )/bin/electrs-${{ runner.os }}-${{ runner.arch }}" >> "$GITHUB_ENV"
      - name: Run benchmarks
        run: |
          cargo bench
6 changes: 6 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ lightning = { version = "0.2.0-beta1", features = ["std", "_test_utils"] }
#lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] }
proptest = "1.0.0"
regex = "1.5.6"
criterion = { version = "0.7.0", features = ["async_tokio"] }

[target.'cfg(not(no_download))'.dev-dependencies]
electrsd = { version = "0.35.0", default-features = false, features = ["legacy", "esplora_a33e97e1", "corepc-node_27_2"] }
Expand Down Expand Up @@ -148,3 +149,8 @@ check-cfg = [
"cfg(cln_test)",
"cfg(lnd_test)",
]

# Register the benchmark target with Cargo. `harness = false` disables the
# built-in libtest bench harness so Criterion can supply its own `main`
# (see `criterion_main!` in tests/benchmarks.rs).
[[bench]]
name = "payments"
path = "tests/benchmarks.rs"
harness = false
194 changes: 194 additions & 0 deletions tests/benchmarks.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,194 @@
mod common;

use std::time::Instant;
use std::{sync::Arc, time::Duration};

use bitcoin::hex::DisplayHex;
use bitcoin::Amount;
use common::{
expect_channel_ready_event, generate_blocks_and_wait, premine_and_distribute_funds,
setup_bitcoind_and_electrsd, setup_two_nodes_with_store, TestChainSource,
};
use criterion::{criterion_group, criterion_main, Criterion};
use ldk_node::{Event, Node};
use lightning_types::payment::{PaymentHash, PaymentPreimage};
use rand::RngCore;
use tokio::task::{self};

use crate::common::open_channel_push_amt;

fn spawn_payment(node_a: Arc<Node>, node_b: Arc<Node>, amount_msat: u64) {
let mut preimage_bytes = [0u8; 32];
rand::thread_rng().fill_bytes(&mut preimage_bytes);
let preimage = PaymentPreimage(preimage_bytes);
let payment_hash: PaymentHash = preimage.into();

// Spawn each payment as a separate async task
task::spawn(async move {
println!("{}: Starting payment", payment_hash.0.as_hex());
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are we positive all the printing IO doesn't impact the performance? Should we omit this for 'production' benchmarking?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I did wonder the same. Maybe I should indeed remove it to be sure. Would that be a cfg flag?

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Could be, but do we really need them in the first place?


loop {
// Pre-check the HTLC slots to try to avoid the performance impact of a failed payment.
while node_a.list_channels()[0].next_outbound_htlc_limit_msat == 0 {
println!("{}: Waiting for HTLC slots to free up", payment_hash.0.as_hex());
tokio::time::sleep(std::time::Duration::from_millis(100)).await;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How did you determine 100ms here? Do we have an intuition if this could starve any of the waiting payers?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It isn't great indeed. Basically something that looked 'reasonable' to me. I think it is a weakness in the API that a user even needs to do this.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay.

}

let payment_id = node_a.spontaneous_payment().send_with_preimage(
amount_msat,
node_b.node_id(),
preimage,
None,
);

match payment_id {
Ok(payment_id) => {
println!(
"{}: Awaiting payment with id {}",
payment_hash.0.as_hex(),
payment_id
);
break;
},
Err(e) => {
println!("{}: Payment attempt failed: {:?}", payment_hash.0.as_hex(), e);

tokio::time::sleep(std::time::Duration::from_millis(100)).await;
},
}
}
});
}

/// Sends a batch of 1000 concurrent spontaneous payments from `node_a` to
/// `node_b`, waits until all of them have succeeded, and returns the elapsed
/// wall-clock time of that phase.
///
/// Payments that fail are respawned until the whole batch completes. After the
/// measurement is taken, the total amount is paid back from `node_b` to
/// `node_a` so the channel balance is restored for the next benchmark
/// iteration; the refund is not included in the returned duration.
async fn send_payments(node_a: Arc<Node>, node_b: Arc<Node>) -> std::time::Duration {
	let start = Instant::now();

	let total_payments = 1000;
	let amount_msat = 10_000_000;

	for _ in 0..total_payments {
		spawn_payment(node_a.clone(), node_b.clone(), amount_msat);
	}

	// Drain node_a's event queue until every payment in the batch succeeded.
	let mut success_count = 0;
	while success_count < total_payments {
		match node_a.next_event_async().await {
			Event::PaymentSuccessful { payment_id, payment_hash, .. } => {
				if let Some(id) = payment_id {
					success_count += 1;
					println!("{}: Payment with id {:?} completed", payment_hash.0.as_hex(), id);
				} else {
					println!("Payment completed (no payment_id)");
				}
			},
			Event::PaymentFailed { payment_id, payment_hash, .. } => {
				println!("{}: Payment {:?} failed", payment_hash.unwrap().0.as_hex(), payment_id);

				// The payment failed, so we need to respawn it.
				spawn_payment(node_a.clone(), node_b.clone(), amount_msat);
			},
			ref e => {
				println!("Received non-payment event: {:?}", e);
			},
		}

		// Acknowledge the event so the node can surface the next one.
		node_a.event_handled().unwrap();
	}

	let duration = start.elapsed();
	println!("Time elapsed: {:?}", duration);

	// Send back the money for the next iteration. This runs after `elapsed()`
	// was captured, so it does not count towards the measurement.
	let mut preimage_bytes = [0u8; 32];
	rand::thread_rng().fill_bytes(&mut preimage_bytes);
	node_b
		.spontaneous_payment()
		.send_with_preimage(
			amount_msat * total_payments,
			node_a.node_id(),
			PaymentPreimage(preimage_bytes),
			None,
		)
		// Was `.ok().unwrap()`, which threw away the error before panicking;
		// `expect` keeps the underlying error in the panic message.
		.expect("refund payment from node_b to node_a should be accepted");

	duration
}

/// Criterion benchmark entry point: measures how long it takes to complete a
/// batch of concurrent payments over a single channel (see `send_payments`).
///
/// Node/channel setup is performed once, outside the measured region, and the
/// same pair of nodes is reused for every sample because setup is slow.
fn payment_benchmark(c: &mut Criterion) {
	// Set up two nodes. Because this is slow, we reuse the same nodes for each sample.
	let (bitcoind, electrsd) = setup_bitcoind_and_electrsd();
	let chain_source = TestChainSource::Esplora(&electrsd);

	// SQLite store rather than the test sync store, to benchmark against the
	// production persistence backend.
	let (node_a, node_b) = setup_two_nodes_with_store(
		&chain_source,
		false,
		true,
		false,
		common::TestStoreType::Sqlite,
	);

	let runtime =
		tokio::runtime::Builder::new_multi_thread().worker_threads(4).enable_all().build().unwrap();

	let node_a = Arc::new(node_a);
	let node_b = Arc::new(node_b);

	// Fund the nodes and setup a channel between them. The criterion function cannot be async, so we need to execute
	// the setup using a runtime.
	let node_a_cloned = Arc::clone(&node_a);
	let node_b_cloned = Arc::clone(&node_b);
	runtime.block_on(async move {
		// Premine funds to node_a's on-chain wallet, then sync both wallets.
		let address_a = node_a_cloned.onchain_payment().new_address().unwrap();
		let premine_sat = 25_000_000;
		premine_and_distribute_funds(
			&bitcoind.client,
			&electrsd.client,
			vec![address_a],
			Amount::from_sat(premine_sat),
		)
		.await;
		node_a_cloned.sync_wallets().unwrap();
		node_b_cloned.sync_wallets().unwrap();
		// Open a 16M-sat channel, pushing 1B msat to node_b so both sides have
		// liquidity (node_b needs balance for the end-of-iteration refund).
		open_channel_push_amt(
			&node_a_cloned,
			&node_b_cloned,
			16_000_000,
			Some(1_000_000_000),
			false,
			&electrsd,
		)
		.await;
		// Confirm the funding tx and wait for both sides to see the channel as ready.
		generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6).await;
		node_a_cloned.sync_wallets().unwrap();
		node_b_cloned.sync_wallets().unwrap();
		expect_channel_ready_event!(node_a_cloned, node_b_cloned.node_id());
		expect_channel_ready_event!(node_b_cloned, node_a_cloned.node_id());
	});

	let mut group = c.benchmark_group("payments");
	// Each sample is a full 1000-payment batch, so keep the sample count low.
	group.sample_size(10);

	group.bench_function("payments", |b| {
		// Use custom timing so that sending back the money at the end of each iteration isn't included in the
		// measurement.
		b.to_async(&runtime).iter_custom(|iter| {
			let node_a = Arc::clone(&node_a);
			let node_b = Arc::clone(&node_b);

			async move {
				// Accumulate only the measured durations reported by send_payments.
				let mut total = Duration::ZERO;
				for _i in 0..iter {
					let node_a = Arc::clone(&node_a);
					let node_b = Arc::clone(&node_b);

					total += send_payments(node_a, node_b).await;
				}
				total
			}
		});
	});
}

criterion_group!(benches, payment_benchmark);
criterion_main!(benches);
45 changes: 40 additions & 5 deletions tests/common/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,11 @@ use electrsd::corepc_node::{Client as BitcoindClient, Node as BitcoinD};
use electrsd::{corepc_node, ElectrsD};
use electrum_client::ElectrumApi;
use ldk_node::config::{AsyncPaymentsRole, Config, ElectrumSyncConfig, EsploraSyncConfig};
use ldk_node::io::sqlite_store::SqliteStore;
use ldk_node::io::sqlite_store::{SqliteStore, KV_TABLE_NAME, SQLITE_DB_FILE_NAME};
use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus};
use ldk_node::{
Builder, CustomTlvRecord, Event, LightningBalance, Node, NodeError, PendingSweepBalance,
Builder, CustomTlvRecord, DynStore, Event, LightningBalance, Node, NodeError,
PendingSweepBalance,
};
use lightning::io;
use lightning::ln::msgs::SocketAddress;
Expand Down Expand Up @@ -262,10 +263,23 @@ pub(crate) enum TestChainSource<'a> {
BitcoindRestSync(&'a BitcoinD),
}

/// Selects which `KVStore` backend a test node is built with.
///
/// Replaces the hand-written `impl Default` with `#[derive(Default)]` plus the
/// `#[default]` variant attribute (stable since Rust 1.62) — same behavior,
/// less boilerplate.
#[derive(Clone, Copy, Default)]
pub(crate) enum TestStoreType {
	/// The in-repo `TestSyncStore` wrapper (the historic default for tests).
	#[default]
	TestSyncStore,
	/// The SQLite-backed store, as used in production (`builder.build()`).
	Sqlite,
}

/// Per-node configuration bundle consumed by the `setup_node*` helpers.
#[derive(Clone, Default)]
pub(crate) struct TestConfig {
	/// The underlying ldk-node `Config` for the node under test.
	pub node_config: Config,
	/// Destination for the node's log output during the test.
	pub log_writer: TestLogWriter,
	/// Which `KVStore` backend to build the node with.
	pub store_type: TestStoreType,
}

macro_rules! setup_builder {
Expand All @@ -282,13 +296,28 @@ pub(crate) use setup_builder;
/// Sets up two connected test nodes using the default `TestSyncStore` backend.
///
/// Thin convenience wrapper around [`setup_two_nodes_with_store`] for the many
/// existing call sites that don't care about the store type.
pub(crate) fn setup_two_nodes(
	chain_source: &TestChainSource, allow_0conf: bool, anchor_channels: bool,
	anchors_trusted_no_reserve: bool,
) -> (TestNode, TestNode) {
	let store_type = TestStoreType::TestSyncStore;
	setup_two_nodes_with_store(
		chain_source,
		allow_0conf,
		anchor_channels,
		anchors_trusted_no_reserve,
		store_type,
	)
}

pub(crate) fn setup_two_nodes_with_store(
chain_source: &TestChainSource, allow_0conf: bool, anchor_channels: bool,
anchors_trusted_no_reserve: bool, store_type: TestStoreType,
) -> (TestNode, TestNode) {
println!("== Node A ==");
let config_a = random_config(anchor_channels);
let mut config_a = random_config(anchor_channels);
config_a.store_type = store_type;
let node_a = setup_node(chain_source, config_a, None);

println!("\n== Node B ==");
let mut config_b = random_config(anchor_channels);
config_b.store_type = store_type;
if allow_0conf {
config_b.node_config.trusted_peers_0conf.push(node_a.node_id());
}
Expand Down Expand Up @@ -381,8 +410,14 @@ pub(crate) fn setup_node_for_async_payments(

builder.set_async_payments_role(async_payments_role).unwrap();

let test_sync_store = Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.into()));
let node = builder.build_with_store(test_sync_store).unwrap();
let node = match config.store_type {
TestStoreType::TestSyncStore => {
let kv_store = Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.into()));
builder.build_with_store(kv_store).unwrap()
},
TestStoreType::Sqlite => builder.build().unwrap(),
};

node.start().unwrap();
assert!(node.status().is_running);
assert!(node.status().latest_fee_rate_cache_update_timestamp.is_some());
Expand Down
Loading