From 6b1fd236c81f060b301372a86d1ded6712077b0b Mon Sep 17 00:00:00 2001
From: Miguel <64885598+MiguelDD1@users.noreply.github.com>
Date: Wed, 1 Mar 2023 12:02:37 +0100
Subject: [PATCH] Replace logs by metrics on Kate (#24)

* Replace logs by metrics on Kate

* Bump `kate` version & clippy warnings
---
 Cargo.lock                  |  2 +-
 kate/Cargo.toml             |  2 +-
 kate/recovery/src/com.rs    |  4 +-
 kate/recovery/src/matrix.rs |  2 +-
 kate/src/com.rs             | 99 ++++++++++++++++++-------------
 kate/src/lib.rs             |  2 +
 kate/src/metrics.rs         | 24 +++++++++
 7 files changed, 79 insertions(+), 56 deletions(-)
 create mode 100644 kate/src/metrics.rs

diff --git a/Cargo.lock b/Cargo.lock
index 24a57645..7b7bdec8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1774,7 +1774,7 @@ dependencies = [
 
 [[package]]
 name = "kate"
-version = "0.6.1"
+version = "0.7.0"
 dependencies = [
  "criterion",
  "da-primitives",
diff --git a/kate/Cargo.toml b/kate/Cargo.toml
index 619fa96c..6bd4c523 100644
--- a/kate/Cargo.toml
+++ b/kate/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "kate"
-version = "0.6.1"
+version = "0.7.0"
 authors = ["Denis Ermolin "]
 edition = "2021"
 
diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs
index b661b298..1a42be9d 100644
--- a/kate/recovery/src/com.rs
+++ b/kate/recovery/src/com.rs
@@ -270,7 +270,7 @@ pub fn decode_app_extrinsics(
             .filter(|cell| !cell.data.is_empty())
         {
             None => app_data.extend(vec![0; config::CHUNK_SIZE]),
-            Some(cell) => app_data.extend(&cell.data),
+            Some(cell) => app_data.extend(cell.data),
         }
     }
     let ranges = index.app_data_ranges(app_id);
@@ -398,7 +398,7 @@ fn zero_poly_fn(
     let mut zero_poly: Vec<BlsScalar> = Vec::with_capacity(length as usize);
     let mut sub: BlsScalar;
     for i in 0..missing_indices.len() {
-        let v = missing_indices[i as usize];
+        let v = missing_indices[i];
         sub = BlsScalar::zero() - expanded_r_o_u[(v * domain_stride) as usize];
         zero_poly.push(sub);
         if i > 0 {
diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs
index 807a0e43..5c81697a 100644
--- a/kate/recovery/src/matrix.rs
+++ b/kate/recovery/src/matrix.rs
@@ -177,7 +177,7 @@ impl Dimensions {
 
     /// Checks if extended matrix contains given position.
     pub fn extended_contains(&self, position: &Position) -> bool {
-        (position.row as u32) < self.extended_rows() && position.col < self.cols
+        position.row < self.extended_rows() && position.col < self.cols
     }
 
     /// Creates iterator over rows in extended matrix.
diff --git a/kate/src/com.rs b/kate/src/com.rs
index a7d5a00d..22e8a02a 100644
--- a/kate/src/com.rs
+++ b/kate/src/com.rs
@@ -20,7 +20,6 @@ use dusk_plonk::{
 use frame_support::{ensure, sp_runtime::SaturatedConversion};
 #[cfg(feature = "std")]
 use kate_recovery::{com::app_specific_rows, index, matrix};
-use log::info;
 use rand::{Rng, SeedableRng};
 use rand_chacha::ChaChaRng;
 use rayon::prelude::*;
@@ -32,6 +31,7 @@ use crate::{
         DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, PROOF_SIZE,
         PROVER_KEY_SIZE, SCALAR_SIZE,
     },
+    metrics::Metrics,
     padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET,
 };
 #[cfg(feature = "std")]
@@ -270,9 +270,10 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result<BlsScalar, Error> {
 /// and that original data will be interleaved with erasure codes,
 /// instead of being in first k chunks of a column.
#[cfg(feature = "std")] -pub fn par_extend_data_matrix( +pub fn par_extend_data_matrix( block_dims: BlockDimensions, block: &[u8], + metrics: &M, ) -> Result, Error> { let start = Instant::now(); let dimensions: matrix::Dimensions = block_dims.try_into().map_err(|_| Error::BlockTooBig)?; @@ -308,22 +309,19 @@ pub fn par_extend_data_matrix( extended_column_eval_domain.fft_slice(col); }); - info!( - target: LOG_TARGET, - "Time to extend block {:?}", - start.elapsed() - ); + metrics.extended_block_time(start.elapsed()); Ok(chunk_elements) } //TODO cache extended data matrix //TODO explore faster Variable Base Multi Scalar Multiplication -pub fn build_proof( +pub fn build_proof( public_params: &kzg10::PublicParameters, block_dims: BlockDimensions, ext_data_matrix: &[BlsScalar], cells: &[Cell], + metrics: &M, ) -> Result, Error> { let cols_num = block_dims.cols.as_usize(); let extended_rows_num = block_dims @@ -350,11 +348,6 @@ pub fn build_proof( let prover_key = &prover_key; let row_dom_x_pts = &row_dom_x_pts; - info!( - target: LOG_TARGET, - "Number of CPU cores: {:#?}", - num_cpus::get() - ); // generate proof only for requested cells let total_start = Instant::now(); @@ -393,23 +386,19 @@ pub fn build_proof( } }); - info!( - target: LOG_TARGET, - "Time to build proofs of {} cells: {:?}", - cells.len(), - total_start.elapsed() - ); + metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into()); Ok(result_bytes) } #[cfg(feature = "std")] -pub fn par_build_commitments( +pub fn par_build_commitments( rows: BlockLengthRows, cols: BlockLengthColumns, chunk_size: u32, extrinsics_by_key: &[AppExtrinsic], rng_seed: Seed, + metrics: &M, ) -> Result<(XtsLayout, Vec, BlockDimensions, Vec), Error> { let start = Instant::now(); @@ -417,22 +406,16 @@ pub fn par_build_commitments( let (tx_layout, block, block_dims) = flatten_and_pad_block(rows, cols, chunk_size, extrinsics_by_key, rng_seed)?; - info!( - target: LOG_TARGET, - "Rows: {} Cols: {} Size: {}", - block_dims.rows, - block_dims.cols, - block.len(), - ); + metrics.block_dims_and_size(&block_dims, block.len().saturated_into()); - let ext_data_matrix = par_extend_data_matrix(block_dims, &block)?; + let ext_data_matrix = par_extend_data_matrix(block_dims, &block, metrics)?; let extended_rows_num = block_dims .rows .0 .checked_mul(EXTENSION_FACTOR) .ok_or(Error::BlockTooBig)?; - info!(target: LOG_TARGET, "Time to prepare {:?}", start.elapsed()); + metrics.preparation_block_time(start.elapsed()); let public_params = testnet::public_params(block_dims.cols.as_usize()); @@ -462,12 +445,6 @@ pub fn par_build_commitments( result_bytes.set_len(result_bytes_len); } - info!( - target: "system", - "Number of CPU cores: {:#?}", - num_cpus::get() - ); - let start = Instant::now(); (0..extended_rows_num) @@ -484,11 +461,7 @@ pub fn par_build_commitments( .map(|(row, res)| commit(&prover_key, row_eval_domain, row, res)) .collect::>()?; - info!( - target: "system", - "Time to build a commitment {:?}", - start.elapsed() - ); + metrics.commitment_build_time(start.elapsed()); Ok((tx_layout, result_bytes, block_dims, ext_data_matrix)) } @@ -552,6 +525,7 @@ mod tests { use crate::{ com::{get_block_dimensions, pad_iec_9797_1, par_extend_data_matrix, BlockDimensions}, config::DATA_CHUNK_SIZE, + metrics::IgnoreMetrics, padded_len, }; @@ -634,7 +608,7 @@ mod tests { .chunks_exact(DATA_CHUNK_SIZE) .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size)) .collect::>(); - let res = par_extend_data_matrix(block_dims, &block); + let res = 
par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}); eprintln!("result={:?}", res); eprintln!("expect={:?}", expected_result); assert_eq!(res.unwrap(), expected_result); @@ -793,8 +767,9 @@ mod tests { #![proptest_config(ProptestConfig::with_cases(20))] #[test] fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { + let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( - BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); let columns = sample_cells_from_matrix(&matrix, &dims, None); let extended_dims = dims.try_into().unwrap(); @@ -809,7 +784,7 @@ mod tests { for cell in random_cells(dims.cols, dims.rows, 1) { let row = cell.row.as_usize(); - let proof = build_proof(&public_params, dims, &matrix, &[cell]).unwrap(); + let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); prop_assert!(proof.len() == 80); let col: u16 = cell.col.0.try_into().expect("`random_cells` function generates a valid `u16` for columns"); @@ -829,7 +804,7 @@ mod tests { #![proptest_config(ProptestConfig::with_cases(20))] #[test] fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); let public_params = testnet::public_params(dims.cols.as_usize()); @@ -847,7 +822,7 @@ mod tests { #![proptest_config(ProptestConfig::with_cases(20))] #[test] fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); let public_params = testnet::public_params(dims.cols.as_usize()); @@ -881,6 +856,7 @@ mod tests { chunk_size, &[AppExtrinsic::from(original_data.to_vec())], hash, + &IgnoreMetrics {}, ) .unwrap(); @@ -929,7 +905,8 @@ get erasure coded to ensure redundancy."#; hash, ) .unwrap(); - let coded: Vec = par_extend_data_matrix(dims, &data[..]).unwrap(); + let coded: Vec = + par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); let cols_1 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 1, 2, 3])); @@ -973,7 +950,7 @@ get erasure coded to ensure redundancy."#; hash, ) .unwrap(); - let coded = par_extend_data_matrix(dims, &data[..]).unwrap(); + let coded = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); let dimensions: Dimensions = dims.try_into().unwrap(); let extended_matrix = coded @@ -1018,7 +995,8 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat ) .unwrap(); - let coded: Vec = par_extend_data_matrix(dims, &data[..]).unwrap(); + let coded: Vec = + par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); let cols = sample_cells_from_matrix(&coded, &dims, None); @@ -1058,7 +1036,8 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat ) .unwrap(); - 
let coded: Vec = par_extend_data_matrix(dims, &data[..]).unwrap(); + let coded: Vec = + par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); let cols = sample_cells_from_matrix(&coded, &dims, None); let extended_dims = dims.try_into().unwrap(); @@ -1143,7 +1122,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat data: data.clone(), }) .collect::>(); - par_build_commitments(BlockLengthRows(4), BlockLengthColumns(4), 32, &xts, hash).unwrap(); + par_build_commitments( + BlockLengthRows(4), + BlockLengthColumns(4), + 32, + &xts, + hash, + &IgnoreMetrics {}, + ) + .unwrap(); } #[test] @@ -1155,7 +1142,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat app_id: AppId(0), data: vec![0; 31 * 8], }]; - par_build_commitments(BlockLengthRows(4), BlockLengthColumns(4), 32, &xts, hash).unwrap(); + par_build_commitments( + BlockLengthRows(4), + BlockLengthColumns(4), + 32, + &xts, + hash, + &IgnoreMetrics {}, + ) + .unwrap(); } #[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] #[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] @@ -1187,6 +1182,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let ext_m = row.into_iter().flat_map(|e| vec![e, e]).collect::>(); let rows: u16 = len.try_into().expect("rows length should be valid `u16`"); + let metrics = IgnoreMetrics {}; for col in 0..rows { // Randomly chosen cell to prove, probably should test all of them @@ -1203,6 +1199,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat }, &ext_m, &[cell], + &metrics, ) .unwrap(); println!("Proof: {proof:?}"); diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 4518712b..16370ce6 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -62,6 +62,8 @@ pub mod testnet { } } +pub mod metrics; + #[cfg(feature = "std")] pub mod com; /// Precalculate the length of padding IEC 9797 1. diff --git a/kate/src/metrics.rs b/kate/src/metrics.rs new file mode 100644 index 00000000..b823e2be --- /dev/null +++ b/kate/src/metrics.rs @@ -0,0 +1,24 @@ +use crate::BlockDimensions; +use sp_std::time::Duration; + +/// Trait for measurements during the header built process. +pub trait Metrics { + fn extended_block_time(&self, elapsed: Duration); + fn preparation_block_time(&self, elapsed: Duration); + fn commitment_build_time(&self, elapsed: Duration); + fn proof_build_time(&self, elapsed: Duration, cells: u32); + fn block_dims_and_size(&self, block_dims: &BlockDimensions, block_len: u32); +} + +/// Adapter to ignore any measurements. +/// It should be used for testing environments. +#[derive(Clone, Copy, Debug)] +pub struct IgnoreMetrics {} + +impl Metrics for IgnoreMetrics { + fn extended_block_time(&self, _: Duration) {} + fn preparation_block_time(&self, _: Duration) {} + fn commitment_build_time(&self, _: Duration) {} + fn proof_build_time(&self, _: Duration, _: u32) {} + fn block_dims_and_size(&self, _: &BlockDimensions, _: u32) {} +}
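
Usage sketch (not part of this patch): a caller that still wants the old log output can implement the new `Metrics` trait and forward each hook to `log`, reproducing the removed `info!` lines almost one-to-one. The `LogMetrics` name below is hypothetical, and the sketch assumes the crate keeps a `log` dependency; it is written as if it lived next to `IgnoreMetrics` in kate/src/metrics.rs, where `Metrics`, `BlockDimensions` and `Duration` are already in scope.

// Hypothetical `LogMetrics` adapter, illustration only (not in this patch).
// It mirrors the `info!` calls removed from com.rs, assuming `log` stays a
// dependency of the crate.
use crate::LOG_TARGET;
use log::info;

/// Adapter that logs every measurement.
pub struct LogMetrics;

impl Metrics for LogMetrics {
    fn extended_block_time(&self, elapsed: Duration) {
        info!(target: LOG_TARGET, "Time to extend block {:?}", elapsed);
    }

    fn preparation_block_time(&self, elapsed: Duration) {
        info!(target: LOG_TARGET, "Time to prepare {:?}", elapsed);
    }

    fn commitment_build_time(&self, elapsed: Duration) {
        info!(target: LOG_TARGET, "Time to build a commitment {:?}", elapsed);
    }

    fn proof_build_time(&self, elapsed: Duration, cells: u32) {
        info!(
            target: LOG_TARGET,
            "Time to build proofs of {} cells: {:?}",
            cells,
            elapsed
        );
    }

    fn block_dims_and_size(&self, block_dims: &BlockDimensions, block_len: u32) {
        info!(
            target: LOG_TARGET,
            "Rows: {} Cols: {} Size: {}",
            block_dims.rows,
            block_dims.cols,
            block_len
        );
    }
}

The same hooks could instead be wired to Prometheus counters or histograms on the node side; the trait only fixes where the measurements are taken, not how they are reported.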