Replace logs by metrics on Kate (#24)
* Replace logs by metrics on Kate
* Bump `kate` version & clippy warnings
MiguelDD1 authored Mar 1, 2023
1 parent 7ba247c commit 6b1fd23
Showing 7 changed files with 79 additions and 56 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Generated file; diff not rendered by default.

2 changes: 1 addition & 1 deletion kate/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "kate"
version = "0.6.1"
version = "0.7.0"
authors = ["Denis Ermolin <denis.ermolin@matic.network>"]
edition = "2021"

4 changes: 2 additions & 2 deletions kate/recovery/src/com.rs
@@ -270,7 +270,7 @@ pub fn decode_app_extrinsics(
.filter(|cell| !cell.data.is_empty())
{
None => app_data.extend(vec![0; config::CHUNK_SIZE]),
Some(cell) => app_data.extend(&cell.data),
Some(cell) => app_data.extend(cell.data),
}
}
let ranges = index.app_data_ranges(app_id);
@@ -398,7 +398,7 @@ fn zero_poly_fn(
let mut zero_poly: Vec<BlsScalar> = Vec::with_capacity(length as usize);
let mut sub: BlsScalar;
for i in 0..missing_indices.len() {
let v = missing_indices[i as usize];
let v = missing_indices[i];
sub = BlsScalar::zero() - expanded_r_o_u[(v * domain_stride) as usize];
zero_poly.push(sub);
if i > 0 {
2 changes: 1 addition & 1 deletion kate/recovery/src/matrix.rs
@@ -177,7 +177,7 @@ impl Dimensions {

/// Checks if extended matrix contains given position.
pub fn extended_contains(&self, position: &Position) -> bool {
(position.row as u32) < self.extended_rows() && position.col < self.cols
position.row < self.extended_rows() && position.col < self.cols
}

/// Creates iterator over rows in extended matrix.
99 changes: 48 additions & 51 deletions kate/src/com.rs
@@ -20,7 +20,6 @@ use dusk_plonk::{
use frame_support::{ensure, sp_runtime::SaturatedConversion};
#[cfg(feature = "std")]
use kate_recovery::{com::app_specific_rows, index, matrix};
use log::info;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaChaRng;
use rayon::prelude::*;
@@ -32,6 +31,7 @@ use crate::{
DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, PROOF_SIZE,
PROVER_KEY_SIZE, SCALAR_SIZE,
},
metrics::Metrics,
padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET,
};
#[cfg(feature = "std")]
@@ -270,9 +270,10 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result<BlsScalar, Error> {
/// and that original data will be interleaved with erasure codes,
/// instead of being in first k chunks of a column.
#[cfg(feature = "std")]
pub fn par_extend_data_matrix(
pub fn par_extend_data_matrix<M: Metrics>(
block_dims: BlockDimensions,
block: &[u8],
metrics: &M,
) -> Result<Vec<BlsScalar>, Error> {
let start = Instant::now();
let dimensions: matrix::Dimensions = block_dims.try_into().map_err(|_| Error::BlockTooBig)?;
@@ -308,22 +309,19 @@ pub fn par_extend_data_matrix(
extended_column_eval_domain.fft_slice(col);
});

info!(
target: LOG_TARGET,
"Time to extend block {:?}",
start.elapsed()
);
metrics.extended_block_time(start.elapsed());

Ok(chunk_elements)
}

//TODO cache extended data matrix
//TODO explore faster Variable Base Multi Scalar Multiplication
pub fn build_proof(
pub fn build_proof<M: Metrics>(
public_params: &kzg10::PublicParameters,
block_dims: BlockDimensions,
ext_data_matrix: &[BlsScalar],
cells: &[Cell],
metrics: &M,
) -> Result<Vec<u8>, Error> {
let cols_num = block_dims.cols.as_usize();
let extended_rows_num = block_dims
@@ -350,11 +348,6 @@ pub fn build_proof(
let prover_key = &prover_key;
let row_dom_x_pts = &row_dom_x_pts;

info!(
target: LOG_TARGET,
"Number of CPU cores: {:#?}",
num_cpus::get()
);
// generate proof only for requested cells
let total_start = Instant::now();

@@ -393,46 +386,36 @@
}
});

info!(
target: LOG_TARGET,
"Time to build proofs of {} cells: {:?}",
cells.len(),
total_start.elapsed()
);
metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into());

Ok(result_bytes)
}

#[cfg(feature = "std")]
pub fn par_build_commitments(
pub fn par_build_commitments<M: Metrics>(
rows: BlockLengthRows,
cols: BlockLengthColumns,
chunk_size: u32,
extrinsics_by_key: &[AppExtrinsic],
rng_seed: Seed,
metrics: &M,
) -> Result<(XtsLayout, Vec<u8>, BlockDimensions, Vec<BlsScalar>), Error> {
let start = Instant::now();

// generate data matrix first
let (tx_layout, block, block_dims) =
flatten_and_pad_block(rows, cols, chunk_size, extrinsics_by_key, rng_seed)?;

info!(
target: LOG_TARGET,
"Rows: {} Cols: {} Size: {}",
block_dims.rows,
block_dims.cols,
block.len(),
);
metrics.block_dims_and_size(&block_dims, block.len().saturated_into());

let ext_data_matrix = par_extend_data_matrix(block_dims, &block)?;
let ext_data_matrix = par_extend_data_matrix(block_dims, &block, metrics)?;
let extended_rows_num = block_dims
.rows
.0
.checked_mul(EXTENSION_FACTOR)
.ok_or(Error::BlockTooBig)?;

info!(target: LOG_TARGET, "Time to prepare {:?}", start.elapsed());
metrics.preparation_block_time(start.elapsed());

let public_params = testnet::public_params(block_dims.cols.as_usize());

@@ -462,12 +445,6 @@ pub fn par_build_commitments(
result_bytes.set_len(result_bytes_len);
}

info!(
target: "system",
"Number of CPU cores: {:#?}",
num_cpus::get()
);

let start = Instant::now();

(0..extended_rows_num)
@@ -484,11 +461,7 @@
.map(|(row, res)| commit(&prover_key, row_eval_domain, row, res))
.collect::<Result<_, _>>()?;

info!(
target: "system",
"Time to build a commitment {:?}",
start.elapsed()
);
metrics.commitment_build_time(start.elapsed());

Ok((tx_layout, result_bytes, block_dims, ext_data_matrix))
}
@@ -552,6 +525,7 @@ mod tests {
use crate::{
com::{get_block_dimensions, pad_iec_9797_1, par_extend_data_matrix, BlockDimensions},
config::DATA_CHUNK_SIZE,
metrics::IgnoreMetrics,
padded_len,
};

@@ -634,7 +608,7 @@
.chunks_exact(DATA_CHUNK_SIZE)
.flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size))
.collect::<Vec<u8>>();
let res = par_extend_data_matrix(block_dims, &block);
let res = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {});
eprintln!("result={:?}", res);
eprintln!("expect={:?}", expected_result);
assert_eq!(res.unwrap(), expected_result);
@@ -793,8 +767,9 @@
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) {
let metrics = IgnoreMetrics {};
let (layout, commitments, dims, matrix) = par_build_commitments(
BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap();
BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap();

let columns = sample_cells_from_matrix(&matrix, &dims, None);
let extended_dims = dims.try_into().unwrap();
@@ -809,7 +784,7 @@
for cell in random_cells(dims.cols, dims.rows, 1) {
let row = cell.row.as_usize();

let proof = build_proof(&public_params, dims, &matrix, &[cell]).unwrap();
let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap();
prop_assert!(proof.len() == 80);

let col: u16 = cell.col.0.try_into().expect("`random_cells` function generates a valid `u16` for columns");
@@ -829,7 +804,7 @@
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_commitments_verify(ref xts in app_extrinsics_strategy()) {
let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap();
let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap();

let index = app_data_index_try_from_layout(layout).unwrap();
let public_params = testnet::public_params(dims.cols.as_usize());
@@ -847,7 +822,7 @@
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) {
let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap();
let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap();

let index = app_data_index_try_from_layout(layout).unwrap();
let public_params = testnet::public_params(dims.cols.as_usize());
@@ -881,6 +856,7 @@ mod tests {
chunk_size,
&[AppExtrinsic::from(original_data.to_vec())],
hash,
&IgnoreMetrics {},
)
.unwrap();

@@ -929,7 +905,8 @@ get erasure coded to ensure redundancy."#;
hash,
)
.unwrap();
let coded: Vec<BlsScalar> = par_extend_data_matrix(dims, &data[..]).unwrap();
let coded: Vec<BlsScalar> =
par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap();

let cols_1 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 1, 2, 3]));

@@ -973,7 +950,7 @@ get erasure coded to ensure redundancy."#;
hash,
)
.unwrap();
let coded = par_extend_data_matrix(dims, &data[..]).unwrap();
let coded = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap();

let dimensions: Dimensions = dims.try_into().unwrap();
let extended_matrix = coded
@@ -1018,7 +995,8 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
)
.unwrap();

let coded: Vec<BlsScalar> = par_extend_data_matrix(dims, &data[..]).unwrap();
let coded: Vec<BlsScalar> =
par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap();

let cols = sample_cells_from_matrix(&coded, &dims, None);

@@ -1058,7 +1036,8 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
)
.unwrap();

let coded: Vec<BlsScalar> = par_extend_data_matrix(dims, &data[..]).unwrap();
let coded: Vec<BlsScalar> =
par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap();

let cols = sample_cells_from_matrix(&coded, &dims, None);
let extended_dims = dims.try_into().unwrap();
@@ -1143,7 +1122,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
data: data.clone(),
})
.collect::<Vec<_>>();
par_build_commitments(BlockLengthRows(4), BlockLengthColumns(4), 32, &xts, hash).unwrap();
par_build_commitments(
BlockLengthRows(4),
BlockLengthColumns(4),
32,
&xts,
hash,
&IgnoreMetrics {},
)
.unwrap();
}

#[test]
@@ -1155,7 +1142,15 @@
app_id: AppId(0),
data: vec![0; 31 * 8],
}];
par_build_commitments(BlockLengthRows(4), BlockLengthColumns(4), 32, &xts, hash).unwrap();
par_build_commitments(
BlockLengthRows(4),
BlockLengthColumns(4),
32,
&xts,
hash,
&IgnoreMetrics {},
)
.unwrap();
}
#[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")]
#[test_case( ([0,0,0,0]).to_vec(); "All values are zero")]
@@ -1187,6 +1182,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
let ext_m = row.into_iter().flat_map(|e| vec![e, e]).collect::<Vec<_>>();

let rows: u16 = len.try_into().expect("rows length should be valid `u16`");
let metrics = IgnoreMetrics {};

for col in 0..rows {
// Randomly chosen cell to prove, probably should test all of them
Expand All @@ -1203,6 +1199,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat
},
&ext_m,
&[cell],
&metrics,
)
.unwrap();
println!("Proof: {proof:?}");
2 changes: 2 additions & 0 deletions kate/src/lib.rs
@@ -62,6 +62,8 @@ pub mod testnet {
}
}

pub mod metrics;

#[cfg(feature = "std")]
pub mod com;
/// Precalculate the length of padding IEC 9797 1.
24 changes: 24 additions & 0 deletions kate/src/metrics.rs
@@ -0,0 +1,24 @@
use crate::BlockDimensions;
use sp_std::time::Duration;

/// Trait for measurements during the header build process.
pub trait Metrics {
fn extended_block_time(&self, elapsed: Duration);
fn preparation_block_time(&self, elapsed: Duration);
fn commitment_build_time(&self, elapsed: Duration);
fn proof_build_time(&self, elapsed: Duration, cells: u32);
fn block_dims_and_size(&self, block_dims: &BlockDimensions, block_len: u32);
}

/// Adapter to ignore any measurements.
/// It should be used for testing environments.
#[derive(Clone, Copy, Debug)]
pub struct IgnoreMetrics {}

impl Metrics for IgnoreMetrics {
fn extended_block_time(&self, _: Duration) {}
fn preparation_block_time(&self, _: Duration) {}
fn commitment_build_time(&self, _: Duration) {}
fn proof_build_time(&self, _: Duration, _: u32) {}
fn block_dims_and_size(&self, _: &BlockDimensions, _: u32) {}
}
