Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix for overly harsh checks when checking Merkle trees. #289

Merged
merged 4 commits into from
Aug 10, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 2 additions & 28 deletions sdk/src/assertions/bmff_hash.rs
Original file line number Diff line number Diff line change
Expand Up @@ -609,33 +609,13 @@ impl BmffHash {
}
};

let sample_cnt = mp4.sample_count(mm.local_id).map_err(|_e| {
Error::InvalidAsset("Could not parse BMFF track sample".to_string())
})?;

let sample_cnt = track.sample_count();
if sample_cnt == 0 {
return Err(Error::InvalidAsset("No samples".to_string()));
}

let track_id = track.track_id();

// get the chunk count
let stbl_box = &track.trak.mdia.minf.stbl;
let chunk_cnt = match &stbl_box.stco {
Some(stco) => stco.entries.len(),
None => match &stbl_box.co64 {
Some(co64) => co64.entries.len(),
None => 0,
},
};

// the Merkle count is the number of chunks for timed media
if mm.count != chunk_cnt as u32 {
return Err(Error::HashMismatch(
"Track count does not match Merkle map count".to_string(),
));
}

// create sample to chunk mapping
// create the Merkle tree per samples in a chunk
let mut chunk_hash_map: HashMap<u32, Hasher> = HashMap::new();
Expand Down Expand Up @@ -684,19 +664,13 @@ impl BmffHash {
}
}

if chunk_cnt != chunk_hash_map.len() {
return Err(Error::HashMismatch(
"Incorrect number of Merkle trees".to_string(),
));
}

// finalize leaf hashes
let mut leaf_hashes = Vec::new();
for chunk_bmff_mm in &track_to_bmff_merkle_map[&track_id] {
match chunk_hash_map.remove(&(chunk_bmff_mm.location + 1)) {
Some(h) => {
let h = Hasher::finalize(h);
leaf_hashes.push(h.clone());
leaf_hashes.push(h);
}
None => {
return Err(Error::HashMismatch(
Expand Down
95 changes: 53 additions & 42 deletions sdk/src/jumbf/boxes.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ use std::{
io::{Read, Result as IoResult, Seek, SeekFrom, Write},
};

use byteorder::{BigEndian, ReadBytesExt};
use hex::FromHex;
use log::debug;
use thiserror::Error;
Expand Down Expand Up @@ -1910,10 +1911,10 @@ impl BoxReader {
reader.read_exact(&mut togs)?;
bytes_left -= 1;

let mut sbuf = Vec::with_capacity(64);
if togs[0] & 0x03 == 0x03 {
// must be requestable and labeled
// read label
let mut sbuf = Vec::with_capacity(64);
loop {
let mut buf = [0; 1];
reader.read_exact(&mut buf)?;
Expand All @@ -1924,54 +1925,64 @@ impl BoxReader {
sbuf.push(buf[0]);
}
}
} else {
return Err(JumbfParseError::InvalidDescriptionBox);
}

// if there is a signature, we need to read it...
let sig = if togs[0] & 0x08 == 0x08 {
let mut sigbuf: [u8; 32] = [0; 32];
reader.read_exact(&mut sigbuf)?;
bytes_left -= 32;
Some(sigbuf)
} else {
None
};

// read private box if necessary
let private = if togs[0] & 0x10 == 0x10 {
let header = BoxReader::read_header(reader)
.map_err(|_| JumbfParseError::InvalidBoxHeader)?;
if header.size == 0 {
// bad read,
return Err(JumbfParseError::InvalidBoxHeader);
} else if header.size != bytes_left - HEADER_SIZE {
// this means that we started w/o the header...
unread_bytes(reader, HEADER_SIZE)?;
}
// box id
let bxid = if togs[0] & 0x04 == 0x04 {
let idbuf = reader.read_u32::<BigEndian>()?;
bytes_left -= 4;
Some(idbuf)
} else {
None
};

if header.name == BoxType::SaltHash {
let data_len = header.size - HEADER_SIZE;
let mut buf = vec![0u8; data_len as usize];
reader.read_exact(&mut buf)?;
// if there is a signature, we need to read it...
let sig = if togs[0] & 0x08 == 0x08 {
let mut sigbuf: [u8; 32] = [0; 32];
reader.read_exact(&mut sigbuf)?;
bytes_left -= 32;
Some(sigbuf)
} else {
None
};

bytes_left -= header.size;
// read private box if necessary
let private = if togs[0] & 0x10 == 0x10 {
let header =
BoxReader::read_header(reader).map_err(|_| JumbfParseError::InvalidBoxHeader)?;
if header.size == 0 {
// bad read,
return Err(JumbfParseError::InvalidBoxHeader);
} else if header.size != bytes_left - HEADER_SIZE {
// this means that we started w/o the header...
unread_bytes(reader, HEADER_SIZE)?;
}

Some(CAISaltContentBox::new(buf))
} else {
return Err(JumbfParseError::InvalidBoxHeader);
}
} else {
None
};
if header.name == BoxType::SaltHash {
let data_len = header.size - HEADER_SIZE;
let mut buf = vec![0u8; data_len as usize];
reader.read_exact(&mut buf)?;

if bytes_left != HEADER_SIZE {
// make sure we have consumed the entire box
bytes_left -= header.size;

Some(CAISaltContentBox::new(buf))
} else {
return Err(JumbfParseError::InvalidBoxHeader);
}
} else {
None
};

return Ok(JUMBFDescriptionBox::from(
&uuid, togs[0], sbuf, None, sig, private,
));
if bytes_left != HEADER_SIZE {
// make sure we have consumed the entire box
return Err(JumbfParseError::InvalidBoxHeader);
}
Err(JumbfParseError::InvalidDescriptionBox)

Ok(JUMBFDescriptionBox::from(
&uuid, togs[0], sbuf, bxid, sig, private,
))
}

pub fn read_json_box<R: Read + Seek>(
Expand Down Expand Up @@ -2192,6 +2203,7 @@ impl BoxReader {
// load the description box & create a new superbox from it
let jdesc = BoxReader::read_desc_box(reader, jumd_header.size)
.map_err(|_| JumbfParseError::UnexpectedEof)?;

if jdesc.label().is_empty() {
return Err(JumbfParseError::UnexpectedEof);
}
Expand All @@ -2213,8 +2225,7 @@ impl BoxReader {
unread_bytes(reader, HEADER_SIZE)?; // seek back to the beginning of the box
let next_box: Box<dyn BMFFBox> = match box_header.name {
BoxType::Jumb => Box::new(
BoxReader::read_super_box(reader)
.map_err(|_| JumbfParseError::InvalidJumbBox)?,
BoxReader::read_super_box(reader)?, //.map_err(|_| JumbfParseError::InvalidJumbBox)?,
),
BoxType::Json => Box::new(
BoxReader::read_json_box(reader, box_header.size)
Expand Down