
Commit

Reverted previous broken fix from commit 2f4172c and tried to implement a proper fix. Also added some additional regression tests.
cry-inc committed Aug 17, 2024
1 parent b861bee commit 99b5d44
Showing 3 changed files with 131 additions and 29 deletions.
40 changes: 35 additions & 5 deletions src/bs_write.rs
@@ -47,11 +47,25 @@ impl ByteStreamWriteBuffer {
}
}

pub fn get_full_bytes(&mut self) -> Vec<u8> {
let to_take = self.full_bytes();
self.buffer.drain(..to_take).collect()
}

pub fn get_all_bytes(&mut self) -> Vec<u8> {
self.last_byte_bit = 0;
self.buffer.drain(..).collect()
}

pub fn full_bytes(&self) -> usize {
let len = self.buffer.len();
if self.last_byte_bit != 0 {
len - 1
} else {
len
}
}

pub fn all_bytes(&self) -> usize {
self.buffer.len()
}
@@ -64,8 +78,12 @@ mod tests {
#[test]
fn empty() {
let mut buffer = ByteStreamWriteBuffer::new();
assert_eq!(buffer.full_bytes(), 0);
assert_eq!(buffer.all_bytes(), 0);

let full = buffer.get_full_bytes();
assert_eq!(full.len(), 0);

let all = buffer.get_all_bytes();
assert_eq!(all.len(), 0);
}
@@ -74,12 +92,14 @@
fn add_bytes() {
let mut buffer = ByteStreamWriteBuffer::new();
buffer.add_bytes(&[1, 2, 3, 4]);
assert_eq!(buffer.full_bytes(), 4);
assert_eq!(buffer.all_bytes(), 4);

let all = buffer.get_all_bytes();
assert_eq!(all.len(), 4);
assert_eq!(all, [1, 2, 3, 4]);
let full = buffer.get_full_bytes();
assert_eq!(full.len(), 4);
assert_eq!(full, [1, 2, 3, 4]);

assert_eq!(buffer.full_bytes(), 0);
assert_eq!(buffer.all_bytes(), 0);
}

@@ -89,12 +109,20 @@
buffer.add_bytes(&[1, 2, 3, 4]);
buffer.add_bits(&[0b00001111], 4);

assert_eq!(buffer.full_bytes(), 4);
assert_eq!(buffer.all_bytes(), 5);

let full = buffer.get_full_bytes();
assert_eq!(full.len(), 4);
assert_eq!(full, [1, 2, 3, 4]);
assert_eq!(buffer.full_bytes(), 0);
assert_eq!(buffer.all_bytes(), 1);

let all = buffer.get_all_bytes();
assert_eq!(all.len(), 5);
assert_eq!(all, [1, 2, 3, 4, 0b00001111]);
assert_eq!(all.len(), 1);
assert_eq!(all, [0b00001111]);

assert_eq!(buffer.full_bytes(), 0);
assert_eq!(buffer.all_bytes(), 0);
}

@@ -104,6 +132,7 @@ mod tests {
buffer.add_bits(&[0b00001111], 4);
buffer.add_bits(&[0b00001111], 4);

assert_eq!(buffer.full_bytes(), 1);
assert_eq!(buffer.all_bytes(), 1);

let all = buffer.get_all_bytes();
@@ -117,6 +146,7 @@
buffer.add_bytes(&[0b10000001]);
buffer.add_bits(&[0b100001], 6);

assert_eq!(buffer.full_bytes(), 2);
assert_eq!(buffer.all_bytes(), 3);

let all = buffer.get_all_bytes();
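
The new full_bytes()/get_full_bytes() pair lets callers drain only completed bytes while a trailing, partially filled byte stays buffered; get_all_bytes() still drains everything and resets the bit position. A minimal standalone sketch of that distinction (WriteBuffer and its simplified add_bits are illustrative stand-ins, not the crate's actual ByteStreamWriteBuffer):

// Minimal standalone sketch (not the crate's real `ByteStreamWriteBuffer`)
// of the full-vs-all byte distinction added in this commit: `all_bytes()`
// counts a trailing, partially filled byte, while `full_bytes()` excludes it.
use std::collections::VecDeque;

struct WriteBuffer {
    buffer: VecDeque<u8>,
    last_byte_bit: usize, // next free bit in the last byte, 0 = byte-aligned
}

impl WriteBuffer {
    fn new() -> Self {
        Self { buffer: VecDeque::new(), last_byte_bit: 0 }
    }

    // Simplified bit appender: assumes `bits <= 8` and that a partial byte is
    // only ever topped up to the byte boundary. Enough to show the
    // bookkeeping, not a faithful copy of the crate's implementation.
    fn add_bits(&mut self, value: u8, bits: usize) {
        if self.last_byte_bit == 0 {
            self.buffer.push_back(value);
        } else if let Some(last) = self.buffer.back_mut() {
            *last |= value << self.last_byte_bit;
        }
        self.last_byte_bit = (self.last_byte_bit + bits) % 8;
    }

    fn full_bytes(&self) -> usize {
        if self.last_byte_bit != 0 {
            self.buffer.len() - 1
        } else {
            self.buffer.len()
        }
    }

    fn all_bytes(&self) -> usize {
        self.buffer.len()
    }
}

fn main() {
    let mut buf = WriteBuffer::new();
    buf.add_bits(0b0000_1111, 4);
    assert_eq!(buf.all_bytes(), 1);  // one partially filled byte exists
    assert_eq!(buf.full_bytes(), 0); // but it is not complete yet
    buf.add_bits(0b0000_1111, 4);
    assert_eq!(buf.full_bytes(), 1); // now the byte is complete
}
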
63 changes: 43 additions & 20 deletions src/pc_writer.rs
@@ -33,6 +33,7 @@ pub struct PointCloudWriter<'a, T: Read + Write + Seek> {
point_count: u64,
buffer: VecDeque<RawValues>,
max_points_per_packet: usize,
byte_streams: Vec<ByteStreamWriteBuffer>,
cartesian_bounds: Option<CartesianBounds>,
spherical_bounds: Option<SphericalBounds>,
index_bounds: Option<IndexBounds>,
@@ -67,6 +68,9 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {
// Calculate max number of points per packet
let max_points_per_packet = get_max_packet_points(&prototype);

// Prepare byte stream buffers
let byte_streams = vec![ByteStreamWriteBuffer::new(); prototype.len()];

let mut section_header = CompressedVectorSectionHeader::default();
let section_offset = writer.physical_position()?;
section_header.data_offset = section_offset + CompressedVectorSectionHeader::SIZE;
@@ -135,6 +139,7 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {
prototype,
point_count: 0,
buffer: VecDeque::new(),
byte_streams,
max_points_per_packet,
cartesian_bounds,
spherical_bounds,
@@ -299,14 +304,10 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {
Ok(())
}

fn write_buffer_to_disk(&mut self) -> Result<()> {
fn write_buffer_to_disk(&mut self, last_flush: bool) -> Result<()> {
// Add points from buffer into byte streams
let packet_points = self.max_points_per_packet.min(self.buffer.len());
if packet_points == 0 {
return Ok(());
}

let proto_len = self.prototype.len();
let mut buffers = vec![ByteStreamWriteBuffer::new(); proto_len];
for _ in 0..packet_points {
let p = self
.buffer
@@ -316,22 +317,37 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {
let raw_value = p
.get(i)
.invalid_err("Prototype is bigger than number of provided values")?;
prototype.data_type.write(raw_value, &mut buffers[i])?;
prototype
.data_type
.write(raw_value, &mut self.byte_streams[i])?;
}
}

// Check and prepare buffer sizes
let mut sum_buffer_sizes = 0;
let mut buffer_sizes = Vec::with_capacity(proto_len);
for buffer in &buffers {
let len = buffer.all_bytes();
sum_buffer_sizes += len;
buffer_sizes.push(len as u16);
let mut streams_empty = true;
let mut sum_bs_sizes = 0;
let mut bs_sizes = Vec::with_capacity(proto_len);
for bs in &self.byte_streams {
let bs_size = if last_flush {
bs.all_bytes()
} else {
bs.full_bytes()
};
if bs_size > 0 {
streams_empty = false;
}
sum_bs_sizes += bs_size;
bs_sizes.push(bs_size as u16);
}

// No data to write, let's stop here
if streams_empty {
return Ok(());
}

// Calculate packet length for header, must be aligned to four bytes.
// If the length exceeds 2^16, this library has a logic bug somewhere!
let mut packet_length = DataPacketHeader::SIZE + proto_len * 2 + sum_buffer_sizes;
let mut packet_length = DataPacketHeader::SIZE + proto_len * 2 + sum_bs_sizes;
if packet_length % 4 != 0 {
let missing = 4 - (packet_length % 4);
packet_length += missing;
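
The arithmetic above pads each data packet to a multiple of four bytes, as the comment notes the E57 packet length must be four-byte aligned. A small worked example of the same calculation (padded_packet_length and the concrete numbers are made up for illustration; they are not part of the crate):

// Worked example of the padding above: header + per-field u16 sizes + payload,
// rounded up to the next multiple of four. The concrete numbers are invented
// for illustration only.
fn padded_packet_length(header_size: usize, proto_len: usize, payload_bytes: usize) -> usize {
    let mut len = header_size + proto_len * 2 + payload_bytes;
    if len % 4 != 0 {
        len += 4 - (len % 4);
    }
    len
}

fn main() {
    // Hypothetical packet: 6-byte header, 3 prototype fields, 13 payload bytes.
    assert_eq!(padded_packet_length(6, 3, 13), 28); // 25 rounded up to 28
}
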
@@ -352,16 +368,20 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {
.write(&mut self.writer)?;

// Write bytestream sizes as u16 values
for size in buffer_sizes {
for size in bs_sizes {
let bytes = size.to_le_bytes();
self.writer
.write_all(&bytes)
.write_err("Cannot write data packet buffer size")?;
}

// Write actual bytestream buffers with data
for buffer in &mut buffers {
let data = buffer.get_all_bytes();
for bs in &mut self.byte_streams {
let data = if last_flush {
bs.get_all_bytes()
} else {
bs.get_full_bytes()
};
self.writer
.write_all(&data)
.write_err("Cannot write bytestream buffer into data packet")?;
Expand Down Expand Up @@ -477,19 +497,22 @@ impl<'a, T: Read + Write + Seek> PointCloudWriter<'a, T> {

// Empty buffer and write points when it's full
if self.buffer.len() >= self.max_points_per_packet {
self.write_buffer_to_disk()?;
self.write_buffer_to_disk(false)?;
}

Ok(())
}

/// Called after all points have been added to finalize the creation of the new point cloud.
pub fn finalize(&mut self) -> Result<()> {
// Flush remaining points from buffer
// Flush remaining points from buffer into byte streams and write them
while !self.buffer.is_empty() {
self.write_buffer_to_disk()?;
self.write_buffer_to_disk(false)?;
}

// Flush last partial bytes from byte streams
self.write_buffer_to_disk(true)?;

// We need to write the section header again with the final length
// which was previously unknown and is now available.
let end_offset = self
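
The core of the fix is visible above: the per-field byte stream buffers now live on the writer itself (byte_streams) and persist across packets, intermediate packets drain only whole bytes from them, and finalize() performs one last call with last_flush set to true so the trailing partial bytes are written as well. A simplified, self-contained sketch of that drain policy (Stream and drain_for_packet are illustrative names, not crate API):

// Simplified sketch of the flushing strategy in the diff above: intermediate
// packets take only whole bytes from each per-field stream; the final flush
// also takes the trailing partial byte.
struct Stream {
    bytes: Vec<u8>,
    trailing_bits: usize, // bits already written into the last byte (0 = aligned)
}

fn drain_for_packet(stream: &mut Stream, last_flush: bool) -> Vec<u8> {
    let take = if last_flush || stream.trailing_bits == 0 {
        stream.bytes.len() // everything, including a possibly incomplete byte
    } else {
        stream.bytes.len().saturating_sub(1) // keep the partial byte buffered
    };
    if last_flush {
        stream.trailing_bits = 0;
    }
    stream.bytes.drain(..take).collect()
}

fn main() {
    let mut s = Stream { bytes: vec![0xAB, 0xCD, 0x05], trailing_bits: 3 };
    assert_eq!(drain_for_packet(&mut s, false), vec![0xAB, 0xCD]); // partial byte stays
    assert_eq!(drain_for_packet(&mut s, true), vec![0x05]);        // final flush takes it
}
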
57 changes: 53 additions & 4 deletions tests/writer_tests.rs
@@ -947,8 +947,8 @@ fn custom_xml_test() {
}

#[test]
fn writer_bug_regression_test() {
let file = "writer_bug_regression.e57";
fn writer_bug_regression_partial_bytes() {
let file = "writer_bug_regression_partial_bytes.e57";
{
let mut writer = e57::E57Writer::from_file(file, "file_uuid").unwrap();
let proto = vec![
@@ -984,9 +984,58 @@ fn writer_bug_regression_test() {
let pcs = reader.pointclouds();
for pc in pcs {
let iter = reader.pointcloud_raw(&pc).unwrap();
for point in iter {
for (i, point) in iter.enumerate() {
if let Err(err) = point {
panic!("Error reading point at: {err}");
panic!("Error reading point {i} at: {err}");
}
}
}
}
std::fs::remove_file(file).unwrap();
}

#[test]
fn writer_bug_regression_invalid_integers() {
let file = "writer_bug_regression_invalid_integers.e57";
{
let mut writer = e57::E57Writer::from_file(file, "file_uuid").unwrap();
let proto = vec![
Record::CARTESIAN_X_F32,
Record::CARTESIAN_Y_F32,
Record::CARTESIAN_Z_F32,
Record {
name: RecordName::Intensity,
data_type: RecordDataType::ScaledInteger {
min: 0,
max: 2047,
scale: 1.0,
offset: 0.0,
},
},
];
let mut pc_writer = writer.add_pointcloud("pc_guid", proto).unwrap();
// must be more than the max packet point count of the internal writer
for i in 0..5000 {
let point = vec![
RecordValue::Single(0.0),
RecordValue::Single(0.0),
RecordValue::Single(0.0),
RecordValue::ScaledInteger(i % 2048),
];
pc_writer.add_point(point).unwrap();
}
pc_writer.finalize().unwrap();
writer.finalize().unwrap();
}
{
let mut reader = e57::E57Reader::from_file(file).unwrap();
let pcs = reader.pointclouds();
for pc in pcs {
let iter = reader.pointcloud_raw(&pc).unwrap();
for (i, point) in iter.enumerate() {
match point {
Ok(p) => assert_eq!(p[3], RecordValue::ScaledInteger(i as i64 % 2048)),
Err(err) => panic!("Error reading point {i} at: {err}"),
}
}
}
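
The second regression test uses a ScaledInteger record with min 0 and max 2047, which E57 bit-packs with 11 bits per value, so the corresponding byte stream is almost never byte-aligned between packets; this is exactly the partial-byte situation the writer change above has to handle. A rough back-of-the-envelope illustration (integer_bits is not crate code):

// Rough illustration (not crate code) of why the ScaledInteger field in the
// regression test produces partial bytes: min 0 / max 2047 needs 11 bits per
// value, so after a single point the stream holds 1 full byte plus 3 spare bits.
fn integer_bits(min: i64, max: i64) -> u32 {
    let range = (max - min) as u64; // assumes max >= min
    64 - range.leading_zeros()      // smallest bit count that can hold `range`
}

fn main() {
    let bits = integer_bits(0, 2047);
    assert_eq!(bits, 11);
    assert_eq!(bits / 8, 1); // full bytes after one value
    assert_eq!(bits % 8, 3); // leftover bits that keep the stream unaligned
}
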
