) -> Result<(), Error> {
/// version: Version::new((4,1)),
/// title: String::from("Tetrahedron"),
/// byte_order: ByteOrder::BigEndian,
+/// file_path: Some(PathBuf::from("./test.vtk")),
/// data: DataSet::inline(UnstructuredGridPiece {
/// points: vec![0.0f32, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0].into(),
/// cells: Cells {
diff --git a/src/model.rs b/src/model.rs
index 92c76de..3051323 100644
--- a/src/model.rs
+++ b/src/model.rs
@@ -13,6 +13,7 @@ use std::any::TypeId;
use std::convert::TryFrom;
use std::fmt;
use std::ops::RangeInclusive;
+use std::path::{Path, PathBuf};
use bytemuck::{cast_slice, cast_vec};
use num_derive::FromPrimitive;
@@ -74,6 +75,95 @@ pub struct Vtk {
pub title: String,
pub byte_order: ByteOrder,
pub data: DataSet,
+ /// The path to the source file of this Vtk file (if any).
+ ///
+    /// This is used to load pieces stored in separate files, as referenced by the "Parallel" XML file types.
+ pub file_path: Option,
+}
+
+impl Vtk {
+ /// Loads all referenced pieces into the current struct.
+ ///
+ /// This function is useful for "Parallel" XML files like `.pvtu`, `.pvtp`, etc.
+ /// For all other files this is a no-op.
+ pub fn load_all_pieces(&mut self) -> Result<(), Error> {
+ let Vtk {
+ data, file_path, ..
+ } = self;
+
+        fn flatten_pieces<P, F>(pieces: &mut Vec<Piece<P>>, mut pick_data_set_pieces: F)
+        where
+            F: FnMut(DataSet) -> Option<Vec<Piece<P>>>,
+ {
+ let owned_pieces = std::mem::take(pieces);
+ *pieces = owned_pieces
+ .into_iter()
+ .flat_map(|piece| {
+ let (loaded, rest) = match piece {
+ Piece::Loaded(data_set) => (pick_data_set_pieces(*data_set), None),
+ p => (None, Some(p)),
+ };
+ loaded.into_iter().flatten().chain(rest.into_iter())
+ })
+ .collect();
+ }
+ let file_path = file_path.as_ref().map(|p| p.as_ref());
+ match data {
+ DataSet::ImageData { pieces, meta, .. } => {
+ for p in pieces.iter_mut() {
+ p.load_piece_in_place_recursive(file_path)?;
+ }
+ // flatten the loaded pieces stored in each piece into a single Vec.
+ flatten_pieces(pieces, |data_set| match data_set {
+ DataSet::ImageData { pieces, .. } => Some(pieces),
+ _ => None,
+ });
+ *meta = None;
+ }
+ DataSet::StructuredGrid { pieces, meta, .. } => {
+ for p in pieces.iter_mut() {
+ p.load_piece_in_place_recursive(file_path)?;
+ }
+ flatten_pieces(pieces, |data_set| match data_set {
+ DataSet::StructuredGrid { pieces, .. } => Some(pieces),
+ _ => None,
+ });
+ *meta = None;
+ }
+ DataSet::RectilinearGrid { pieces, meta, .. } => {
+ for p in pieces.iter_mut() {
+ p.load_piece_in_place_recursive(file_path)?;
+ }
+ flatten_pieces(pieces, |data_set| match data_set {
+ DataSet::RectilinearGrid { pieces, .. } => Some(pieces),
+ _ => None,
+ });
+ *meta = None;
+ }
+ DataSet::UnstructuredGrid { pieces, meta, .. } => {
+ for p in pieces.iter_mut() {
+ p.load_piece_in_place_recursive(file_path)?;
+ }
+ flatten_pieces(pieces, |data_set| match data_set {
+ DataSet::UnstructuredGrid { pieces, .. } => Some(pieces),
+ _ => None,
+ });
+ *meta = None;
+ }
+ DataSet::PolyData { pieces, meta, .. } => {
+ for p in pieces.iter_mut() {
+ p.load_piece_in_place_recursive(file_path)?;
+ }
+ flatten_pieces(pieces, |data_set| match data_set {
+ DataSet::PolyData { pieces, .. } => Some(pieces),
+ _ => None,
+ });
+ *meta = None;
+ }
+ _ => {} // No-op
+ }
+ Ok(())
+ }
}
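
A minimal usage sketch of the new piece-loading API (the asset path is the one exercised in the new tests; `import` is the crate's existing top-level entry point):

```rust
use vtkio::{import, Error};

fn main() -> Result<(), Error> {
    // `import` records the file's location in the new `file_path` field, which
    // `load_all_pieces` uses to resolve piece references relative to the file.
    let mut vtk = import("./assets/hexahedron_parallel.pvtu")?;
    vtk.load_all_pieces()?;
    // Any `Piece::Source` entries are now loaded and flattened into the data set.
    Ok(())
}
```
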
/// Version number (e.g. `4.1 => Version { major: 4, minor: 1 }`)
@@ -282,6 +372,11 @@ impl IOBuffer {
match_buf!(self, v => v.len())
}
+ /// Returns the number of bytes held by this buffer.
+ pub fn num_bytes(&self) -> usize {
+ self.len() * self.scalar_size()
+ }
+
/// Checks if the buffer is empty.
pub fn is_empty(&self) -> bool {
self.len() == 0
@@ -292,13 +387,20 @@ impl IOBuffer {
    /// The total size of the data in bytes is stored as a 64-bit integer at the very beginning.
///
/// This is how VTK data arrays store data in the XML files.
-    pub fn into_bytes_with_size(self, bo: ByteOrder) -> Vec<u8> {
+ #[cfg(feature = "xml")]
+ pub fn into_bytes_with_size(
+ self,
+ bo: ByteOrder,
+ compressor: crate::xml::Compressor,
+ compression_level: u32,
+    ) -> Vec<u8> {
use byteorder::WriteBytesExt;
use byteorder::{BE, LE};
- let size = self.len() as u64 * self.scalar_size() as u64;
- self.into_bytes_with_size_impl(bo, |out| match bo {
-            ByteOrder::BigEndian => out.write_u64::<BE>(size).unwrap(),
-            ByteOrder::LittleEndian => out.write_u64::<LE>(size).unwrap(),
+ self.into_bytes_with_size_impl(bo, compressor, compression_level, 8, |mut out, size| {
+ match bo {
+                ByteOrder::BigEndian => out.write_u64::<BE>(size as u64).unwrap(),
+                ByteOrder::LittleEndian => out.write_u64::<LE>(size as u64).unwrap(),
+ }
})
}
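
For reviewers: with `Compressor::None` (or level 0) the result is just the size prefix followed by the raw data. A hedged sketch of the expected layout, assuming the `xml` feature is enabled:

```rust
use vtkio::model::{ByteOrder, IOBuffer};
use vtkio::xml::Compressor;

let buf = IOBuffer::F32(vec![1.0, 2.0, 3.0]);
let bytes = buf.into_bytes_with_size(ByteOrder::LittleEndian, Compressor::None, 0);
// The 64-bit prefix holds the uncompressed payload size: 3 scalars * 4 bytes.
assert_eq!(bytes[..8], 12u64.to_le_bytes());
assert_eq!(bytes.len(), 8 + 12);
```
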
@@ -307,122 +409,175 @@ impl IOBuffer {
    /// The total size of the data in bytes is stored as a 32-bit integer at the very beginning.
///
/// This is how VTK data arrays store data in the XML files.
-    pub fn into_bytes_with_size32(self, bo: ByteOrder) -> Vec<u8> {
+ #[cfg(feature = "xml")]
+ pub fn into_bytes_with_size32(
+ self,
+ bo: ByteOrder,
+ compressor: crate::xml::Compressor,
+ compression_level: u32,
+    ) -> Vec<u8> {
use byteorder::WriteBytesExt;
use byteorder::{BE, LE};
- let size = self.len() as u32 * self.scalar_size() as u32;
- self.into_bytes_with_size_impl(bo, |out| match bo {
-            ByteOrder::BigEndian => out.write_u32::<BE>(size).unwrap(),
-            ByteOrder::LittleEndian => out.write_u32::<LE>(size).unwrap(),
+ self.into_bytes_with_size_impl(bo, compressor, compression_level, 4, |mut out, size| {
+ match bo {
+                ByteOrder::BigEndian => out.write_u32::<BE>(size as u32).unwrap(),
+                ByteOrder::LittleEndian => out.write_u32::<LE>(size as u32).unwrap(),
+ }
})
}
+ #[cfg(feature = "xml")]
fn into_bytes_with_size_impl(
self,
bo: ByteOrder,
-        write_size: impl Fn(&mut Vec<u8>),
+ compressor: crate::xml::Compressor,
+ compression_level: u32,
+ prefix_size: usize,
+ write_size: impl Fn(&mut [u8], usize),
    ) -> Vec<u8> {
- use byteorder::WriteBytesExt;
- use byteorder::{BE, LE};
-        let mut out: Vec<u8> = Vec::new();
+ use crate::xml::Compressor;
- // Write out the size prefix
- write_size(&mut out);
+ // Allocate enough bytes for the prefix.
+        // We will know exactly what to put there after compression.
+ let mut out = vec![0u8; prefix_size];
- match self {
- IOBuffer::Bit(mut v) => out.append(&mut v),
- IOBuffer::U8(mut v) => out.append(&mut v),
- IOBuffer::I8(v) => out.append(&mut cast_vec(v)),
- IOBuffer::U16(v) => {
-                out.reserve(v.len() * std::mem::size_of::<u16>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_u16::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_u16::<LE>(x).unwrap())
- }
+ let num_uncompressed_bytes = self.num_bytes();
+
+        // Reserve space for the uncompressed data.
+ out.reserve(num_uncompressed_bytes);
+
+        // Handle fast-path cases where we can just do a memcpy.
+ if compressor == Compressor::None || compression_level == 0 {
+ match self {
+ IOBuffer::Bit(mut v) | IOBuffer::U8(mut v) => {
+ out.append(&mut v);
+ write_size(out.as_mut_slice(), num_uncompressed_bytes);
+ return out;
}
- }
- IOBuffer::I16(v) => {
-                out.reserve(v.len() * std::mem::size_of::<i16>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_i16::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_i16::<LE>(x).unwrap())
- }
+ IOBuffer::I8(v) => {
+ out.append(&mut cast_vec(v));
+ write_size(out.as_mut_slice(), num_uncompressed_bytes);
+ return out;
}
+ // Can't just copy the bytes, so we will do a conversion.
+ _ => {}
}
- IOBuffer::U32(v) => {
-                out.reserve(v.len() * std::mem::size_of::<u32>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_u32::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_u32::<LE>(x).unwrap())
- }
- }
+ }
+
+ match compressor {
+ Compressor::ZLib =>
+ #[cfg(feature = "flate2")]
+ {
+ use flate2::{write::ZlibEncoder, Compression};
+ let mut e = ZlibEncoder::new(out, Compression::new(compression_level));
+ self.write_bytes(&mut e, bo);
+ let mut out = e.finish().unwrap();
+ let num_compressed_bytes = out.len() - prefix_size;
+ write_size(out.as_mut_slice(), num_compressed_bytes);
+ return out;
}
- IOBuffer::I32(v) => {
-                out.reserve(v.len() * std::mem::size_of::<i32>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_i32::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_i32::<LE>(x).unwrap())
- }
- }
+ Compressor::LZMA =>
+ #[cfg(feature = "xz2")]
+ {
+ let mut e = xz2::write::XzEncoder::new(out, compression_level);
+ self.write_bytes(&mut e, bo);
+ let mut out = e.finish().unwrap();
+ let num_compressed_bytes = out.len() - prefix_size;
+ write_size(out.as_mut_slice(), num_compressed_bytes);
+ return out;
}
- IOBuffer::U64(v) => {
-                out.reserve(v.len() * std::mem::size_of::<u64>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_u64::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_u64::<LE>(x).unwrap())
- }
+ Compressor::LZ4 => {
+ #[cfg(feature = "lz4")]
+ {
+ //let mut e = lz4::EncoderBuilder::new()
+ // .level(compression_level)
+ // .checksum(lz4::ContentChecksum::NoChecksum)
+ // .build(out)
+ // .unwrap();
+ //self.write_bytes(&mut e, bo);
+ //let mut out = e.finish().0;
+
+                // Initially write the raw bytes to out, after the prefix placeholder.
+                self.write_bytes(&mut out, bo);
+
+                // Then compress only the data payload, so the prefix placeholder is
+                // not compressed along with it. This should be done using a writer,
+                // but lz4_flex does not implement this at this time, and it seems
+                // like the lz4 crate doesn't support lz4's block format.
+                let mut compressed = lz4::compress(&out[prefix_size..]);
+                out.truncate(prefix_size);
+                out.append(&mut compressed);
+
+ let num_compressed_bytes = out.len() - prefix_size;
+ write_size(out.as_mut_slice(), num_compressed_bytes);
+ return out;
}
}
- IOBuffer::I64(v) => {
-                out.reserve(v.len() * std::mem::size_of::<i64>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_i64::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_i64::<LE>(x).unwrap())
- }
+ Compressor::None => {}
+ }
+
+ self.write_bytes(&mut out, bo);
+ write_size(out.as_mut_slice(), num_uncompressed_bytes);
+
+ // Remove excess bytes.
+ out.shrink_to_fit();
+
+ out
+ }
+
+ #[cfg(feature = "xml")]
+    fn write_bytes<W: std::io::Write>(self, out: &mut W, bo: ByteOrder) {
+        use byteorder::{WriteBytesExt, BE, LE};
+ match self {
+ IOBuffer::Bit(v) => v.into_iter().for_each(|x| out.write_u8(x).unwrap()),
+ IOBuffer::U8(v) => v.into_iter().for_each(|x| out.write_u8(x).unwrap()),
+ IOBuffer::I8(v) => v.into_iter().for_each(|x| out.write_i8(x).unwrap()),
+ IOBuffer::U16(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_u16::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_u16::<LE>(x).unwrap())
}
- }
- IOBuffer::F32(v) => {
-                out.reserve(v.len() * std::mem::size_of::<f32>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_f32::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_f32::<LE>(x).unwrap())
- }
+ },
+ IOBuffer::I16(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_i16::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_i16::<LE>(x).unwrap())
}
- }
- IOBuffer::F64(v) => {
-                out.reserve(v.len() * std::mem::size_of::<f64>());
- match bo {
- ByteOrder::BigEndian => {
-                        v.into_iter().for_each(|x| out.write_f64::<BE>(x).unwrap())
- }
- ByteOrder::LittleEndian => {
-                        v.into_iter().for_each(|x| out.write_f64::<LE>(x).unwrap())
- }
+ },
+ IOBuffer::U32(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_u32::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_u32::<LE>(x).unwrap())
}
- }
+ },
+ IOBuffer::I32(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_i32::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_i32::<LE>(x).unwrap())
+ }
+ },
+ IOBuffer::U64(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_u64::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_u64::<LE>(x).unwrap())
+ }
+ },
+ IOBuffer::I64(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_i64::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_i64::<LE>(x).unwrap())
+ }
+ },
+ IOBuffer::F32(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_f32::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_f32::<LE>(x).unwrap())
+ }
+ },
+ IOBuffer::F64(v) => match bo {
+                ByteOrder::BigEndian => v.into_iter().for_each(|x| out.write_f64::<BE>(x).unwrap()),
+ ByteOrder::LittleEndian => {
+                    v.into_iter().for_each(|x| out.write_f64::<LE>(x).unwrap())
+ }
+ },
}
- out
}
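
And a matching sanity check for the compressed path: the prefix now records the compressed payload size, and inflating the payload yields the raw little-endian scalars. A sketch assuming the `xml` and `flate2` features:

```rust
use std::convert::TryInto;
use std::io::Read;
use vtkio::model::{ByteOrder, IOBuffer};
use vtkio::xml::Compressor;

let bytes = IOBuffer::U16(vec![1, 2, 3, 4])
    .into_bytes_with_size(ByteOrder::LittleEndian, Compressor::ZLib, 5);
// The prefix holds the *compressed* size, so the total length is 8 + prefix.
let compressed_len = u64::from_le_bytes(bytes[..8].try_into().unwrap()) as usize;
assert_eq!(bytes.len(), 8 + compressed_len);

// Inflating the payload recovers the 4 * 2 raw little-endian bytes.
let mut raw = Vec::new();
flate2::read::ZlibDecoder::new(&bytes[8..])
    .read_to_end(&mut raw)
    .unwrap();
assert_eq!(raw, vec![1u8, 0, 2, 0, 3, 0, 4, 0]);
```
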
/// Constructs an `IOBuffer` from a slice of bytes and a corresponding scalar type.
@@ -1532,40 +1687,79 @@ pub enum Piece {
}
pub trait PieceData: Sized {
-    fn from_data_set(data_set: DataSet) -> Result<Self, Error>;
+    fn from_data_set(data_set: DataSet, source_path: Option<&Path>) -> Result<Self, Error>;
+}
+
+/// Build an absolute path to the referenced piece.
+fn build_piece_path(path: impl AsRef<Path>, source_path: Option<&Path>) -> PathBuf {
+ let path = path.as_ref();
+ if !path.has_root() {
+ if let Some(root) = source_path.and_then(|p| p.parent()) {
+ root.join(path)
+ } else {
+ PathBuf::from(path)
+ }
+ } else {
+ PathBuf::from(path)
+ }
}
impl<P: PieceData> Piece<P> {
- /// Converts `self` to loaded piece data.
+ /// Converts `self` into a loaded piece if the current piece is only a `Source`.
+ ///
+ /// This function recursively loads any referenced pieces down the hierarchy.
+ ///
+    /// If this piece is `Loaded` or `Inline`, this function does nothing.
+ ///
+ /// The given `source_path` is the path to the file containing this piece (if any).
+ pub fn load_piece_in_place_recursive(
+ &mut self,
+ source_path: Option<&Path>,
+ ) -> Result<(), Error> {
+ match self {
+ Piece::Source(path, _) => {
+ let piece_path = build_piece_path(path, source_path);
+ let mut piece_vtk = crate::import(&piece_path)?;
+ piece_vtk.load_all_pieces()?;
+ let piece = Box::new(piece_vtk.data);
+ *self = Piece::Loaded(piece);
+ }
+ _ => {}
+ }
+ Ok(())
+ }
+
+ /// Consumes `self` and returns loaded piece data.
///
    /// If the piece is not yet loaded, this function will load it and return the
    /// resulting data.
-    pub fn load_piece_data(mut self) -> Result<P, Error> {
+    pub fn into_loaded_piece_data(self, source_path: Option<&Path>) -> Result<P, Error> {
match self {
Piece::Source(path, _) => {
- let piece_vtk = crate::import(&path)?;
- let piece = Box::new(piece_vtk.data);
- self = Piece::Loaded(piece);
- self.load_piece_data()
+ let piece_path = build_piece_path(path, source_path);
+ let piece_vtk = crate::import(&piece_path)?;
+ P::from_data_set(piece_vtk.data, Some(piece_path.as_ref()))
}
- Piece::Loaded(data_set) => P::from_data_set(*data_set),
+ Piece::Loaded(data_set) => P::from_data_set(*data_set, source_path),
Piece::Inline(piece_data) => Ok(*piece_data),
}
}
- /// Converts `self` to loaded piece data.
+ /// Consumes `self` and returns loaded piece data.
///
- /// This is the async version of `load_piece_data` function.
+    /// This is the async version of the `into_loaded_piece_data` function.
#[cfg(feature = "async_blocked")]
-    pub async fn load_piece_data_async(mut self) -> Result<P, Error> {
+    pub async fn into_loaded_piece_data_async(
+        self,
+        source_path: Option<&Path>,
+    ) -> Result<P, Error> {
match self {
Piece::Source(path, _) => {
- let piece_vtk = crate::import_async(&path).await?;
- let piece = Box::new(piece_vtk.data);
- self = Piece::Loaded(piece);
- self.load_piece_data() // Not async since the piece is now loaded.
+ let piece_path = build_piece_path(path, source_path);
+ let piece_vtk = crate::import_async(&piece_path).await?;
+ P::from_data_set(piece_vtk.data, Some(piece_path.as_ref()))
}
- Piece::Loaded(data_set) => P::from_data_set(*data_set),
+ Piece::Loaded(data_set) => P::from_data_set(*data_set, source_path),
Piece::Inline(piece_data) => Ok(*piece_data),
}
}
@@ -1704,17 +1898,17 @@ macro_rules! impl_piece_data {
        impl TryFrom<DataSet> for $piece {
type Error = Error;
            fn try_from(data_set: DataSet) -> Result<Self, Error> {
- Self::from_data_set(data_set)
+ Self::from_data_set(data_set, None)
}
}
impl PieceData for $piece {
-            fn from_data_set(data_set: DataSet) -> Result<Self, Error> {
+            fn from_data_set(data_set: DataSet, source_path: Option<&Path>) -> Result<Self, Error> {
match data_set {
DataSet::$data_set { pieces, .. } => pieces
.into_iter()
.next()
.ok_or(Error::MissingPieceData)?
- .load_piece_data(),
+ .into_loaded_piece_data(source_path),
_ => Err(Error::PieceDataMismatch),
}
}
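
How the new `source_path` threading behaves in practice (a hedged sketch; the paths are hypothetical):

```rust
use std::path::Path;
use vtkio::model::{Piece, UnstructuredGridPiece};
use vtkio::Error;

fn load_piece(piece: Piece<UnstructuredGridPiece>) -> Result<UnstructuredGridPiece, Error> {
    // For a `Piece::Source("pieces/piece_0.vtu", ..)` entry, `build_piece_path`
    // resolves the reference against the parent directory of the referencing
    // file, i.e. "/data/pieces/piece_0.vtu". `Loaded` and `Inline` pieces are
    // returned as-is.
    piece.into_loaded_piece_data(Some(Path::new("/data/mesh.pvtu")))
}
```
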
diff --git a/src/parser.rs b/src/parser.rs
index 254b388..309923f 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -776,7 +776,8 @@ impl VtkParser {
// This is ignored in Legacy formats
                byte_order: ByteOrder::BigEndian,
title: h.1,
- data: d
+ data: d,
+ file_path: None,
})
))
)
diff --git a/src/writer.rs b/src/writer.rs
index 68b3d11..773a058 100644
--- a/src/writer.rs
+++ b/src/writer.rs
@@ -31,6 +31,19 @@ mod write_vtk_impl {
LookupTable,
}
+ impl std::fmt::Display for EntryPart {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ use EntryPart::*;
+ match self {
+ Tags => write!(f, "Tags"),
+ Sizes => write!(f, "Sizes"),
+ Header => write!(f, "Header"),
+ Data(kind) => write!(f, "Data: {:?}", kind),
+ LookupTable => write!(f, "Lookup table"),
+ }
+ }
+ }
+
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum AttributeError {
Scalars(EntryPart),
@@ -45,6 +58,24 @@ mod write_vtk_impl {
UnrecognizedAttributeType,
}
+ impl std::fmt::Display for AttributeError {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ use AttributeError::*;
+ match self {
+ Scalars(part) => write!(f, "Scalars: {}", part),
+ ColorScalars(part) => write!(f, "Color scalars: {}", part),
+ LookupTable(part) => write!(f, "Lookup table: {}", part),
+ Vectors(part) => write!(f, "Vectors: {}", part),
+ Normals(part) => write!(f, "Normals: {}", part),
+ TextureCoordinates(part) => write!(f, "Texture coordinates: {}", part),
+ Tensors(part) => write!(f, "Tensors: {}", part),
+ Field(part) => write!(f, "Field: {}", part),
+ FieldArray(part) => write!(f, "Field array: {}", part),
+ UnrecognizedAttributeType => write!(f, "Unrecognized attribute type"),
+ }
+ }
+ }
+
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Header {
Version,
@@ -53,6 +84,16 @@ mod write_vtk_impl {
FileType,
}
+ impl std::fmt::Display for Header {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ match self {
+ Header::Version => write!(f, "Version"),
+ Header::Title => write!(f, "Title"),
+ Header::FileType => write!(f, "File type (BINARY or ASCII)"),
+ }
+ }
+ }
+
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum DataSetPart {
/// Tags identifying the data set type. For example UNSTRUCTURED_GRID or POLY_DATA.
@@ -68,6 +109,24 @@ mod write_vtk_impl {
ZCoordinates(EntryPart),
}
+ impl std::fmt::Display for DataSetPart {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ use DataSetPart::*;
+ match self {
+ Tags => write!(f, "Tags"),
+ Points(part) => write!(f, "Points: {}", part),
+ Cells(part) => write!(f, "Cells: {}", part),
+ CellTypes(part) => write!(f, "Cell types: {}", part),
+ Dimensions => write!(f, "Dimensions"),
+ Origin => write!(f, "Origin"),
+ Spacing(part) => write!(f, "Spacing: {}", part),
+ XCoordinates(part) => write!(f, "X coords: {}", part),
+ YCoordinates(part) => write!(f, "Y coords: {}", part),
+ ZCoordinates(part) => write!(f, "Z coords: {}", part),
+ }
+ }
+ }
+
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum DataSetError {
FieldDataHeader,
@@ -85,6 +144,25 @@ mod write_vtk_impl {
MissingPieceData,
}
+ impl std::fmt::Display for DataSetError {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ use DataSetError::*;
+ match self {
+ FieldDataHeader => write!(f, "Field data header"),
+ FieldArray(entry) => write!(f, "Field array: {}", entry),
+
+ PolyData(part) => write!(f, "Poly data: {}", part),
+ UnstructuredGrid(part) => write!(f, "Unstructured grid: {}", part),
+ StructuredGrid(part) => write!(f, "Structured grid: {}", part),
+ StructuredPoints(part) => write!(f, "Structured points: {}", part),
+ RectilinearGrid(part) => write!(f, "Rectilinear grid: {}", part),
+
+ PieceDataMismatch => write!(f, "Piece data mismatch"),
+ MissingPieceData => write!(f, "Missing piece data"),
+ }
+ }
+ }
+
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Error {
PointDataHeader,
@@ -95,15 +173,29 @@ mod write_vtk_impl {
DataSet(DataSetError),
NewLine,
- /// Unexpected type stored in referenced data buffer. This is most likely caused by
- /// data corruption.
- DataMismatchError,
/// Generic formatting error originating from [`std::fmt::Error`].
FormatError,
/// Generic IO error originating from [`std::io::Error`].
IOError(std::io::ErrorKind),
}
+ impl std::fmt::Display for Error {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ match self {
+ Error::PointDataHeader => write!(f, "POINT_DATA header"),
+ Error::CellDataHeader => write!(f, "CELL_DATA header"),
+ Error::Attribute(attrib_err) => write!(f, "Attribute: {}", attrib_err),
+ Error::Header(header_err) => write!(f, "Header: {}", header_err),
+ Error::DataSet(data_set_err) => write!(f, "Data set: {}", data_set_err),
+ Error::NewLine => write!(f, "New line"),
+ Error::FormatError => write!(f, "Format error"),
+ Error::IOError(kind) => write!(f, "IO Error: {:?}", kind),
+ }
+ }
+ }
+
+ impl std::error::Error for Error {}
+
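
The nested `Display` impls compose by prefixing each level; a sketch of the expected output, written as it would appear inside `write_vtk_impl` since these types are module-private:

```rust
#[test]
fn error_display_nesting() {
    let err = Error::Attribute(AttributeError::Scalars(EntryPart::Sizes));
    assert_eq!(err.to_string(), "Attribute: Scalars: Sizes");
}
```
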
/// Extract a raw IO Error from our error if any. This helps annotate the IO error with
/// where it originated from when reported from lower level functions.
    impl Into<std::io::Error> for Error {
@@ -343,6 +435,7 @@ mod write_vtk_impl {
&mut self,
vtk: Vtk,
) -> std::result::Result<&mut Self, Error> {
+ let source_path = vtk.file_path.as_ref().map(|p| p.as_ref());
writeln!(self, "# vtk DataFile Version {}", vtk.version)
.map_err(|_| Error::Header(Header::Version))?;
writeln!(self, "{}", vtk.title).map_err(|_| Error::Header(Header::Version))?;
@@ -384,7 +477,7 @@ mod write_vtk_impl {
polys,
strips,
data,
- }) = piece.load_piece_data()
+ }) = piece.into_loaded_piece_data(source_path)
{
writeln!(self, "DATASET POLYDATA").map_err(|_| {
Error::DataSet(DataSetError::PolyData(DataSetPart::Tags))
@@ -463,7 +556,7 @@ mod write_vtk_impl {
points,
cells,
data,
- }) = piece.load_piece_data()
+ }) = piece.into_loaded_piece_data(source_path)
{
writeln!(self, "DATASET UNSTRUCTURED_GRID").map_err(|_| {
Error::DataSet(DataSetError::UnstructuredGrid(DataSetPart::Tags))
@@ -524,7 +617,9 @@ mod write_vtk_impl {
.into_iter()
.next()
.ok_or(DataSetError::MissingPieceData)?;
- if let Ok(ImageDataPiece { data, .. }) = piece.load_piece_data() {
+ if let Ok(ImageDataPiece { data, .. }) =
+ piece.into_loaded_piece_data(source_path)
+ {
writeln!(self, "DATASET STRUCTURED_POINTS").map_err(|_| {
Error::DataSet(DataSetError::StructuredPoints(DataSetPart::Tags))
})?;
@@ -572,7 +667,9 @@ mod write_vtk_impl {
.into_iter()
.next()
.ok_or(DataSetError::MissingPieceData)?;
- if let Ok(StructuredGridPiece { points, data, .. }) = piece.load_piece_data() {
+ if let Ok(StructuredGridPiece { points, data, .. }) =
+ piece.into_loaded_piece_data(source_path)
+ {
writeln!(self, "DATASET STRUCTURED_GRID").map_err(|_| {
Error::DataSet(DataSetError::StructuredGrid(DataSetPart::Tags))
})?;
@@ -610,7 +707,9 @@ mod write_vtk_impl {
.into_iter()
.next()
.ok_or(DataSetError::MissingPieceData)?;
- if let Ok(RectilinearGridPiece { coords, data, .. }) = piece.load_piece_data() {
+ if let Ok(RectilinearGridPiece { coords, data, .. }) =
+ piece.into_loaded_piece_data(source_path)
+ {
writeln!(self, "DATASET RECTILINEAR_GRID").map_err(|_| {
Error::DataSet(DataSetError::RectilinearGrid(DataSetPart::Tags))
})?;
@@ -727,6 +826,7 @@ mod write_vtk_impl {
}
match buf {
+ IOBuffer::Bit(v) => write_buf_impl(v, &mut self.0, W::write_u8)?,
IOBuffer::U8(v) => write_buf_impl(v, &mut self.0, W::write_u8)?,
IOBuffer::I8(v) => write_buf_impl(v, &mut self.0, W::write_i8)?,
IOBuffer::U16(v) => {
@@ -753,7 +853,6 @@ mod write_vtk_impl {
IOBuffer::F64(v) => {
write_buf_impl(v, &mut self.0, W::write_f64::)?;
}
- _ => return Err(Error::DataMismatchError),
}
writeln!(&mut self.0)?;
diff --git a/src/xml.rs b/src/xml.rs
index 798a096..e46e7f5 100644
--- a/src/xml.rs
+++ b/src/xml.rs
@@ -435,6 +435,7 @@ mod coordinates {
A: MapAccess<'de>,
{
                let invalid_len_err = |n| <A::Error as serde::de::Error>::invalid_length(n, &self);
+ // TODO: These should not be positional. (See VTKFile deserialization for reference)
let (_, x) = map
                    .next_entry::<String, DataArray>()?
.ok_or_else(|| invalid_len_err(0))?;
@@ -473,10 +474,128 @@ mod coordinates {
}
mod data {
- use super::RawData;
- use serde::de::{Deserialize, Deserializer, Visitor};
- use serde::ser::{Serialize, Serializer};
+ use super::{AppendedData, Data, Encoding, RawData};
+ use serde::{
+ de::{self, Deserialize, Deserializer, MapAccess, Visitor},
+ Serialize, Serializer,
+ };
use std::fmt;
+ // A helper function to detect whitespace bytes.
+ fn is_whitespace(b: u8) -> bool {
+ match b {
+ b' ' | b'\r' | b'\n' | b'\t' => true,
+ _ => false,
+ }
+ }
+
+ #[derive(Debug, serde::Deserialize)]
+ #[serde(field_identifier)]
+ enum Field {
+ #[serde(rename = "encoding")]
+ Encoding,
+ #[serde(rename = "$value")]
+ Value,
+ }
+
+ /*
+ * Data in a DataArray element
+ */
+
+ struct DataVisitor;
+
+ impl<'de> Visitor<'de> for DataVisitor {
+ type Value = Data;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Data string in base64 or ASCII format")
+ }
+
+        fn visit_map<A>(self, _map: A) -> Result<Self::Value, A::Error>
+ where
+ A: MapAccess<'de>,
+ {
+ // Ignore InformationKey fields.
+ Ok(Data::Meta {
+ information_key: (),
+ })
+ }
+        fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ Ok(Data::Data(v.trim_end().to_string()))
+ }
+ }
+
+ /* Serialization of Data is derived. */
+
+ impl<'de> Deserialize<'de> for Data {
+        fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ Ok(d.deserialize_any(DataVisitor)?)
+ }
+ }
+
+ /*
+ * AppendedData Element
+ */
+ struct AppendedDataVisitor;
+
+ impl<'de> Visitor<'de> for AppendedDataVisitor {
+ type Value = AppendedData;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Appended bytes or base64 data")
+ }
+
+        fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
+ where
+ A: MapAccess<'de>,
+ {
+ let make_err = || {
+                <A::Error as de::Error>::custom(
+ "AppendedData element must contain only a single \"encoding\" attribute",
+ )
+ };
+ let mut encoding = None;
+ let mut data = RawData::default();
+            if let Some((key, value)) = map.next_entry::<Field, Encoding>()? {
+ match key {
+ Field::Encoding => encoding = Some(value),
+ _ => return Err(make_err()),
+ }
+ }
+            if let Some((key, value)) = map.next_entry::<Field, RawData>()? {
+ match key {
+ Field::Value => data = value,
+ _ => return Err(make_err()),
+ }
+ }
+ if let Some(Encoding::Base64) = encoding {
+ // In base64 encoding we can trim whitespace from the end.
+ if let Some(end) = data.0.iter().rposition(|&b| !is_whitespace(b)) {
+ data = RawData(data.0[..=end].to_vec());
+ }
+ }
+ Ok(AppendedData {
+ encoding: encoding.unwrap_or(Encoding::Raw),
+ data,
+ })
+ }
+ }
+
+ /* Serialization of AppendedData is derived. */
+
+ impl<'de> Deserialize<'de> for AppendedData {
+        fn deserialize<D>(d: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ Ok(d.deserialize_struct("AppendedData", &["encoding", "$value"], AppendedDataVisitor)?)
+ }
+ }
/*
* Data in an AppendedData element
@@ -492,7 +611,6 @@ mod data {
}
        fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E> {
- //eprintln!("Deserializing as bytes");
            // Skip the first byte which always corresponds to the preceding underscore
if v.is_empty() {
return Ok(RawData(Vec::new()));
@@ -1097,7 +1215,7 @@ impl Default for VTKFile {
}
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Compressor {
LZ4,
ZLib,
@@ -1683,6 +1801,9 @@ impl Coordinates {
pub struct EncodingInfo {
byte_order: model::ByteOrder,
header_type: ScalarType,
+ compressor: Compressor,
+ // Note that compression level is meaningless during decoding.
+ compression_level: u32,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
@@ -1768,9 +1889,10 @@ impl DataArray {
scalar_type: buf.scalar_type().into(),
data: vec![Data::Data(base64::encode(
if ei.header_type == ScalarType::UInt64 {
- buf.into_bytes_with_size(ei.byte_order)
+ buf.into_bytes_with_size(ei.byte_order, ei.compressor, ei.compression_level)
} else {
- buf.into_bytes_with_size32(ei.byte_order) // Older vtk Versions
+ buf.into_bytes_with_size32(ei.byte_order, ei.compressor, ei.compression_level)
+ // Older vtk Versions
},
))],
..Default::default()
@@ -1815,42 +1937,13 @@ impl DataArray {
//eprintln!("name = {:?}", &name);
let num_elements = usize::try_from(num_comp).unwrap() * l;
- let num_bytes = num_elements * scalar_type.size();
- let header_bytes = if ei.header_type == ScalarType::UInt64 {
- 8
- } else {
- 4
- };
+ let header_bytes = ei.header_type.size();
let data = match format {
DataArrayFormat::Appended => {
if let Some(appended) = appended {
- let mut start: usize = offset.unwrap_or(0).try_into().unwrap();
- let buf = match appended.encoding {
- Encoding::Raw => {
- // Skip the first 64 bits which gives the size of each component in bytes
- //eprintln!("{:?}", &appended.data.0[start..start + header_bytes]);
- start += header_bytes;
- let bytes = &appended.data.0[start..start + num_bytes];
- IOBuffer::from_bytes(bytes, scalar_type.into(), ei.byte_order)?
- }
- Encoding::Base64 => {
- // Add one 64-bit integer that specifies the size of each component in bytes.
- let num_target_bits = (num_bytes + header_bytes) * 8;
- // Compute how many base64 chars we need to decode l elements.
- let num_source_bytes =
- num_target_bits / 6 + if num_target_bits % 6 == 0 { 0 } else { 1 };
- let bytes = &appended.data.0[start..start + num_source_bytes];
- let bytes = base64::decode(bytes)?;
- //eprintln!("{:?}", &bytes[..header_bytes]);
- // Skip the first 64 bits which gives the size of each component in bytes
- IOBuffer::from_bytes(
- &bytes[header_bytes..],
- scalar_type.into(),
- ei.byte_order,
- )?
- }
- };
+ let start: usize = offset.unwrap_or(0).try_into().unwrap();
+ let buf = appended.extract_data(start, num_elements, scalar_type, ei)?;
if buf.len() != num_elements {
return Err(ValidationError::DataArraySizeMismatch {
name,
@@ -1977,12 +2070,12 @@ fn default_num_comp() -> u32 {
/// Some VTK tools like ParaView may produce undocumented tags inside this
/// element. We capture and ignore those via the `Meta` variant. Otherwise this
/// is treated as a data string.
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(untagged)]
pub enum Data {
Meta {
- #[serde(rename = "InformationKey", default)]
- info_key: (),
+ #[serde(rename = "InformationKey")]
+ information_key: (),
},
Data(String),
}
@@ -2078,7 +2171,7 @@ pub enum DataArrayFormat {
Ascii,
}
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
+#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct AppendedData {
/// Encoding used in the `data` field.
pub encoding: Encoding,
@@ -2107,6 +2200,230 @@ pub enum Encoding {
Raw,
}
+impl AppendedData {
+    /// Extract the decoded and decompressed data from this appended data block.
+ ///
+ /// The data is expected to begin at `offset` from the beginning of the stored data array.
+ ///
+ /// The expected number of elements is given by `num_elements`.
+ /// The given encoding info specifies the format of the data header and how the data is compressed.
+ pub fn extract_data(
+ &self,
+ offset: usize,
+ num_elements: usize,
+ scalar_type: ScalarType,
+ ei: EncodingInfo,
+    ) -> std::result::Result<model::IOBuffer, ValidationError> {
+ // Convert number of target bytes to number of chars in base64 encoding.
+ fn to_b64(bytes: usize) -> usize {
+ 4 * (bytes as f64 / 3.0).ceil() as usize
+ //(bytes * 4 + 1) / 3 + match bytes % 3 {
+ // 1 => 2, 2 => 1, _ => 0
+ //}
+ }
+
+ let header_bytes = ei.header_type.size();
+ let expected_num_bytes = num_elements * scalar_type.size();
+ let mut start = offset;
+
+ if ei.compressor == Compressor::None {
+ return match self.encoding {
+ Encoding::Raw => {
+                    // The first 64/32 bits give the total size of the data in bytes.
+                    // Since the data here is uncompressed, we can predict exactly how
+                    // many bytes to expect, and we check this below.
+ let given_num_bytes = read_header_num(
+ &mut std::io::Cursor::new(&self.data.0[start..start + header_bytes]),
+ ei,
+ )?;
+ if given_num_bytes != expected_num_bytes {
+ return Err(ValidationError::UnexpectedBytesInAppendedData(
+ expected_num_bytes as u64,
+ given_num_bytes as u64,
+ ));
+ }
+ start += header_bytes;
+ let bytes = &self.data.0[start..start + expected_num_bytes];
+ Ok(model::IOBuffer::from_bytes(
+ bytes,
+ scalar_type.into(),
+ ei.byte_order,
+ )?)
+ }
+ Encoding::Base64 => {
+                    // Account for the one integer that specifies the total size of the data in bytes.
+                    let num_target_bytes = expected_num_bytes + header_bytes;
+                    // Compute how many base64 chars we need to decode the expected number of bytes.
+ let num_source_bytes = to_b64(num_target_bytes);
+ let bytes = &self.data.0[start..start + num_source_bytes];
+ let bytes = base64::decode(bytes)?;
+ Ok(model::IOBuffer::from_bytes(
+ &bytes[header_bytes..],
+ scalar_type.into(),
+ ei.byte_order,
+ )?)
+ }
+ };
+ }
+
+ // Compressed data has a more complex header.
+ // The data is organized as [nb][nu][np][nc_1]...[nc_nb][Data]
+ // Where
+ // [nb] = Number of blocks in the data array
+ // [nu] = Block size before compression
+ // [np] = Size of the last partial block before compression (zero if it is not needed)
+ // [nc_i] = Size in bytes of block i after compression
+ // See https://vtk.org/Wiki/VTK_XML_Formats for details.
+        // In this case we don't know how many bytes are in the data array, so we must
+        // first read this information from a header.
+
+ // Helper function to read a single header number, which depends on the encoding parameters.
+        fn read_header_num<B: AsRef<[u8]>>(
+            header_buf: &mut std::io::Cursor<B>,
+            ei: EncodingInfo,
+        ) -> std::result::Result<usize, ValidationError> {
+ use byteorder::ReadBytesExt;
+ use byteorder::{BE, LE};
+ Ok(match ei.byte_order {
+ model::ByteOrder::LittleEndian => {
+ if ei.header_type == ScalarType::UInt64 {
+                        header_buf.read_u64::<LE>()? as usize
+ } else {
+                        header_buf.read_u32::<LE>()? as usize
+ }
+ }
+ model::ByteOrder::BigEndian => {
+ if ei.header_type == ScalarType::UInt64 {
+                        header_buf.read_u64::<BE>()? as usize
+ } else {
+                        header_buf.read_u32::<BE>()? as usize
+ }
+ }
+ })
+ }
+
+        fn get_data_slice<'a, D, B>(
+            buf: &'a mut Vec<u8>,
+            mut decode: D,
+            mut to_b64: B,
+            data: &'a [u8],
+            header_bytes: usize,
+            ei: EncodingInfo,
+        ) -> std::result::Result<Vec<u8>, ValidationError>
+ where
+ D: for<'b> FnMut(
+ &'b [u8],
+                &'b mut Vec<u8>,
+ ) -> std::result::Result<&'b [u8], ValidationError>,
+ B: FnMut(usize) -> usize,
+ {
+ use std::io::Cursor;
+ use std::io::Read;
+
+ // First we need to determine the number of blocks stored.
+ let num_blocks = {
+ let encoded_header = &data[0..to_b64(header_bytes)];
+ let decoded_header = decode(encoded_header, buf)?;
+ read_header_num(&mut Cursor::new(decoded_header), ei)?
+ };
+
+ let full_header_bytes = header_bytes * (3 + num_blocks); // nb + nu + np + sum_i nc_i
+ buf.clear();
+
+ let encoded_header = &data[0..to_b64(full_header_bytes)];
+ let decoded_header = decode(encoded_header, buf)?;
+ let mut header_cursor = Cursor::new(decoded_header);
+ let _nb = read_header_num(&mut header_cursor, ei); // We already know the number of blocks
+ let _nu = read_header_num(&mut header_cursor, ei);
+ let _np = read_header_num(&mut header_cursor, ei);
+ let nc_total = (0..num_blocks).fold(0, |acc, _| {
+ acc + read_header_num(&mut header_cursor, ei).unwrap_or(0)
+ });
+ let num_data_bytes = to_b64(nc_total);
+ let start = to_b64(full_header_bytes);
+ buf.clear();
+ let encoded_data = &data[start..start + num_data_bytes];
+ let decoded_data = decode(encoded_data, buf)?;
+
+ // Now that the data is decoded, what is left is to decompress it.
+ let mut out = Vec::new();
+ match ei.compressor {
+ Compressor::ZLib => {
+ #[cfg(not(feature = "flate2"))]
+ {
+ return Err(ValidationError::MissingCompressionLibrary(ei.compressor));
+ }
+ #[cfg(feature = "flate2")]
+ {
+ let mut decoder = flate2::read::ZlibDecoder::new(decoded_data);
+ decoder.read_to_end(&mut out)?;
+ }
+ }
+ Compressor::LZ4 => {
+ #[cfg(not(feature = "lz4"))]
+ {
+ return Err(ValidationError::MissingCompressionLibrary(ei.compressor));
+ }
+ #[cfg(feature = "lz4")]
+ {
+ out = lz4::decompress(decoded_data, num_data_bytes)?;
+ }
+ }
+ Compressor::LZMA => {
+ #[cfg(not(feature = "xz2"))]
+ {
+ return Err(ValidationError::MissingCompressionLibrary(ei.compressor));
+ }
+ #[cfg(feature = "xz2")]
+ {
+ let mut decoder = xz2::read::XzDecoder::new(decoded_data);
+ decoder.read_to_end(&mut out)?;
+ }
+ }
+ _ => {}
+ };
+ Ok(out)
+ }
+
+ let out = match self.encoding {
+ Encoding::Raw => {
+ let mut buf = Vec::new();
+ get_data_slice(
+ &mut buf,
+ |header, _| Ok(header),
+ |x| x,
+ &self.data.0[offset..],
+ header_bytes,
+ ei,
+ )?
+ }
+ Encoding::Base64 => {
+ let mut buf = Vec::new();
+ get_data_slice(
+ &mut buf,
+ |header, buf| {
+ base64::decode_config_buf(
+ header,
+ base64::STANDARD.decode_allow_trailing_bits(true),
+ buf,
+ )?;
+ Ok(buf.as_slice())
+ },
+ to_b64,
+ &self.data.0[offset..],
+ header_bytes,
+ ei,
+ )?
+ }
+ };
+ Ok(model::IOBuffer::from_byte_vec(
+ out,
+ scalar_type.into(),
+ ei.byte_order,
+ )?)
+ }
+}
+
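
To illustrate the `[nb][nu][np][nc_1]..[nc_nb]` header parsed above, here is a self-contained sketch with hypothetical numbers (32-bit header type, little endian):

```rust
use byteorder::{ReadBytesExt, LE};
use std::io::Cursor;

// Two blocks of 32768 uncompressed bytes, no partial block, compressed to
// 100 and 80 bytes respectively.
let header: Vec<u8> = [2u32, 32768, 0, 100, 80]
    .iter()
    .flat_map(|n| n.to_le_bytes())
    .collect();

let mut cur = Cursor::new(header);
let nb = cur.read_u32::<LE>().unwrap(); // number of blocks
let _nu = cur.read_u32::<LE>().unwrap(); // uncompressed block size
let _np = cur.read_u32::<LE>().unwrap(); // last partial block size
let nc_total: u32 = (0..nb).map(|_| cur.read_u32::<LE>().unwrap()).sum();
assert_eq!(nc_total, 180); // compressed payload bytes that follow the header
```
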
/// A file type descriptor of a XML VTK data file.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct FileType {
@@ -2283,14 +2600,17 @@ pub enum ValidationError {
MissingDataSet,
DataSetMismatch,
InvalidDataFormat,
+ IO(std::io::Error),
Model(model::Error),
ParseFloat(std::num::ParseFloatError),
ParseInt(std::num::ParseIntError),
InvalidCellType(u8),
TooManyElements(u32),
+ UnexpectedBytesInAppendedData(u64, u64),
MissingTopologyOffsets,
MissingReferencedAppendedData,
MissingCoordinates,
+ MissingCompressionLibrary(Compressor),
DataArraySizeMismatch {
name: String,
expected: usize,
@@ -2298,9 +2618,24 @@ pub enum ValidationError {
},
Base64Decode(base64::DecodeError),
Deserialize(de::DeError),
+ #[cfg(feature = "lz4")]
+ LZ4DecompressError(lz4::block::DecompressError),
Unsupported,
}
+#[cfg(feature = "lz4")]
+impl From<lz4::block::DecompressError> for ValidationError {
+ fn from(e: lz4::block::DecompressError) -> ValidationError {
+ ValidationError::LZ4DecompressError(e)
+ }
+}
+
+impl From<std::io::Error> for ValidationError {
+ fn from(e: std::io::Error) -> ValidationError {
+ ValidationError::IO(e)
+ }
+}
+
impl From<model::Error> for ValidationError {
fn from(e: model::Error) -> ValidationError {
ValidationError::Model(e)
@@ -2334,11 +2669,14 @@ impl From for ValidationError {
impl std::error::Error for ValidationError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
+ ValidationError::IO(source) => Some(source),
ValidationError::Model(source) => Some(source),
ValidationError::Base64Decode(source) => Some(source),
ValidationError::Deserialize(source) => Some(source),
ValidationError::ParseFloat(source) => Some(source),
ValidationError::ParseInt(source) => Some(source),
+ #[cfg(feature = "lz4")]
+ ValidationError::LZ4DecompressError(source) => Some(source),
_ => None,
}
}
@@ -2351,11 +2689,17 @@ impl std::fmt::Display for ValidationError {
write!(f, "VTKFile type doesn't match internal data set definition")
}
ValidationError::InvalidDataFormat => write!(f, "Invalid data format"),
+ ValidationError::IO(e) => write!(f, "IO Error: {}", e),
ValidationError::Model(e) => write!(f, "Failed to convert model to xml: {}", e),
ValidationError::ParseFloat(e) => write!(f, "Failed to parse a float: {}", e),
ValidationError::ParseInt(e) => write!(f, "Failed to parse an int: {}", e),
ValidationError::InvalidCellType(t) => write!(f, "Invalid cell type: {}", t),
ValidationError::TooManyElements(n) => write!(f, "Too many elements: {}", n),
+ ValidationError::UnexpectedBytesInAppendedData(expected, actual) => write!(
+ f,
+ "Expected {} bytes in appended data array but found {} in header",
+ expected, actual
+ ),
ValidationError::MissingTopologyOffsets => write!(f, "Missing topology offsets"),
ValidationError::MissingReferencedAppendedData => {
write!(f, "Appended data is referenced but missing from the file")
@@ -2363,6 +2707,13 @@ impl std::fmt::Display for ValidationError {
ValidationError::MissingCoordinates => {
write!(f, "Missing coordinates in rectilinear grid definition")
}
+ ValidationError::MissingCompressionLibrary(c) => {
+ write!(
+ f,
+ "Cannot compress/decompress data: {:?} compression is unsupported",
+ c
+ )
+ }
ValidationError::DataArraySizeMismatch {
name,
expected,
@@ -2372,10 +2723,14 @@ impl std::fmt::Display for ValidationError {
"Data array \"{}\" has {} elements, but should have {}",
name, actual, expected
),
- ValidationError::Base64Decode(source) => write!(f, "Base64 decode error: {:?}", source),
+ ValidationError::Base64Decode(source) => write!(f, "Base64 decode error: {}", source),
ValidationError::Deserialize(source) => {
write!(f, "Failed to deserialize data: {:?}", source)
}
+ #[cfg(feature = "lz4")]
+ ValidationError::LZ4DecompressError(source) => {
+ write!(f, "LZ4 deompression error: {}", source)
+ }
ValidationError::Unsupported => write!(f, "Unsupported data set format"),
}
}
impl TryFrom<VTKFile> for model::Vtk {
let VTKFile {
version,
byte_order,
+ compressor,
header_type,
data_set_type,
appended_data,
@@ -2397,6 +2753,8 @@ impl TryFrom for model::Vtk {
let encoding_info = EncodingInfo {
byte_order,
header_type: header_type.unwrap_or(ScalarType::UInt64),
+ compressor,
+ compression_level: 0, // This is meaningless when decoding
};
let appended_data = appended_data.as_ref();
@@ -2817,25 +3175,45 @@ impl TryFrom for model::Vtk {
byte_order,
title: String::new(),
data,
+ file_path: None,
})
}
}
-impl TryFrom<model::Vtk> for VTKFile {
- type Error = Error;
-    fn try_from(vtk: model::Vtk) -> Result<VTKFile, Error> {
+impl model::Vtk {
+ /// Converts the given Vtk model into an XML format represented by `VTKFile`.
+ ///
+ /// This function allows one to specify the compression level (0-9):
+ /// ```verbatim
+ /// 0 -> No compression
+ /// 1 -> Fastest write
+ /// ...
+ /// 5 -> Balanced performance
+ /// ...
+ /// 9 -> Slowest but smallest file size.
+ /// ```
+ pub fn try_into_xml_format(
+ self,
+ compressor: Compressor,
+ compression_level: u32,
+    ) -> Result<VTKFile, Error> {
let model::Vtk {
version,
byte_order,
data: data_set,
+ file_path,
..
- } = vtk;
+ } = self;
+
+ let source_path = file_path.as_ref().map(|p| p.as_ref());
let header_type = ScalarType::UInt64;
let encoding_info = EncodingInfo {
byte_order,
header_type,
+ compressor,
+ compression_level,
};
let appended_data = Vec::new();
@@ -2855,7 +3233,7 @@ impl TryFrom for VTKFile {
pieces: pieces
.into_iter()
.map(|piece| {
- let piece_data = piece.load_piece_data()?;
+ let piece_data = piece.into_loaded_piece_data(source_path)?;
let model::ImageDataPiece { extent, data } = piece_data;
Ok(Piece {
extent: Some(extent.into()),
@@ -2882,7 +3260,7 @@ impl TryFrom for VTKFile {
pieces: pieces
.into_iter()
.map(|piece| {
- let piece_data = piece.load_piece_data()?;
+ let piece_data = piece.into_loaded_piece_data(source_path)?;
let model::StructuredGridPiece {
extent,
points,
@@ -2914,7 +3292,7 @@ impl TryFrom for VTKFile {
pieces: pieces
.into_iter()
.map(|piece| {
- let piece_data = piece.load_piece_data()?;
+ let piece_data = piece.into_loaded_piece_data(source_path)?;
let model::RectilinearGridPiece {
extent,
coords,
@@ -2947,7 +3325,7 @@ impl TryFrom for VTKFile {
pieces: pieces
.into_iter()
.map(|piece| {
- let piece_data = piece.load_piece_data()?;
+ let piece_data = piece.into_loaded_piece_data(source_path)?;
let num_points = piece_data.num_points();
let model::UnstructuredGridPiece {
points,
@@ -2980,7 +3358,7 @@ impl TryFrom for VTKFile {
pieces: pieces
.into_iter()
.map(|piece| {
- let piece_data = piece.load_piece_data()?;
+ let piece_data = piece.into_loaded_piece_data(source_path)?;
let num_points = piece_data.num_points();
let number_of_verts = piece_data.num_verts();
let number_of_lines = piece_data.num_lines();
@@ -3064,22 +3442,41 @@ impl TryFrom for VTKFile {
version,
byte_order,
header_type: Some(header_type),
- compressor: Compressor::None,
+ compressor,
appended_data,
data_set,
})
}
}
+impl TryFrom<model::Vtk> for VTKFile {
+ type Error = Error;
+    fn try_from(vtk: model::Vtk) -> Result<VTKFile, Error> {
+ vtk.try_into_xml_format(Compressor::None, 0)
+ }
+}
+
/// Import an XML VTK file from the specified path.
pub(crate) fn import(file_path: impl AsRef<Path>) -> Result<VTKFile> {
let f = std::fs::File::open(file_path)?;
parse(std::io::BufReader::new(f))
}
+fn de_from_reader(reader: impl BufRead) -> Result<VTKFile> {
+ let mut reader = quick_xml::Reader::from_reader(reader);
+ reader
+ .expand_empty_elements(true)
+ .check_end_names(true)
+ .trim_text(true);
+ //TODO: Uncomment when https://github.com/tafia/quick-xml/pull/253 is merged
+ //.trim_text_end(false);
+ let mut de = de::Deserializer::new(reader);
+ Ok(VTKFile::deserialize(&mut de)?)
+}
+
/// Parse an XML VTK file from the given reader.
pub(crate) fn parse(reader: impl BufRead) -> Result<VTKFile> {
- Ok(de::from_reader(reader)?)
+ Ok(de_from_reader(reader)?)
}
/// Import an XML VTK file from the specified path.
@@ -3087,7 +3484,7 @@ pub(crate) fn parse(reader: impl BufRead) -> Result {
pub(crate) async fn import_async(file_path: impl AsRef<Path>) -> Result<VTKFile> {
let f = tokio::fs::File::open(file_path).await?;
// Blocked on async support from quick-xml (e.g. https://github.com/tafia/quick-xml/pull/233)
- Ok(de::from_reader(std::io::BufReader::new(f))?)
+ Ok(de_from_reader(std::io::BufReader::new(f))?)
}
/// Export an XML VTK file to the specified path.
@@ -3264,7 +3661,7 @@ mod tests {
//eprintln!("{:#?}", &vtk);
let as_bytes = se::to_bytes(&vtk)?;
//eprintln!("{:?}", &as_bytes);
- let vtk_roundtrip = de::from_reader(as_bytes.as_slice()).unwrap();
+ let vtk_roundtrip = de_from_reader(as_bytes.as_slice()).unwrap();
assert_eq!(vtk, vtk_roundtrip);
Ok(())
}
@@ -3275,7 +3672,7 @@ mod tests {
//eprintln!("{:#?}", &vtk);
let as_bytes = se::to_bytes(&vtk)?;
//eprintln!("{:?}", &as_bytes);
- let vtk_roundtrip = de::from_reader(as_bytes.as_slice()).unwrap();
+ let vtk_roundtrip = de_from_reader(as_bytes.as_slice()).unwrap();
assert_eq!(vtk, vtk_roundtrip);
Ok(())
}
@@ -3286,7 +3683,7 @@ mod tests {
//eprintln!("{:#?}", &vtk);
let as_bytes = se::to_bytes(&vtk)?;
//eprintln!("{:?}", &as_bytes);
- let vtk_roundtrip = de::from_reader(as_bytes.as_slice()).unwrap();
+ let vtk_roundtrip = de_from_reader(as_bytes.as_slice()).unwrap();
assert_eq!(vtk, vtk_roundtrip);
Ok(())
}
@@ -3351,9 +3748,9 @@ mod tests {
#[test]
fn hexahedron_appended() -> Result<()> {
let vtk = import("assets/hexahedron.vtu")?;
- eprintln!("{:#?}", &vtk);
+ //eprintln!("{:#?}", &vtk);
let as_str = se::to_string(&vtk).unwrap();
- eprintln!("{}", &as_str);
+ //eprintln!("{}", &as_str);
let vtk_roundtrip = de::from_str(&as_str).unwrap();
assert_eq!(vtk, vtk_roundtrip);
Ok(())
@@ -3387,17 +3784,6 @@ mod tests {
Ok(())
}
- #[test]
- fn parallel_compressed_cube() -> Result<()> {
- let vtk = import("assets/cube_compressed.pvtu")?;
- //eprintln!("{:#?}", &vtk);
- let as_str = se::to_string(&vtk).unwrap();
- //eprintln!("{}", &as_str);
- let vtk_roundtrip = de::from_str(&as_str).unwrap();
- assert_eq!(vtk, vtk_roundtrip);
- Ok(())
- }
-
#[test]
fn coordinates() -> Result<()> {
let xml = r#"
@@ -3512,7 +3898,8 @@ mod tests {
Attribute::generic("Z Velocity", 1).with_data(vec![0.0f32, 0.5, 0.0]),
]
}
- })
+ }),
+ file_path: None,
}
);
Ok(())
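
Finally, the new compression knobs in use (a sketch; assumes `Compressor`, `VTKFile`, and the XML `Error` type are all exported from `vtkio::xml`):

```rust
use vtkio::model::Vtk;
use vtkio::xml::{Compressor, Error, VTKFile};

fn to_compressed_xml(vtk: Vtk) -> Result<VTKFile, Error> {
    // Level 9 favors the smallest file; `Compressor::None` or level 0 writes
    // the data uncompressed, matching the old `TryFrom` behavior.
    vtk.try_into_xml_format(Compressor::ZLib, 9)
}
```
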
diff --git a/tests/legacy.rs b/tests/legacy.rs
index 16d9b01..6459d68 100644
--- a/tests/legacy.rs
+++ b/tests/legacy.rs
@@ -56,6 +56,7 @@ fn para_tet_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("vtk output"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.0f64, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0,
@@ -93,6 +94,7 @@ fn para_tets_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("vtk output"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
13.2, 135.4, -7.7, 13.7, 134.2, -8.7, 12.2, 134.7, -8.6, 12.7, 133.6, -7.0, 3.6,
@@ -165,6 +167,7 @@ fn tet_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Tetrahedron example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.0f32, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0,
@@ -197,6 +200,7 @@ fn tri_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Triangle example"),
+ file_path: None,
data: DataSet::inline(PolyDataPiece {
points: vec![0.0f32, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0].into(),
polys: Some(VertexNumbers::Legacy {
@@ -222,6 +226,7 @@ fn tri_attrib_ascii_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Triangle example"),
+ file_path: None,
data: DataSet::inline(PolyDataPiece {
points: vec![0.0f32, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0].into(),
polys: Some(VertexNumbers::Legacy {
@@ -265,6 +270,7 @@ fn tri_attrib_binary_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Triangle example"),
+ file_path: None,
data: DataSet::inline(PolyDataPiece {
points: vec![0.0f32, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, -1.0].into(),
polys: Some(VertexNumbers::Legacy {
@@ -308,6 +314,7 @@ fn square_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Square example"),
+ file_path: None,
data: DataSet::inline(PolyDataPiece {
points: vec![
0.0f32, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, -1.0, 0.0, 0.0, -1.0,
@@ -336,6 +343,7 @@ fn cube_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Cube example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, 1.0, 0.0, 0.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0,
@@ -367,6 +375,7 @@ fn structured_grid_test() -> Result {
version: Version::new((3, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("vtk output"),
+ file_path: None,
data: DataSet::inline(StructuredGridPiece {
extent: Extent::Dims([2, 2, 2]),
points: vec![
@@ -423,6 +432,7 @@ fn rectilinear_grid_test() -> Result {
version: Version::new((3, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("vtk output"),
+ file_path: None,
data: DataSet::inline(RectilinearGridPiece {
extent: Extent::Dims([3, 4, 1]),
coords: Coordinates {
@@ -467,6 +477,7 @@ fn field_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("field example"),
+ file_path: None,
data: DataSet::Field {
name: String::from("FieldData"),
data_array: vec![
@@ -567,6 +578,7 @@ fn cube_complex_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Cube example"),
+ file_path: None,
data: DataSet::inline(PolyDataPiece {
points: points.clone(),
polys: polys.clone(),
@@ -650,6 +662,7 @@ fn unstructured_grid_complex_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Unstructured Grid Example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.0f32, 0., 0., 1., 0., 0., 2., 0., 0., 0., 1., 0., 1., 1., 0., 2., 1., 0., 0., 0.,
@@ -737,6 +750,7 @@ fn volume_complex_test() -> Result {
version: Version::new((2, 0)),
byte_order: ByteOrder::BigEndian,
title: String::from("Volume example"),
+ file_path: None,
data: DataSet::inline(ImageDataPiece {
extent: Extent::Dims([3, 4, 6]),
data: Attributes {
@@ -779,6 +793,7 @@ fn dodecagon_test() -> Result {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Dodecagon example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.5f32,
@@ -845,6 +860,7 @@ fn dodecagon_with_meta_test() {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Dodecagon example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.5f32,
@@ -906,6 +922,7 @@ fn binary_dodecagon_test() {
version: Version::new((4, 2)),
byte_order: ByteOrder::BigEndian,
title: String::from("Dodecagon example"),
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: vec![
0.5f32,
diff --git a/tests/xml.rs b/tests/xml.rs
index 756e58b..413647c 100644
--- a/tests/xml.rs
+++ b/tests/xml.rs
@@ -1,3 +1,4 @@
+#![cfg(feature = "xml")]
use std::io::BufReader;
use vtkio::{import, model::*, parse_xml, Error};
@@ -8,6 +9,7 @@ fn make_box_vtu() -> Vtk {
version: Version { major: 4, minor: 2 },
title: String::new(),
byte_order: ByteOrder::BigEndian,
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: IOBuffer::F64(vec![
0.5208333134651184,
@@ -91,7 +93,8 @@ fn box_parse_xml() -> Result {
#[test]
fn box_import() -> Result {
- let vtk = import("./assets/box.vtu")?;
+ let mut vtk = import("./assets/box.vtu")?;
+ vtk.file_path = None; // erase file path before comparison.
assert_eq!(vtk, make_box_vtu());
Ok(())
}
@@ -101,6 +104,7 @@ fn make_box_para_vtu() -> Vtk {
version: Version { major: 1, minor: 0 },
title: String::new(),
byte_order: ByteOrder::LittleEndian,
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
points: IOBuffer::F64(vec![
0.5208333134651184,
@@ -179,6 +183,7 @@ fn make_hexahedron_vtu() -> Vtk {
version: Version { major: 1, minor: 0 },
title: String::new(),
byte_order: ByteOrder::LittleEndian,
+ file_path: None,
data: DataSet::inline(UnstructuredGridPiece {
#[rustfmt::skip]
points: IOBuffer::F32(vec![
@@ -208,7 +213,63 @@ fn make_hexahedron_vtu() -> Vtk {
#[test]
fn hexahedron_appended() -> Result {
- let vtu = import("./assets/hexahedron.vtu")?;
+ let mut vtu = import("./assets/hexahedron.vtu")?;
+ vtu.file_path = None;
+ assert_eq!(vtu, make_hexahedron_vtu());
+ Ok(())
+}
+
+#[test]
+fn hexahedron_pvtu() -> Result {
+ let mut vtu = import("./assets/hexahedron_parallel.pvtu")?;
+ vtu.load_all_pieces().unwrap();
+ vtu.file_path = None;
+ assert_eq!(vtu, make_hexahedron_vtu());
+ Ok(())
+}
+
+#[test]
+fn hexahedron_lzma_pvtu() -> Result {
+ let mut vtu = import("./assets/hexahedron_parallel_lzma.pvtu")?;
+ vtu.load_all_pieces().unwrap();
+ vtu.file_path = None;
+ assert_eq!(vtu, make_hexahedron_vtu());
+ Ok(())
+}
+
+#[test]
+fn hexahedron_zlib() -> Result {
+ let mut vtu = import("./assets/hexahedron_zlib.vtu")?;
+ vtu.load_all_pieces().unwrap();
+ vtu.file_path = None;
+ assert_eq!(vtu, make_hexahedron_vtu());
+ Ok(())
+}
+
+// TODO: Will not work until https://github.com/tafia/quick-xml/pull/253 is merged.
+//#[test]
+//fn hexahedron_zlib_binary() -> Result {
+// let mut vtu = import("./assets/hexahedron_zlib_binary.vtu")?;
+// vtu.load_all_pieces().unwrap();
+// vtu.file_path = None;
+// assert_eq!(vtu, make_hexahedron_vtu());
+// Ok(())
+//}
+
+#[test]
+fn hexahedron_lz4() -> Result {
+ let mut vtu = import("./assets/hexahedron_lz4.vtu")?;
+ vtu.load_all_pieces().unwrap();
+ vtu.file_path = None;
+ assert_eq!(vtu, make_hexahedron_vtu());
+ Ok(())
+}
+
+#[test]
+fn hexahedron_binary() -> Result {
+ let mut vtu = import("./assets/hexahedron_binary.vtu")?;
+ vtu.load_all_pieces().unwrap();
+ vtu.file_path = None;
assert_eq!(vtu, make_hexahedron_vtu());
Ok(())
}