1 change: 1 addition & 0 deletions lang/rust/Cargo.lock

Some generated files are not rendered by default.

5 changes: 3 additions & 2 deletions lang/rust/avro/examples/benchmark.rs
@@ -20,6 +20,7 @@ use apache_avro::{
types::{Record, Value},
Reader, Writer,
};
+use apache_avro_test_helper::TestResult;
use std::{
io::{BufReader, BufWriter},
time::{Duration, Instant},
@@ -45,7 +46,7 @@ fn benchmark(
big_or_small: &str,
count: usize,
runs: usize,
-) -> anyhow::Result<()> {
+) -> TestResult {
let mut records = Vec::new();
for __ in 0..count {
records.push(record.clone());
@@ -100,7 +101,7 @@ fn benchmark(
Ok(())
}

-fn main() -> anyhow::Result<()> {
+fn main() -> TestResult {
let raw_small_schema = r#"
{"namespace": "test", "type": "record", "name": "Test", "fields": [{"type": {"type": "string"}, "name": "field"}]}
"#;
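
Across the example programs in this PR the change is mechanical: the anyhow::Result<()> return type of main() and of helpers such as benchmark() is swapped for apache_avro_test_helper::TestResult, so the examples and the test suite share one result type while the ? operator keeps working. A rough sketch of the idea follows; the alias definition is an assumption for illustration, not the helper crate's actual source.

// Sketch only: TestResult is assumed here to be a simple alias over a
// boxed-error Result; the real definition in apache_avro_test_helper may differ.
pub type TestResult = Result<(), Box<dyn std::error::Error>>;

fn main() -> TestResult {
    // `?` propagates any failure instead of panicking via `.unwrap()`.
    let n: u32 = "42".parse()?;
    assert_eq!(n, 42);
    Ok(())
}
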
5 changes: 3 additions & 2 deletions lang/rust/avro/examples/generate_interop_data.rs
@@ -20,6 +20,7 @@ use apache_avro::{
types::{Record, Value},
Codec, Writer,
};
+use apache_avro_test_helper::TestResult;
use std::{
collections::HashMap,
io::{BufWriter, Write},
@@ -74,7 +75,7 @@ fn create_datum(schema: &Schema) -> Record {
datum
}

-fn main() -> anyhow::Result<()> {
+fn main() -> TestResult {
let schema_str = std::fs::read_to_string("../../share/test/schemas/interop.avsc")
.expect("Unable to read the interop Avro schema");
let schema = Schema::parse_str(schema_str.as_str())?;
@@ -104,7 +105,7 @@ fn main() -> anyhow::Result<()> {
Ok(())
}

-fn write_user_metadata<W: Write>(writer: &mut Writer<BufWriter<W>>) -> anyhow::Result<()> {
+fn write_user_metadata<W: Write>(writer: &mut Writer<BufWriter<W>>) -> TestResult {
writer.add_user_metadata("user_metadata".to_string(), b"someByteArray")?;

Ok(())
3 changes: 2 additions & 1 deletion lang/rust/avro/examples/test_interop_data.rs
@@ -16,13 +16,14 @@
// under the License.

use apache_avro::Reader;
+use apache_avro_test_helper::TestResult;
use std::{
collections::HashMap,
ffi::OsStr,
io::{BufReader, Read},
};

-fn main() -> anyhow::Result<()> {
+fn main() -> TestResult {
let mut expected_user_metadata: HashMap<String, Vec<u8>> = HashMap::new();
expected_user_metadata.insert("user_metadata".to_string(), b"someByteArray".to_vec());

35 changes: 19 additions & 16 deletions lang/rust/avro/src/codec.rs
@@ -186,55 +186,58 @@ impl Codec {
mod tests {
use super::*;
use pretty_assertions::{assert_eq, assert_ne};
+use apache_avro_test_helper::TestResult;

const INPUT: &[u8] = b"theanswertolifetheuniverseandeverythingis42theanswertolifetheuniverseandeverythingis4theanswertolifetheuniverseandeverythingis2";

#[test]
-fn null_compress_and_decompress() {
+fn null_compress_and_decompress() -> TestResult {
let codec = Codec::Null;
let mut stream = INPUT.to_vec();
-codec.compress(&mut stream).unwrap();
+codec.compress(&mut stream)?;
assert_eq!(INPUT, stream.as_slice());
-codec.decompress(&mut stream).unwrap();
+codec.decompress(&mut stream)?;
assert_eq!(INPUT, stream.as_slice());
+Ok(())
}

#[test]
-fn deflate_compress_and_decompress() {
-compress_and_decompress(Codec::Deflate);
+fn deflate_compress_and_decompress() -> TestResult {
+compress_and_decompress(Codec::Deflate)
}

#[cfg(feature = "snappy")]
#[test]
-fn snappy_compress_and_decompress() {
-compress_and_decompress(Codec::Snappy);
+fn snappy_compress_and_decompress() -> TestResult {
+compress_and_decompress(Codec::Snappy)
}

#[cfg(feature = "zstandard")]
#[test]
-fn zstd_compress_and_decompress() {
-compress_and_decompress(Codec::Zstandard);
+fn zstd_compress_and_decompress() -> TestResult {
+compress_and_decompress(Codec::Zstandard)
}

#[cfg(feature = "bzip")]
#[test]
-fn bzip_compress_and_decompress() {
-compress_and_decompress(Codec::Bzip2);
+fn bzip_compress_and_decompress() -> TestResult {
+compress_and_decompress(Codec::Bzip2)
}

#[cfg(feature = "xz")]
#[test]
-fn xz_compress_and_decompress() {
-compress_and_decompress(Codec::Xz);
+fn xz_compress_and_decompress() -> TestResult {
+compress_and_decompress(Codec::Xz)
}

-fn compress_and_decompress(codec: Codec) {
+fn compress_and_decompress(codec: Codec) -> TestResult {
let mut stream = INPUT.to_vec();
-codec.compress(&mut stream).unwrap();
+codec.compress(&mut stream)?;
assert_ne!(INPUT, stream.as_slice());
assert!(INPUT.len() > stream.len());
-codec.decompress(&mut stream).unwrap();
+codec.decompress(&mut stream)?;
assert_eq!(INPUT, stream.as_slice());
+Ok(())
}

#[test]
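
The codec tests follow the same pattern: each #[test] function now returns TestResult, the .unwrap() calls become ?, and a test fails if it returns an Err. A minimal, self-contained sketch of that mechanism, using a local stand-in alias rather than the helper crate:

// Illustration only: a #[test] function may return a Result, and an Err fails the test.
// This local alias stands in for apache_avro_test_helper::TestResult.
type TestResult = Result<(), Box<dyn std::error::Error>>;

#[cfg(test)]
mod tests {
    use super::TestResult;

    #[test]
    fn parse_roundtrip() -> TestResult {
        let n: i64 = "42".parse()?; // an Err here would fail the test, no panic needed
        assert_eq!(n.to_string(), "42");
        Ok(())
    }
}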