Released v0.3.0
Dennis Schwerdel committed Apr 27, 2017
1 parent c2bf23d commit 737f78a
Showing 15 changed files with 132 additions and 86 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -3,10 +3,11 @@
This project follows [semantic versioning](http://semver.org).


-### UNRELEASED
+### v0.3.0 (2017-04-27)
* [added] Ability to read/write tar file from/to stdin/stdout
* [added] Added date to bundles
* [added] Option to combine small bundles
* [added] Fixed chunker
* [modified] Logging to stderr
* [modified] Enforce deterministic bundle ordering
* [modified] More info in analyze subcommand
130 changes: 77 additions & 53 deletions Cargo.lock

Large diffs are not rendered by default.

13 changes: 7 additions & 6 deletions Cargo.toml
@@ -1,14 +1,15 @@
[package]
name = "zvault"
version = "0.2.0"
version = "0.3.0"
authors = ["Dennis Schwerdel <[email protected]>"]
description = "Deduplicating backup tool"

[dependencies]
serde = "0.9"
rmp-serde = "0.12"
serde_yaml = "0.6"
serde_utils = "0.5.2"
serde = "1.0"
rmp-serde = "0.13"
serde_yaml = "0.7"
serde_utils = "0.6"
serde_bytes = "0.10"
squash-sys = "0.9"
quick-error = "1.1"
blake2-rfc = "0.2"
@@ -30,7 +31,7 @@ crossbeam = "0.2"
pbr = "1.0"
users = "0.5"
time = "*"
libc = "*"
libc = "0.2"
index = {path="index"}
chunking = {path="chunking"}

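Note: besides the version bumps, `serde_bytes` enters the dependency list because serde 1.0 dropped the built-in `serde::bytes` module; the byte-string wrappers now live in a separate crate, which is what the import changes in the src/util/*.rs diffs further down reflect. A minimal sketch of the new path (the function is illustrative, not zvault code):

```rust
extern crate serde_bytes;

use serde_bytes::ByteBuf;

// serde 0.9: `use serde::bytes::ByteBuf;`  serde 1.0: `use serde_bytes::ByteBuf;`
// ByteBuf wraps a Vec<u8> so serializers emit a compact byte string
// instead of a sequence of individual integers.
fn into_byte_buf(data: Vec<u8>) -> ByteBuf {
    ByteBuf::from(data)
}
```
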
19 changes: 19 additions & 0 deletions deb/zvault/debian/changelog
@@ -1,3 +1,22 @@
zvault (0.3.0) stable; urgency=medium

* [added] Ability to read/write tar file from/to stdin/stdout
* [added] Added date to bundles
* [added] Option to combine small bundles
* [added] Fixed chunker
* [modified] Logging to stderr
* [modified] Enforce deterministic bundle ordering
* [modified] More info in analyze subcommand
* [modified] Estimating final bundle size in order to reach it
* [fixed] Only print "repairing bundles" if actually repairing bundles
* [fixed] Only put mode bits of st_mode into metadata
* [fixed] Only repairing backups with --repair
* [fixed] Fixed vacuum
* [fixed] First removing bundles, then adding new ones
* [fixed] No longer clobbering broken files

-- Dennis Schwerdel <[email protected]> Thu, 27 Apr 2017 13:34:34 +0200

zvault (0.2.0) stable; urgency=medium

* [added] Added CHANGELOG
6 changes: 3 additions & 3 deletions src/bundledb/db.rs
@@ -255,13 +255,13 @@ impl BundleDb {
let id = bundle.id();
let (folder, filename) = self.layout.local_bundle_path(&id, self.local_bundles.len());
try!(fs::create_dir_all(&folder).context(&folder as &Path));
-let bundle = try!(bundle.copy_to(&self.layout.base_path(), folder.join(filename)));
+let bundle = try!(bundle.copy_to(self.layout.base_path(), folder.join(filename)));
self.local_bundles.insert(id, bundle);
Ok(())
}

pub fn add_bundle(&mut self, bundle: BundleWriter) -> Result<BundleInfo, BundleDbError> {
-let mut bundle = try!(bundle.finish(&self));
+let mut bundle = try!(bundle.finish(self));
if bundle.info.mode == BundleMode::Meta {
try!(self.copy_remote_bundle_to_cache(&bundle))
}
@@ -288,7 +288,7 @@ impl BundleDb {
}

pub fn get_chunk_list(&self, bundle: &BundleId) -> Result<ChunkList, BundleDbError> {
-let mut bundle = try!(self.get_stored_bundle(bundle).and_then(|stored| self.get_bundle(&stored)));
+let mut bundle = try!(self.get_stored_bundle(bundle).and_then(|stored| self.get_bundle(stored)));
Ok(try!(bundle.get_chunk_list()).clone())
}

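Note: the changes in this file (and the similar ones in the src/repository/*.rs diffs below) drop `&` borrows on values that are already references, the pattern clippy flags as `needless_borrow`. A standalone illustration with a hypothetical helper:

```rust
// Hypothetical helper standing in for calls like `get_inode(chunks)`:
// the parameter is already a reference, so callers need no extra `&`.
fn count_bytes(chunks: &[u8]) -> usize {
    chunks.len()
}

fn main() {
    let data = vec![1u8, 2, 3];
    let chunks: &[u8] = &data;
    // `count_bytes(&chunks)` would pass a `&&[u8]` that only works via
    // auto-deref; passing `chunks` directly is the form this commit uses.
    assert_eq!(count_bytes(chunks), 3);
}
```
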
4 changes: 2 additions & 2 deletions src/bundledb/mod.rs
@@ -31,9 +31,9 @@ impl Serialize for BundleId {
}
}

-impl Deserialize for BundleId {
+impl<'a> Deserialize<'a> for BundleId {
#[inline]
-fn deserialize<D: serde::Deserializer>(de: D) -> Result<Self, D::Error> {
+fn deserialize<D: serde::Deserializer<'a>>(de: D) -> Result<Self, D::Error> {
let hash = try!(Hash::deserialize(de));
Ok(BundleId(hash))
}
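Note: serde 1.0 threads a lifetime through `Deserialize` and `Deserializer` to allow zero-copy deserialization, which is why the manual impl above gains the `<'a>` parameter. The same migration applied to a hypothetical newtype, for reference:

```rust
extern crate serde;

use serde::{Deserialize, Deserializer};

// Hypothetical newtype; like BundleId above, it simply delegates to the
// wrapped type's Deserialize impl.
struct Wrapper(u64);

impl<'de> Deserialize<'de> for Wrapper {
    #[inline]
    fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
        let inner = u64::deserialize(de)?;
        Ok(Wrapper(inner))
    }
}
```
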
1 change: 1 addition & 0 deletions src/main.rs
@@ -1,6 +1,7 @@
#![recursion_limit="128"]
#![allow(unknown_lints, float_cmp)]
extern crate serde;
+extern crate serde_bytes;
extern crate rmp_serde;
#[macro_use] extern crate serde_utils;
extern crate squash_sys as squash;
2 changes: 1 addition & 1 deletion src/repository/backup.rs
@@ -160,7 +160,7 @@ impl Repository {
if inode.file_type == FileType::Directory {
let path = path.join(inode.name);
for chunks in inode.children.unwrap().values() {
-let inode = try!(self.get_inode(&chunks));
+let inode = try!(self.get_inode(chunks));
queue.push_back((path.clone(), inode));
}
}
2 changes: 1 addition & 1 deletion src/repository/basic_io.rs
@@ -201,7 +201,7 @@ impl Repository {

pub fn get_stream<W: Write>(&mut self, chunks: &[Chunk], w: &mut W) -> Result<(), RepositoryError> {
for &(ref hash, len) in chunks {
-let data = try!(try!(self.get_chunk(*hash)).ok_or_else(|| IntegrityError::MissingChunk(hash.clone())));
+let data = try!(try!(self.get_chunk(*hash)).ok_or_else(|| IntegrityError::MissingChunk(*hash)));
debug_assert_eq!(data.len() as u32, len);
try!(w.write_all(&data));
}
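Note: `hash.clone()` becomes `*hash` because the hash is a small `Copy` value, so a plain dereference copies it; this is clippy's `clone_on_copy` advice. A minimal illustration with a hypothetical key type:

```rust
// Hypothetical 16-byte key type standing in for zvault's Hash, which is Copy.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Key(u64, u64);

fn main() {
    let keys = [Key(1, 2), Key(3, 4)];
    let mut copies = Vec::new();
    for key in &keys {
        // For a Copy type, `*key` copies the value; `key.clone()` compiles
        // too but is noisier, hence the change above.
        copies.push(*key);
    }
    assert_eq!(copies.len(), 2);
}
```
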
14 changes: 7 additions & 7 deletions src/repository/integrity.rs
@@ -97,7 +97,7 @@ impl Repository {
},
Some(FileData::ChunkedIndirect(ref chunks)) => {
if try!(self.check_chunks(checked, chunks, true)) {
-let chunk_data = try!(self.get_data(&chunks));
+let chunk_data = try!(self.get_data(chunks));
let chunks = ChunkList::read_from(&chunk_data);
try!(self.check_chunks(checked, &chunks, true));
}
@@ -191,12 +191,12 @@
try!(self.flush());
backup.root = chunks;
backup.modified = true;
-try!(self.evacuate_broken_backup(&name));
-try!(self.save_backup(&backup, &name));
+try!(self.evacuate_broken_backup(name));
+try!(self.save_backup(backup, name));
},
Err(err) => if repair {
warn!("The root of the backup {} has been corrupted\n\tcaused by: {}", name, err);
-try!(self.evacuate_broken_backup(&name));
+try!(self.evacuate_broken_backup(name));
} else {
return Err(err)
}
@@ -213,7 +213,7 @@
};
info!("Checking inode...");
let mut checked = Bitmap::new(self.index.capacity());
-let mut inodes = try!(self.get_backup_path(&backup, path));
+let mut inodes = try!(self.get_backup_path(backup, path));
let mut inode = inodes.pop().unwrap();
let mut modified = false;
if let Err(err) = self.check_inode_contents(&inode, &mut checked) {
@@ -260,8 +260,8 @@
try!(self.flush());
backup.root = chunks;
backup.modified = true;
-try!(self.evacuate_broken_backup(&name));
-try!(self.save_backup(&backup, &name));
+try!(self.evacuate_broken_backup(name));
+try!(self.save_backup(backup, name));
}
Ok(())
}
4 changes: 2 additions & 2 deletions src/repository/metadata.rs
@@ -249,7 +249,7 @@ impl Inode {

#[inline]
pub fn decode(data: &[u8]) -> Result<Self, InodeError> {
-Ok(try!(msgpack::decode(&data)))
+Ok(try!(msgpack::decode(data)))
}
}

@@ -299,7 +299,7 @@ impl Repository {
if let Some(ref contents) = inode.data {
match *contents {
FileData::Inline(ref data) => {
-try!(file.write_all(&data));
+try!(file.write_all(data));
},
FileData::ChunkedDirect(ref chunks) => {
try!(self.get_stream(chunks, &mut file));
6 changes: 3 additions & 3 deletions src/util/chunk.rs
@@ -2,7 +2,7 @@ use std::io::{self, Write, Read, Cursor};
use std::ops::{Deref, DerefMut};

use serde::{self, Serialize, Deserialize};
-use serde::bytes::{Bytes, ByteBuf};
+use serde_bytes::{Bytes, ByteBuf};
use serde::de::Error;

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
@@ -118,9 +118,9 @@ impl Serialize for ChunkList {
}
}

-impl Deserialize for ChunkList {
+impl<'a> Deserialize<'a> for ChunkList {
#[inline]
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer {
+fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'a> {
let data: Vec<u8> = try!(ByteBuf::deserialize(deserializer)).into();
if data.len() % 20 != 0 {
return Err(D::Error::custom("Invalid chunk list length"));
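Note: the `% 20` check encodes the chunk-list layout used in this file: each entry is a 16-byte hash followed by a 4-byte little-endian length (hence the `byteorder` import). A standalone parsing sketch under that assumption, not zvault's actual `read_from`:

```rust
extern crate byteorder;

use byteorder::{LittleEndian, ReadBytesExt};
use std::io::{self, Cursor, Read};

// Parse (hash, length) entries from a buffer laid out as a 16-byte hash
// plus a 4-byte little-endian length per chunk, matching the % 20 check.
fn parse_chunks(data: &[u8]) -> io::Result<Vec<([u8; 16], u32)>> {
    if data.len() % 20 != 0 {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "invalid chunk list length"));
    }
    let mut cursor = Cursor::new(data);
    let mut chunks = Vec::with_capacity(data.len() / 20);
    for _ in 0..data.len() / 20 {
        let mut hash = [0u8; 16];
        cursor.read_exact(&mut hash)?;
        let len = cursor.read_u32::<LittleEndian>()?;
        chunks.push((hash, len));
    }
    Ok(chunks)
}
```
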
2 changes: 1 addition & 1 deletion src/util/encryption.rs
@@ -5,7 +5,7 @@ use std::fs::{self, File};
use std::sync::{Once, ONCE_INIT};

use serde_yaml;
-use serde::bytes::ByteBuf;
+use serde_bytes::ByteBuf;

use sodiumoxide;
use sodiumoxide::crypto::sealedbox;
6 changes: 3 additions & 3 deletions src/util/hash.rs
@@ -1,6 +1,6 @@
use serde::{self, Serialize, Deserialize};
use serde::de::Error;
-use serde::bytes::{ByteBuf, Bytes};
+use serde_bytes::{ByteBuf, Bytes};

use murmurhash3::murmurhash3_x64_128;
use blake2::blake2b::blake2b;
@@ -80,8 +80,8 @@ impl Serialize for Hash {
}
}

-impl Deserialize for Hash {
-fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer {
+impl<'a> Deserialize<'a> for Hash {
+fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'a> {
let dat: Vec<u8> = try!(ByteBuf::deserialize(deserializer)).into();
if dat.len() != 16 {
return Err(D::Error::custom("Invalid key length"));
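Note: the 16-byte length check matches the 128-bit digest that `murmurhash3_x64_128` (imported above) produces as two u64 halves. A rough sketch of building such a digest; the seed and byte order here are illustrative, not necessarily zvault's:

```rust
extern crate murmurhash3;

use murmurhash3::murmurhash3_x64_128;

// Build a 128-bit (16-byte) digest, the size the deserializer expects.
fn digest(data: &[u8]) -> [u8; 16] {
    let (h1, h2) = murmurhash3_x64_128(data, 0);
    let mut out = [0u8; 16];
    out[..8].copy_from_slice(&h1.to_le_bytes());
    out[8..].copy_from_slice(&h2.to_le_bytes());
    out
}
```
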
6 changes: 3 additions & 3 deletions src/util/msgpack.rs
@@ -3,7 +3,7 @@ use serde::{Serialize, Deserialize};

use std::io::{Write, Read, Cursor};

-pub use serde::bytes::ByteBuf as Bytes;
+pub use serde_bytes::ByteBuf as Bytes;
pub use rmp_serde::decode::Error as DecodeError;
pub use rmp_serde::encode::Error as EncodeError;

@@ -25,14 +25,14 @@ pub fn encode_to_stream<T: Serialize>(t: &T, w: &mut Write) -> Result<(), Encode
}

#[inline]
-pub fn decode<T: Deserialize>(data: &[u8]) -> Result<T, DecodeError> {
+pub fn decode<'a, T: Deserialize<'a>>(data: &[u8]) -> Result<T, DecodeError> {
let data = Cursor::new(data);
let mut reader = rmp_serde::Deserializer::new(data);
T::deserialize(&mut reader)
}

#[inline]
-pub fn decode_from_stream<T: Deserialize>(r: &mut Read) -> Result<T, DecodeError> {
+pub fn decode_from_stream<'a, T: Deserialize<'a>>(r: &mut Read) -> Result<T, DecodeError> {
let mut reader = rmp_serde::Deserializer::new(r);
T::deserialize(&mut reader)
}
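
Note: both decode helpers now carry a deserializer lifetime; the Read-based rmp-serde deserializer can serve any such lifetime because it only produces owned data. A self-contained round trip using the same rmp-serde 0.13 calls the helpers wrap (values illustrative):

```rust
extern crate rmp_serde;
extern crate serde;

use serde::{Deserialize, Serialize};
use std::io::Cursor;

fn roundtrip() {
    let original: Vec<(String, u32)> = vec![("chunk".to_string(), 42)];

    // Serialize to a MessagePack byte vector.
    let mut buf = Vec::new();
    original
        .serialize(&mut rmp_serde::Serializer::new(&mut buf))
        .unwrap();

    // Deserialize it back, as decode() does internally via a Cursor.
    let mut de = rmp_serde::Deserializer::new(Cursor::new(&buf[..]));
    let restored: Vec<(String, u32)> = Deserialize::deserialize(&mut de).unwrap();
    assert_eq!(original, restored);
}

fn main() {
    roundtrip();
}
```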
