Skip to content
This repository has been archived by the owner on May 3, 2023. It is now read-only.

Commit

Permalink
Merge pull request #47 from mbrt/fix_incomplete_sig
Browse files Browse the repository at this point in the history
Fix incomplete sig
  • Loading branch information
mbrt committed Jan 9, 2016
2 parents 07e57c9 + 37187b3 commit 6df8e80
Show file tree
Hide file tree
Showing 17 changed files with 184 additions and 31 deletions.
6 changes: 6 additions & 0 deletions Docs.md
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,12 @@ To list all the backup snapshots contained in a directory:
duplicity collection-status --no-encryption file://<absolute-path-of-backup>
```

If you don't have enough space on the `/tmp` directory, bind-mount it to a drive with more capacity:

```
sudo mount -o bind /path/to/hd /tmp
```

### Time formats

duplicity uses time strings in two places. Firstly, many of the files duplicity creates will have the time in their filenames in the w3 datetime format as described in a [w3 note](http://www.w3.org/TR/NOTE-datetime). Basically they look like "2001-07-15T04:09:38-07:00", which means what it looks like. The "-07:00" section means the time zone is 7 hours behind UTC.
Expand Down
34 changes: 34 additions & 0 deletions src/backend/local.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,3 +76,37 @@ impl Iterator for FileNameIterator {
None
}
}


#[cfg(test)]
mod test {
    use super::*;
    use backend::Backend;

    #[test]
    fn multi_chain_files() {
        // List the test backup directory and verify that every
        // duplicity-generated file name is reported by the backend.
        let backend = LocalBackend::new("tests/backups/multi_chain");
        let names = backend.file_names().unwrap().collect::<Vec<_>>();
        let mut actual = names.iter()
            .map(|p| p.to_str().unwrap())
            .filter(|name| name.starts_with("duplicity-"))
            .collect::<Vec<_>>();
        // sort to make the comparison independent of directory order
        actual.sort();
        let expected = vec!["duplicity-full-signatures.20160108T223144Z.sigtar.gz",
                            "duplicity-full-signatures.20160108T223209Z.sigtar.gz",
                            "duplicity-full.20160108T223144Z.manifest",
                            "duplicity-full.20160108T223144Z.vol1.difftar.gz",
                            "duplicity-full.20160108T223209Z.manifest",
                            "duplicity-full.20160108T223209Z.vol1.difftar.gz",
                            "duplicity-inc.20160108T223144Z.to.20160108T223159Z.manifest",
                            "duplicity-inc.20160108T223144Z.to.20160108T223159Z.vol1.difftar.gz",
                            "duplicity-inc.20160108T223209Z.to.20160108T223217Z.manifest",
                            "duplicity-inc.20160108T223209Z.to.20160108T223217Z.vol1.difftar.gz",
                            "duplicity-new-signatures.20160108T223144Z.to.20160108T223159Z.sigtar.gz",
                            "duplicity-new-signatures.20160108T223209Z.to.20160108T223217Z.sigtar.gz"];
        assert_eq!(actual, expected);
    }
}
4 changes: 4 additions & 0 deletions src/collections/file_naming.rs
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,10 @@ impl<'a> FileNameInfo<'a> {
info: info,
}
}

/// Returns the start time of the file, i.e. the first element of the
/// time range of its type.
pub fn start_time(&self) -> Timespec {
    let (start, _end) = self.info.tp.time_range();
    start
}
}

impl Type {
Expand Down
58 changes: 45 additions & 13 deletions src/collections/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -424,7 +424,7 @@ impl SignatureChain {
/// Adds the given incremental signature to the signature chain if possible,
/// returns false otherwise.
pub fn add_new_sig(&mut self, fname: &FileNameInfo) -> bool {
if let fnm::Type::NewSig{ .. } = fname.info.tp {
if matches!(fname.info.tp, fnm::Type::NewSig{ .. }) {
self.incsigs.push(SignatureFile::from_filename_info(fname));
true
} else {
Expand Down Expand Up @@ -588,28 +588,31 @@ fn compute_backup_sets(fname_infos: &[FileNameInfo]) -> Vec<BackupSet> {
}

fn compute_signature_chains(fname_infos: &[FileNameInfo]) -> Vec<SignatureChain> {
// create a new signature chain for each fill signature
// collect full signatures, sort them by start time and make the chains from them
let mut sig_chains = fname_infos.iter()
.filter(|f| matches!(f.info.tp, fnm::Type::FullSig{..}))
.map(|f| SignatureChain::from_filename_info(f))
.map(SignatureChain::from_filename_info)
.collect::<Vec<_>>();
// and collect all the new signatures, sorted by start time
let mut new_sig = fname_infos.iter()
.filter(|f| matches!(f.info.tp, fnm::Type::NewSig{..}))
.collect::<Vec<_>>();
new_sig.sort_by(|a, b| a.info.tp.time_range().0.cmp(&b.info.tp.time_range().0));

// add the new signatures to signature chains
for sig in new_sig.into_iter() {
sig_chains.sort_by(|a, b| a.start_time().cmp(&b.start_time()));
// collect inc signatures and sort them by start time
let inc_sigs = {
let mut is = fname_infos.iter()
.filter(|f| matches!(f.info.tp, fnm::Type::NewSig{..}))
.collect::<Vec<_>>();
is.sort_by(|a, b| a.start_time().cmp(&b.start_time()));
is
};
// add inc signatures to chains
for inc in inc_sigs {
let mut added = false;
for chain in &mut sig_chains {
if chain.add_new_sig(&sig) {
if chain.end_time() == inc.start_time() && chain.add_new_sig(inc) {
added = true;
break;
}
}
if !added {
// TODO: add to orphaned filenames
// TODO: add to orphaned incremental signatures
}
}
sig_chains
Expand Down Expand Up @@ -731,4 +734,33 @@ mod test {
assert_eq!(inc.end_time(), parse_time_str("20150617t182650z").unwrap());
}
}

#[test]
fn multi_chain() {
    // Two full backups, each followed by one incremental backup: the
    // resulting collection must contain two backup chains and two
    // signature chains, each with exactly one incremental element.
    let fnames = vec!["duplicity-full.20160108T223144Z.manifest",
                      "duplicity-full.20160108T223144Z.vol1.difftar.gz",
                      "duplicity-full.20160108T223209Z.manifest",
                      "duplicity-full.20160108T223209Z.vol1.difftar.gz",
                      "duplicity-full-signatures.20160108T223144Z.sigtar.gz",
                      "duplicity-full-signatures.20160108T223209Z.sigtar.gz",
                      "duplicity-inc.20160108T223144Z.to.20160108T223159Z.manifest",
                      "duplicity-inc.20160108T223144Z.to.20160108T223159Z.vol1.difftar.gz",
                      "duplicity-inc.20160108T223209Z.to.20160108T223217Z.manifest",
                      "duplicity-inc.20160108T223209Z.to.20160108T223217Z.vol1.difftar.gz",
                      "duplicity-new-signatures.20160108T223144Z.to.20160108T223159Z.sigtar.gz",
                      "duplicity-new-signatures.20160108T223209Z.to.20160108T223217Z.sigtar.gz"];
    let collection = Collections::from_filenames(&fnames);
    assert_eq!(collection.backup_chains().count(), 2);
    assert_eq!(collection.signature_chains().count(), 2);
    // check both chains with the same expectations
    for idx in 0..2 {
        let bchain = collection.backup_chains().nth(idx).unwrap();
        assert_eq!(bchain.inc_sets().count(), 1);
        let schain = collection.signature_chains().nth(idx).unwrap();
        assert_eq!(schain.inc_signatures().count(), 1);
    }
}
}
89 changes: 71 additions & 18 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ impl<'a> Snapshot<'a> {
sig_id: self.sig_id,
})
} else {
Err(io::Error::new(io::ErrorKind::NotFound, "The signature chain is incomplete"))
Err(not_found("The signature chain is incomplete"))
}
}
}
Expand Down Expand Up @@ -284,9 +284,8 @@ impl<B: Backend> ResourceCache for Backup<B> {
let new_sig = try!(Chain::from_sigchain(sigchain, &self.backend));
*sig = Some(new_sig);
} else {
return Err(io::Error::new(io::ErrorKind::NotFound,
"The given backup snapshot does not have a \
corresponding signature"));
return Err(not_found("The given backup snapshot does not have a \
corresponding signature"));
}
}
}
Expand All @@ -298,15 +297,20 @@ impl<B: Backend> ResourceCache for Backup<B> {
}


/// Builds an `io::Error` of kind `NotFound` carrying the given message.
fn not_found(msg: &str) -> io::Error {
    let kind = io::ErrorKind::NotFound;
    io::Error::new(kind, msg)
}


#[cfg(test)]
mod test {
use super::*;
use backend::local::LocalBackend;
use collections::{BackupSet, Collections};
use signatures::{Chain, Entry};
use time_utils::parse_time_str;

use std::path::PathBuf;

use std::path::{Path, PathBuf};
use time::Timespec;


Expand Down Expand Up @@ -334,6 +338,19 @@ mod test {
gname: file.groupname().unwrap().to_owned(),
}
}

pub fn from_info(path: &str,
mtime: &str,
uname: &str,
gname: &str)
-> Self {
EntryTest {
path: Path::new(path).to_path_buf(),
mtime: parse_time_str(mtime).unwrap(),
uname: uname.to_owned(),
gname: gname.to_owned(),
}
}
}

fn from_backup_set(set: &BackupSet, full: bool) -> SnapshotTest {
Expand Down Expand Up @@ -387,9 +404,23 @@ mod test {
.collect::<Vec<_>>()
}

/// Collects, for every snapshot in the backup, its signature entries as
/// `EntryTest` values, skipping entries whose path is not valid UTF-8.
fn from_backup<B: Backend>(backup: &Backup<B>) -> Vec<Vec<EntryTest>> {
    let mut result = Vec::new();
    for snapshot in backup.snapshots().unwrap() {
        let entries = snapshot.entries()
            .unwrap()
            .as_signature()
            .map(|e| EntryTest::from_entry(&e))
            .filter(|e| e.path.to_str().is_some())
            .collect::<Vec<_>>();
        result.push(entries);
    }
    result
}


#[test]
fn same_collections() {
fn same_collections_single_vol() {
let backend = LocalBackend::new("tests/backups/single_vol");
let filenames = backend.file_names().unwrap();
let coll = Collections::from_filenames(filenames);
Expand All @@ -400,24 +431,46 @@ mod test {
assert_eq!(actual, expected);
}

#[test]
fn same_collections_multi_chain() {
    // The snapshots exposed by the high-level `Backup` API must match
    // the ones computed directly from the collection of file names.
    let backend = LocalBackend::new("tests/backups/multi_chain");
    let expected = {
        let filenames = backend.file_names().unwrap();
        let coll = Collections::from_filenames(filenames);
        from_collection(&coll)
    };
    let backup = Backup::new(backend).unwrap();
    let actual = to_test_snapshot(&backup);
    assert_eq!(actual, expected);
}

#[test]
fn same_files() {
let sigchain = single_vol_signature_chain();
let expected = from_sigchain(&sigchain);

let backend = LocalBackend::new("tests/backups/single_vol");
let backup = Backup::new(backend).unwrap();
let actual = backup.snapshots()
.unwrap()
.map(|s| {
s.entries()
.unwrap()
.as_signature()
.map(|f| EntryTest::from_entry(&f))
.filter(|f| f.path.to_str().is_some())
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let actual = from_backup(&backup);
assert_eq!(actual, expected);
}

#[test]
fn multi_chain_files() {
    // Every entry in this test backup shares the same user and group.
    fn entry(path: &str, mtime: &str) -> EntryTest {
        EntryTest::from_info(path, mtime, "michele", "michele")
    }

    let backend = LocalBackend::new("tests/backups/multi_chain");
    let backup = Backup::new(backend).unwrap();
    let actual = from_backup(&backup);
    // one entry list per snapshot: two fulls, each with one incremental
    let expected = vec![vec![entry("", "20160108t223141z"),
                             entry("file", "20160108t222924z")],
                        vec![entry("", "20160108t223153z"),
                             entry("file", "20160108t223153z")],
                        vec![entry("", "20160108t223206z"),
                             entry("file", "20160108t223206z")],
                        vec![entry("", "20160108t223215z"),
                             entry("file", "20160108t223215z")]];
    assert_eq!(actual, expected);
}
}
Binary file not shown.
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Hostname dellxps
Localdir .
Volume 1:
StartingPath .
EndingPath file
Hash SHA1 94bdf3c601d9e1569679c1717c0136c27c1d0493
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Hostname dellxps
Localdir .
Volume 1:
StartingPath .
EndingPath file
Hash SHA1 ae84b5357f21818de93104e513adba9dbd492c17
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Hostname dellxps
Localdir .
Volume 1:
StartingPath .
EndingPath file
Hash SHA1 fa30f19cdbc1df480bd7fe8e607bcd7c64cfefc9
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Hostname dellxps
Localdir .
Volume 1:
StartingPath .
EndingPath file
Hash SHA1 a68ccd119638628c16640d91e768fb8bad1aef17
Binary file not shown.
Binary file not shown.
Binary file not shown.

0 comments on commit 6df8e80

Please sign in to comment.