Commit 3482d0d1 authored by bescoto's avatar bescoto

Various modifications to backup, restore, and regress systems.

This version passes many tests but not all of them.  The backup patch
system was copied to restore.py.


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@281 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 12a6f2db
......@@ -28,24 +28,22 @@ def Mirror(src_rpath, dest_rpath):
SourceS = src_rpath.conn.backup.SourceStruct
DestS = dest_rpath.conn.backup.DestinationStruct
DestS.init_statistics()
source_rpiter = SourceS.get_source_select()
dest_sigiter = DestS.process_source_get_sigs(dest_rpath, source_rpiter, 0)
DestS.set_rorp_cache(dest_rpath, source_rpiter, 0)
dest_sigiter = DestS.get_sigs()
source_diffiter = SourceS.get_diffs(dest_sigiter)
DestS.patch(dest_rpath, source_diffiter)
DestS.write_statistics()
def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath):
    """Mirror + put increments in tree based at inc_rpath"""
    # Source and destination structs live on their respective connections
    SourceS = src_rpath.conn.backup.SourceStruct
    DestS = dest_rpath.conn.backup.DestinationStruct

    DestS.init_statistics()
    source_rpiter = SourceS.get_source_select()
    # NOTE(review): the next line and the set_rorp_cache/get_sigs pair
    # appear to be the pre- and post-change variants of the same step
    # (diff residue from the scraped changeset); only one of the two
    # signature-acquisition paths should remain -- confirm upstream.
    dest_sigiter = DestS.process_source_get_sigs(dest_rpath, source_rpiter, 1)
    DestS.set_rorp_cache(dest_rpath, source_rpiter, 1)
    dest_sigiter = DestS.get_sigs()
    source_diffiter = SourceS.get_diffs(dest_sigiter)
    DestS.patch_and_increment(dest_rpath, source_diffiter, inc_rpath)
    DestS.write_statistics()
class SourceStruct:
......@@ -99,14 +97,6 @@ static.MakeClass(SourceStruct)
class DestinationStruct:
"""Hold info used by destination side when backing up"""
def init_statistics(cls):
    """Set cls.statfileobj to a new active StatFileObj object"""
    # statistics.init_statfileobj() creates and registers the active
    # statistics object; a reference is kept for the filter methods
    cls.statfileobj = statistics.init_statfileobj()
def write_statistics(cls):
    """Write the active statistics object to its statistics file"""
    statistics.write_active_statfileobj()
def get_dest_select(cls, rpath, use_metadata = 1):
"""Return destination select rorpath iterator
......@@ -125,66 +115,175 @@ class DestinationStruct:
sel.parse_rbdir_exclude()
return sel.set_iter()
def dest_iter_filter(cls, dest_iter):
    """Destination rorps pass through this - record stats

    Generator wrapper: each destination rorp is counted in the
    statistics object and registered with the hardlink tracker
    (source = 0 marks it as destination-side), then yielded on
    unchanged.

    """
    for dest_rorp in dest_iter:
        cls.statfileobj.add_dest_file(dest_rorp)
        Hardlink.add_rorp(dest_rorp, source = 0)
        yield dest_rorp
def src_iter_filter(cls, source_iter):
    """Source rorps pass through this - record stats, write metadata

    Generator wrapper: counts each source rorp in the statistics
    object, registers it with the hardlink tracker (source = 1), and
    writes its metadata record before yielding it unchanged.

    """
    metadata.OpenMetadata()
    for src_rorp in source_iter:
        cls.statfileobj.add_source_file(src_rorp)
        Hardlink.add_rorp(src_rorp, source = 1)
        metadata.WriteMetadata(src_rorp)
        yield src_rorp
    # Only reached once the source iterator is exhausted
    metadata.CloseMetadata()
def set_rorp_cache(cls, baserp, source_iter, for_increment):
    """Initialize cls.CCPP, the destination rorp cache

    Collates the incoming source rorp iterator with an iterator over
    the destination directory and wraps the result in a
    CacheCollatedPostProcess, which handles statistics and deferred
    metadata writing from here on.

    for_increment should be true if we are mirror+incrementing,
    false if we are just mirroring.

    """
    # Diff residue removed: this span also contained the superseded
    # process_source_get_sigs method (duplicate def line, old
    # docstring, and a dead body using the removed iter filters).
    dest_iter = cls.get_dest_select(baserp, for_increment)
    collated = rorpiter.Collate2Iters(source_iter, dest_iter)
    # Cache twice the pipeline length so entries are not evicted while
    # their diffs may still be in flight
    cls.CCPP = CacheCollatedPostProcess(collated,
                                        Globals.pipeline_max_length*2)
def get_sigs(cls):
    """Yield signatures of any changed destination files

    Iterates the collated (source_rorp, dest_rorp) cache; pairs that
    are present on both sides with equal metadata (and, when hardlink
    preservation is on, equal link status) are skipped.  Everything
    else is flagged changed in the cache and a signature rorp is
    yielded for the source side to diff against.

    """
    for src_rorp, dest_rorp in cls.CCPP:
        if (src_rorp and dest_rorp and src_rorp == dest_rorp and
            (not Globals.preserve_hardlinks or
             Hardlink.rorp_eq(src_rorp, dest_rorp))): continue
        # At least one side exists; prefer the source index
        index = src_rorp and src_rorp.index or dest_rorp.index
        cls.CCPP.flag_changed(index)
        if (Globals.preserve_hardlinks and
            Hardlink.islinked(src_rorp or dest_rorp)):
            # Hardlinked elsewhere: no file data needed, just the link flag
            dest_sig = rpath.RORPath(index)
            dest_sig.flaglinked(Hardlink.get_link_index(dest_sig))
        elif dest_rorp:
            dest_sig = dest_rorp.getRORPath()
            if dest_rorp.isreg():
                # Attach an rdiff signature so the source can send a delta
                dest_sig.setfile(Rdiff.get_signature(dest_rorp))
        else: dest_sig = rpath.RORPath(index)
        yield dest_sig
def patch(cls, dest_rpath, source_diffiter, start_index = ()):
    """Patch dest_rpath with an rorpiter of diffs

    Runs every diff through a PatchITRB tree reducer, which also gets
    the rorp cache (cls.CCPP) so it can flag per-file success.  The
    cache is closed afterwards, flushing remaining metadata/stats.

    """
    # Diff residue removed: the superseded ITR construction without
    # cls.CCPP duplicated the line below.
    ITR = rorpiter.IterTreeReducer(PatchITRB, [dest_rpath, cls.CCPP])
    for diff in rorpiter.FillInIter(source_diffiter, dest_rpath):
        log.Log("Processing changed file " + diff.get_indexpath(), 5)
        ITR(diff.index, diff)
    ITR.Finish()
    cls.CCPP.close()
    dest_rpath.setdata()
def patch_and_increment(cls, dest_rpath, source_diffiter, inc_rpath):
    """Patch dest_rpath with rorpiter of diffs and write increments

    Like patch(), but uses IncrementITRB so an increment is written
    under inc_rpath for every changed file before it is overwritten.

    """
    # Diff residue removed: the superseded two-argument ITR
    # construction duplicated the line below.
    ITR = rorpiter.IterTreeReducer(IncrementITRB,
                                   [dest_rpath, inc_rpath, cls.CCPP])
    for diff in rorpiter.FillInIter(source_diffiter, dest_rpath):
        log.Log("Processing changed file " + diff.get_indexpath(), 5)
        ITR(diff.index, diff)
    ITR.Finish()
    cls.CCPP.close()
    dest_rpath.setdata()
static.MakeClass(DestinationStruct)
class CacheCollatedPostProcess:
    """Cache a collated iter of (source_rorp, dest_rp) pairs

    This is necessary for two reasons:

    1.  The patch function may need the original source_rorp or
        dest_rp information, which is not present in the diff it
        receives.

    2.  The metadata must match what is stored in the destination
        directory.  If there is an error we do not update the dest
        directory for that file, and the old metadata is used.  Thus
        we cannot write any metadata until we know the file has been
        processed correctly.

    The class caches older source_rorps and dest_rps so the patch
    function can retrieve them if necessary.  The patch function can
    also update the processed-correctly flag.  When an item falls out
    of the cache, we assume it has been processed, and write the
    metadata for it.

    """
    def __init__(self, collated_iter, cache_size):
        """Initialize new CCPP with the collated iter and a cache bound"""
        self.iter = collated_iter # generates (source_rorp, dest_rorp) pairs
        self.cache_size = cache_size
        self.statfileobj = statistics.init_statfileobj()
        metadata.OpenMetadata()

        # Maps index -> [source_rorp, dest_rorp, changed_flag,
        # success_flag].  changed_flag is true if the rorps differ;
        # success_flag is true if dest_rorp has been successfully
        # updated to source_rorp.  Both default to false.
        self.cache_dict = {}
        # Insertion-ordered list of cached indicies (FIFO eviction)
        self.cache_indicies = []

    def __iter__(self): return self

    def next(self):
        """Return next (source_rorp, dest_rorp) pair.  StopIteration passed"""
        source_rorp, dest_rorp = self.iter.next()
        self.pre_process(source_rorp, dest_rorp)
        index = source_rorp and source_rorp.index or dest_rorp.index
        self.cache_dict[index] = [source_rorp, dest_rorp, 0, 0]
        self.cache_indicies.append(index)
        if len(self.cache_indicies) > self.cache_size: self.shorten_cache()
        return source_rorp, dest_rorp

    def pre_process(self, source_rorp, dest_rorp):
        """Do initial processing on source_rorp and dest_rorp

        It will not be clear whether source_rorp and dest_rorp have
        errors at this point, so don't do anything which assumes they
        will be backed up correctly.

        """
        if source_rorp: Hardlink.add_rorp(source_rorp, source = 1)
        if dest_rorp: Hardlink.add_rorp(dest_rorp, source = 0)

    def shorten_cache(self):
        """Remove one element from cache, possibly adding it to metadata"""
        first_index = self.cache_indicies[0]
        del self.cache_indicies[0]
        old_source_rorp, old_dest_rorp, changed_flag, success_flag = \
                         self.cache_dict[first_index]
        del self.cache_dict[first_index]
        self.post_process(old_source_rorp, old_dest_rorp,
                          changed_flag, success_flag)

    def post_process(self, source_rorp, dest_rorp, changed, success):
        """Post process source_rorp and dest_rorp.

        changed will be true if the files have changed.  success will
        be true if the files have been successfully updated (this is
        always false for un-changed files).

        """
        if not changed or success:
            # Either nothing needed doing, or the update worked; the
            # statistics and metadata may reflect the source side.
            self.statfileobj.add_source_file(source_rorp)
            self.statfileobj.add_dest_file(dest_rorp)
        if success:
            self.statfileobj.add_changed(source_rorp, dest_rorp)
            metadata_rorp = source_rorp
        else:
            # Failed (or unchanged): keep the old destination metadata
            metadata_rorp = dest_rorp
            if changed: self.statfileobj.add_error()
        if metadata_rorp and metadata_rorp.lstat():
            metadata.WriteMetadata(metadata_rorp)

    def flag_success(self, index):
        """Signal that the file with given index was updated successfully"""
        self.cache_dict[index][3] = 1

    def flag_changed(self, index):
        """Signal that the file with given index has changed"""
        self.cache_dict[index][2] = 1

    def get_rorps(self, index):
        """Retrieve (source_rorp, dest_rorp) from cache"""
        return self.cache_dict[index][:2]

    def get_source_rorp(self, index):
        """Retrieve source_rorp with given index from cache"""
        return self.cache_dict[index][0]

    def close(self):
        """Process the remaining elements in the cache"""
        while self.cache_indicies: self.shorten_cache()
        metadata.CloseMetadata()
        statistics.write_active_statfileobj()
class PatchITRB(rorpiter.ITRBranch):
"""Patch an rpath with the given diff iters (use with IterTreeReducer)
......@@ -195,7 +294,7 @@ class PatchITRB(rorpiter.ITRBranch):
contents.
"""
def __init__(self, basis_root_rp):
def __init__(self, basis_root_rp, rorp_cache):
"""Set basis_root_rp, the base of the tree to be incremented"""
self.basis_root_rp = basis_root_rp
assert basis_root_rp.conn is Globals.local_connection
......@@ -267,6 +366,7 @@ class PatchITRB(rorpiter.ITRBranch):
else:
assert self.dir_replacement
self.base_rp.rmdir()
if self.dir_replacement.lstat():
rpath.rename(self.dir_replacement, self.base_rp)
......@@ -276,10 +376,10 @@ class IncrementITRB(PatchITRB):
Like PatchITRB, but this time also write increments.
"""
def __init__(self, basis_root_rp, inc_root_rp, rorp_cache):
    """Set increment root and delegate the rest to PatchITRB

    inc_root_rp is the base of the increment tree; rorp_cache is the
    shared CacheCollatedPostProcess, passed through to PatchITRB.

    """
    # Diff residue removed: the superseded two-argument def line and
    # the matching old PatchITRB.__init__ call duplicated this method.
    self.inc_root_rp = inc_root_rp
    self.cached_incrp = None
    PatchITRB.__init__(self, basis_root_rp, rorp_cache)
def get_incrp(self, index):
"""Return inc RPath by adding index to self.basis_root_rp"""
......
......@@ -194,21 +194,21 @@ class ErrorLog:
created. See the error policy file for more info.
"""
log_fileobj = None
log_inc_rp = None
_log_fileobj = None
_log_inc_rp = None
def open(cls, compress = 1):
    """Open the error log, prepare for writing

    Creates a new time-stamped error_log increment in the
    rdiff-backup-data directory and keeps the open file object in
    cls._log_fileobj.  Asserts the log is not already open and the
    target file does not already exist.

    """
    assert not cls._log_fileobj and not cls._log_inc_rp, "log already open"
    if compress: typestr = 'data.gz'
    else: typestr = 'data'
    # Fixed NameError: the module is Globals, not Global
    cls._log_inc_rp = Globals.rbdir.append("error_log.%s.%s" %
                                           (Time.curtimestr, typestr))
    assert not cls._log_inc_rp.lstat(), "Error file already exists"
    cls._log_fileobj = cls._log_inc_rp.open("wb", compress = compress)
def isopen(cls):
    """True if the error log file is currently open"""
    # Diff residue removed: the superseded check against the old
    # public attribute name (log_fileobj) duplicated this line.
    return cls._log_fileobj is not None
def write(cls, error_type, rp, exc):
"""Add line to log file indicating error exc with file rp"""
......@@ -218,7 +218,7 @@ class ErrorLog:
else:
s = re.sub("\n", " ", s)
s += "\n"
cls.log_fileobj.write(s)
cls._log_fileobj.write(s)
def get_indexpath(cls, rp):
"""Return filename for logging. rp is a rpath, string, or tuple"""
......@@ -240,8 +240,8 @@ class ErrorLog:
def close(cls):
    """Close the error log file and reset the class-level state"""
    # close() returning a false value indicates success here
    assert not cls._log_fileobj.close()
    cls._log_fileobj = cls._log_inc_rp = None
static.MakeClass(ErrorLog)
......@@ -280,6 +280,7 @@ def WriteMetadata(rorp):
def CloseMetadata():
"""Close the metadata file"""
global metadata_rp, metadata_fileobj
assert metadata_fileobj, "Metadata file not open"
try: fileno = metadata_fileobj.fileno() # will not work if GzipFile
except AttributeError: fileno = metadata_fileobj.fileobj.fileno()
os.fsync(fileno)
......
......@@ -34,7 +34,7 @@ recovered.
"""
from __future__ import generators
import Globals, restore, log, rorpiter, journal, TempFile
import Globals, restore, log, rorpiter, journal, TempFile, metadata, rpath
# regress_time should be set to the time we want to regress back to
# (usually the time of the last successful backup)
......@@ -43,6 +43,10 @@ regress_time = None
# This should be set to the latest unsuccessful backup time
unsuccessful_backup_time = None
# This is set by certain tests and allows overriding of global time
# variables.
time_override_mode = None
class RegressException(Exception):
"""Raised on any exception in regress process"""
......@@ -64,6 +68,9 @@ def Regress(mirror_rp):
assert mirror_rp.conn is inc_rpath.conn is Globals.local_connection
set_regress_time()
set_restore_times()
ITR = rorpiter.IterTreeReducer(RegressITRB, [])
for rf in iterate_meta_rfs(mirror_rp, inc_rpath): ITR(rf.index, rf)
ITR.Finish()
def set_regress_time():
"""Set global regress_time to previous sucessful backup
......@@ -73,6 +80,10 @@ def set_regress_time():
"""
global regress_time, unsuccessful_backup_time
if time_override_mode:
assert regress_time and unsuccessful_backup_time
return
curmir_incs = restore.get_inclist(Globals.rbdir.append("current_mirror"))
assert len(curmir_incs) == 2, \
"Found %s current_mirror flags, expected 2" % len(curmir_incs)
......@@ -134,10 +145,10 @@ class RegressFile(restore.RestoreFile):
"""
def __init__(self, mirror_rp, inc_rp, inc_list):
    """Initialize, limiting relevant_incs to the single regress inc

    Diff residue removed: the superseded lines called the misspelled
    RestoreFile._init__ with the undefined name inclist, and read the
    nonexistent attribute self.relevant.incs.

    """
    restore.RestoreFile.__init__(self, mirror_rp, inc_rp, inc_list)
    assert len(self.relevant_incs) <= 2, "Too many incs"
    if len(self.relevant_incs) == 2:
        # Second relevant inc is the one to regress back over
        self.regress_inc = self.relevant_incs[-1]
    else: self.regress_inc = None
def set_metadata_rorp(self, metadata_rorp):
......@@ -178,7 +189,8 @@ class RegressITRB(rorpiter.ITRBranch):
def fast_process(self, index, rf):
"""Process when nothing is a directory"""
if not rpath.cmp_attribs(rf.metadata_rorp, rf.mirror_rp):
if (not rf.metadata_rorp.lstat() or not rf.mirror_rp.lstat() or
not rpath.cmp_attribs(rf.metadata_rorp, rf.mirror_rp)):
if rf.metadata_rorp.isreg(): self.restore_orig_regfile(rf)
else:
if rf.mirror_rp.lstat(): rf.mirror_rp.delete()
......@@ -242,8 +254,8 @@ class RegressITRB(rorpiter.ITRBranch):
"""
if args and args[0] and isinstance(args[0], tuple):
filename = os.path.join(*args[0])
elif self.index: filename = os.path.join(*self.index)
filename = "/".join(args[0])
elif self.index: filename = "/".join(*self.index)
else: filename = "."
log.Log("Error '%s' processing %s" % (exc, filename), 2)
raise RegressException("Error during Regress")
......@@ -22,7 +22,7 @@
from __future__ import generators
import tempfile, os
import Globals, Time, Rdiff, Hardlink, rorpiter, selection, rpath, \
log, backup, static, robust, metadata
log, static, robust, metadata, statistics, TempFile
# This should be set to selection.Select objects over the source and
......@@ -220,11 +220,18 @@ class TargetStruct:
def patch(cls, target, diff_iter):
    """Patch target with the diffs from the mirror side

    This function and the associated ITRB is similar to the
    patching code in backup.py, but they have different error
    correction requirements, so it seemed easier to just repeat it
    all in this module.

    """
    # Diff residue removed: the superseded one-line delegation to
    # backup.DestinationStruct.patch duplicated the body below.
    ITR = rorpiter.IterTreeReducer(PatchITRB, [target])
    for diff in rorpiter.FillInIter(diff_iter, target):
        log.Log("Processing changed file " + diff.get_indexpath(), 5)
        ITR(diff.index, diff)
    ITR.Finish()
    target.setdata()
static.MakeClass(TargetStruct)
......@@ -279,6 +286,7 @@ class CachedRF:
assert new_rfs, "No RFs added for index %s" % index
self.rf_list[0:0] = new_rfs
class RestoreFile:
"""Hold data about a single mirror file and its related increments
......@@ -390,8 +398,12 @@ class RestoreFile:
def yield_sub_rfs(self):
"""Return RestoreFiles under current RestoreFile (which is dir)"""
assert self.mirror_rp.isdir() or self.inc_rp.isdir()
if self.mirror_rp.isdir():
mirror_iter = self.yield_mirrorrps(self.mirror_rp)
else: mirror_iter = iter([])
if self.inc_rp.isdir():
inc_pair_iter = self.yield_inc_complexes(self.inc_rp)
else: inc_pair_iter = iter([])
collated = rorpiter.Collate2Iters(mirror_iter, inc_pair_iter)
for mirror_rp, inc_pair in collated:
......@@ -405,6 +417,7 @@ class RestoreFile:
def yield_mirrorrps(self, mirrorrp):
    """Yield the RPaths of the entries inside directory mirrorrp

    The rdiff-backup-data directory itself is skipped.

    """
    assert mirrorrp.isdir()
    for name in robust.listrp(mirrorrp):
        entry = mirrorrp.append(name)
        if entry.index == ('rdiff-backup-data',): continue
        yield entry
......@@ -440,3 +453,92 @@ class RestoreFile:
keys = inc_dict.keys()
keys.sort()
for key in keys: yield inc_dict[key]
class PatchITRB(rorpiter.ITRBranch):
    """Patch an rpath with the given diff iters (use with IterTreeReducer)

    The main complication here involves directories.  We have to
    finish processing the directory after what's in the directory, as
    the directory may have inappropriate permissions to alter the
    contents or the dir's mtime could change as we change the
    contents.

    This code was originally taken from backup.py.  However, because
    of different error correction requirements, it is repeated here.

    """
    def __init__(self, basis_root_rp):
        """Set basis_root_rp, the base of the tree to be incremented"""
        self.basis_root_rp = basis_root_rp
        assert basis_root_rp.conn is Globals.local_connection
        # Reuse an already-active statistics object if one exists
        self.statfileobj = (statistics.get_active_statfileobj() or
                            statistics.StatFileObj())
        # dir_replacement/dir_update hold per-directory state between
        # start_process and end_process
        self.dir_replacement, self.dir_update = None, None
        self.cached_rp = None
    def get_rp_from_root(self, index):
        """Return RPath by adding index to self.basis_root_rp"""
        # One-element cache: consecutive calls usually reuse the index
        if not self.cached_rp or self.cached_rp.index != index:
            self.cached_rp = self.basis_root_rp.new_index(index)
        return self.cached_rp
    def can_fast_process(self, index, diff_rorp):
        """True if diff_rorp and mirror are not directories"""
        rp = self.get_rp_from_root(index)
        return not diff_rorp.isdir() and not rp.isdir()
    def fast_process(self, index, diff_rorp):
        """Patch base_rp with diff_rorp (case where neither is directory)"""
        rp = self.get_rp_from_root(index)
        # Patch into a temp file, then rename so the replacement is atomic
        tf = TempFile.new(rp)
        self.patch_to_temp(rp, diff_rorp, tf)
        rpath.rename(tf, rp)
    def patch_to_temp(self, basis_rp, diff_rorp, new):
        """Patch basis_rp, writing output in new, which doesn't exist yet"""
        if diff_rorp.isflaglinked():
            # Hardlinked to a file already written -- just link, no data
            Hardlink.link_rp(diff_rorp, new, self.basis_root_rp)
        elif diff_rorp.get_attached_filetype() == 'snapshot':
            # Full copy attached, no basis needed
            rpath.copy(diff_rorp, new)
        else:
            assert diff_rorp.get_attached_filetype() == 'diff'
            Rdiff.patch_local(basis_rp, diff_rorp, new)
        if new.lstat(): rpath.copy_attribs(diff_rorp, new)
    def start_process(self, index, diff_rorp):
        """Start processing directory - record information for later"""
        base_rp = self.base_rp = self.get_rp_from_root(index)
        assert diff_rorp.isdir() or base_rp.isdir() or not base_rp.index
        if diff_rorp.isdir(): self.prepare_dir(diff_rorp, base_rp)
        else: self.set_dir_replacement(diff_rorp, base_rp)
    def set_dir_replacement(self, diff_rorp, base_rp):
        """Set self.dir_replacement, which holds data until done with dir

        This is used when base_rp is a dir, and diff_rorp is not.

        """
        assert diff_rorp.get_attached_filetype() == 'snapshot'
        self.dir_replacement = TempFile.new(base_rp)
        rpath.copy_with_attribs(diff_rorp, self.dir_replacement)
        # Open up permissions so the directory's contents can still be
        # processed before it is replaced in end_process
        if base_rp.isdir(): base_rp.chmod(0700)
    def prepare_dir(self, diff_rorp, base_rp):
        """Prepare base_rp to turn into a directory"""
        self.dir_update = diff_rorp.getRORPath() # make copy in case changes
        if not base_rp.isdir():
            if base_rp.lstat(): base_rp.delete()
            base_rp.mkdir()
        # NOTE(review): source indentation was lost; chmod is placed at
        # the outer level (applies to pre-existing dirs too) -- confirm
        # against upstream backup.py
        base_rp.chmod(0700)
    def end_process(self):
        """Finish processing directory"""
        if self.dir_update:
            # Directory stayed a directory: just update its attributes
            assert self.base_rp.isdir()
            rpath.copy_attribs(self.dir_update, self.base_rp)
        else:
            # Directory replaced by a non-directory saved earlier
            assert self.dir_replacement
            self.base_rp.rmdir()
            if self.dir_replacement.lstat():
                rpath.rename(self.dir_replacement, self.base_rp)
......@@ -149,24 +149,6 @@ def Collate2Iters(riter1, riter2):
yield (None, relem2)
relem2 = None
def get_dissimilar_indicies(src_init_iter, dest_init_iter, statfileobj = None):
    """Get dissimilar indicies given two rorpiters

    Returns an iterator which enumerates the indicies of the rorps
    which are different on the source and destination ends.  If
    statfileobj is given, call add_changed on each pair of different
    indicies.

    """
    collated = Collate2Iters(src_init_iter, dest_init_iter)
    for src_rorp, dest_rorp in collated:
        # Skip pairs present on both sides with equal metadata (and,
        # when hardlinks are preserved, equal link status)
        if (src_rorp and dest_rorp and src_rorp == dest_rorp and
            (not Globals.preserve_hardlinks or
             Hardlink.rorp_eq(src_rorp, dest_rorp))): continue
        if statfileobj: statfileobj.add_changed(src_rorp, dest_rorp)
        # At least one side exists; prefer the destination index
        if not dest_rorp: yield src_rorp.index
        else: yield dest_rorp.index
class IndexedTuple(UserList.UserList):
"""Like a tuple, but has .index
......
......@@ -90,7 +90,7 @@ def copy(rpin, rpout, compress = 0):
if rpout.lstat():
if rpin.isreg() or not cmp(rpin, rpout):
rpout.delete() # easier to write that compare
rpout.delete() # easier to write than compare
else: return
if rpin.isreg(): copy_reg_file(rpin, rpout, compress)
......@@ -177,7 +177,7 @@ def cmp_attribs(rp1, rp2):
elif rp1.ischardev() and rp2.ischardev(): result = 1
else: result = (rp1.getmtime() == rp2.getmtime())
log.Log("Compare attribs of %s and %s: %s" %
(rp1.path, rp2.path, result), 7)
(rp1.get_indexpath(), rp2.get_indexpath(), result), 7)
return result
def copy_with_attribs(rpin, rpout, compress = 0):
......@@ -694,11 +694,7 @@ class RPath(RORPath):
def delete(self):
"""Delete file at self.path. Recursively deletes directories."""
log.Log("Deleting %s" % self.path, 7)
self.setdata()
if not self.lstat():
log.Log("Warning: %s does not exist---deleted in meantime?"
% (self.path,), 2)
elif self.isdir():
if self.isdir():
try: self.rmdir()
except os.error: shutil.rmtree(self.path)
else: self.conn.os.unlink(self.path)
......
......@@ -5,6 +5,7 @@ from rdiff_backup.rpath import RPath
from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
selection, lazy, Time, rpath
RBBin = "../rdiff-backup"
SourceDir = "../rdiff_backup"
AbsCurdir = os.getcwd() # Absolute path name of current directory
AbsTFdir = AbsCurdir+"/testfiles"
......@@ -56,12 +57,14 @@ def rdiff_backup(source_local, dest_local, src_dir, dest_dir,
dest_dir = ("test2/tmp; ../../%s/rdiff-backup --server::../../%s" %
(SourceDir, dest_dir))
cmdargs = [SourceDir + "/rdiff-backup", extra_options]
cmdargs = [RBBin, extra_options]
if not (source_local and dest_local): cmdargs.append("--remote-schema %s")
if current_time: cmdargs.append("--current-time %s" % current_time)
os.system(" ".join(cmdargs))
cmdargs.extend([src_dir, dest_dir])
cmdline = " ".join(cmdargs)
print "Executing: ", cmdline
assert not os.system(cmdline)
def cmd_schemas2rps(schema_list, remote_schema):
"""Input list of file descriptions and the remote schema, return rps
......
"""regresstest - test the regress module. Not to be confused with the
regression tests."""
"""regresstest - test the regress module.
Not to be confused with the regression tests.
"""
import unittest
from commontest import *
from rdiff_backup import regress
Log.setverbosity(7)
class RegressTest(unittest.TestCase):
    """Test the regress module by regressing real backup directories"""
    # Fixed: a stray bare `XXX` expression here raised NameError as
    # soon as the class body was executed.
    regress_rp1 = rpath.RPath(Globals.local_connection,
                              "testfiles/regress_output1")
    regress_rp2 = rpath.RPath(Globals.local_connection,
                              "testfiles/regress_output2")

    def make_output(self, level):
        """Set up two rdiff-backup destination dirs of level and level+1

        testfiles/regress_output1 will be a copy of
        testfiles/increment1 through testfiles/increment{level}

        testfiles/regress_output2 will have everything backed up in
        testfiles/regress_output1 + testfiles/increment{level+1}.

        The time of each increment will be 10000*level.

        """
        assert 1 <= level <= 3
        if self.regress_rp1.lstat(): Myrm(self.regress_rp1.path)
        if self.regress_rp2.lstat(): Myrm(self.regress_rp2.path)

        # Make regress_output1
        Log("Making %s" % (self.regress_rp1.path,), 4)
        for i in range(1, level+1):
            rdiff_backup(1, 1,
                         "testfiles/increment%s" % (i,),
                         self.regress_rp1.path,
                         current_time = 10000*i)

        # Now make regress_output2 = regress_output1 + one more backup
        Log("Making %s" % (self.regress_rp2.path,), 4)
        assert not os.system("cp -a %s %s" %
                             (self.regress_rp1.path, self.regress_rp2.path))
        rdiff_backup(1, 1,
                     "testfiles/increment%s" % (level+1),
                     self.regress_rp2.path,
                     current_time = 10000*(level+1))
        self.regress_rp1.setdata()
        self.regress_rp2.setdata()

    def test_full(self):
        """Test regressing a full directory to older state

        Make two directories, one with one more backup in it.  Then
        regress the bigger one, and then make sure they compare the
        same.

        """
        for level in range(1, 4):
            self.make_output(level)
            # Use time_override_mode so regress skips its own
            # current_mirror-based time detection
            regress.regress_time = 10000*level
            regress.unsuccessful_backup_time = 10000*(level+1)
            regress.time_override_mode = 1
            Globals.rbdir = self.regress_rp2.append_path("rdiff-backup-data")
            Log("######### Beginning regress ###########", 5)
            regress.Regress(self.regress_rp2)
            assert CompareRecursive(self.regress_rp1, self.regress_rp2,
                                    exclude_rbdir = 0)
if __name__ == "__main__": unittest.main()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment