Commit d7abfbe2 authored by bescoto

More changes in preparation for 0.11.2


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@284 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent a24ac864
@@ -10,6 +10,38 @@ Fixed selection bug: In 0.11.1, files which were included in one
backup would be automatically included in the next. Now you can
include/exclude files session-by-session.
Fixed ownership compare bug: In 0.11.1, backups where the destination
side was not root would preserve ownership information by recording it
in the metadata file. However, mere ownership changes would not
trigger creation of new increments. This has been fixed.
Added the --no-compare-inode switch. You probably don't need to use
it though.
If a special file cannot be created on the destination side, a 0
length regular file will be written instead as a placeholder.
(Restores should work fine because of the metadata file.)
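A minimal sketch of that placeholder behaviour, with invented helper names
(the real logic is PatchITRB.write_special later in this commit): try to
create the special file, and fall back to an empty regular file if that
fails, so a later restore can still recover the real type from the metadata
file.

    import os

    def create_special_or_placeholder(make_special, dest_path):
        """Try to create a special file; leave a zero-length placeholder on failure"""
        try:
            make_special(dest_path)            # e.g. os.mkfifo(dest_path)
        except OSError:
            if os.path.lexists(dest_path): os.remove(dest_path)  # drop any partial result
            open(dest_path, "wb").close()      # empty regular file stands in for it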
Yet another error handling strategy (hopefully this is the last one
for a while, because this stuff isn't very exciting, and takes a long
time to write):
All recoverable errors are classified into one of three groups:
ListErrors, UpdateErrors, and SpecialFileErrors. rdiff-backup's
reaction to each error is more formally defined (see the error
policy page, currently at
http://rdiff-backup.stanford.edu/error_policy.html).
rdiff-backup makes no attempt to recover or clean up after
unrecoverable errors.
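The pattern behind this, as added to robust.py in this commit
(get_error_handler and check_common_error), looks roughly like the sketch
below; RECOVERABLE, make_error_handler, and run_with_handler are invented
names used only for illustration.

    import errno

    RECOVERABLE = (errno.EPERM, errno.ENOENT, errno.EACCES, errno.EBUSY)

    def make_error_handler(error_type):
        """Return a handler that reports the failure and signals it to the caller"""
        def handler(exc, path):
            print "%s %s: %s" % (error_type, path, exc)   # stand-in for the error log
            return 0                                      # 0 means the operation failed
        return handler

    def run_with_handler(handler, func, path):
        """Call func(path), routing recoverable OS errors to handler instead of raising"""
        try:
            return func(path)
        except (OSError, IOError), exc:
            if exc.errno in RECOVERABLE: return handler(exc, path)
            raise                                         # unrecoverable: propagate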
Improved error logging. Instead of the old haphazard reporting
method, which sometimes didn't indicate the file an error occurred on,
now all recoverable errors are reported in a standard format and also
written to the error_log.<time>.data file in the rdiff-backup-data
directory.
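In rough terms this means one file per backup session, named after the
session time and optionally gzipped, with one line per recoverable error.
The sketch below uses invented helper names, and the per-line layout shown
is an assumption (the actual format comes from ErrorLog.get_log_string,
which is not part of this commit).

    import gzip

    def open_error_log(rbdir, time_string, compress=1):
        """Open rdiff-backup-data/error_log.<time>.data[.gz] for writing"""
        suffix = compress and "data.gz" or "data"
        path = "%s/error_log.%s.%s" % (rbdir, time_string, suffix)
        if compress: return gzip.open(path, "wb")
        return open(path, "wb")

    def log_error(fileobj, error_type, filename, exc):
        """One line per recoverable error; 'type filename: message' is an assumed layout"""
        fileobj.write("%s %s: %s\n" % (error_type, filename, exc))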
New in v0.11.1 (2002/12/31)
---------------------------
...
.TH RDIFF-BACKUP 1 "AUGUST 2001" "Version 0.2.1" "User Manuals" \" -*- nroff -*-
.SH NAME
rdiff-backup \- local/remote mirror and incremental backup
@@ -207,6 +205,14 @@ Do not create an rdiff-backup-data directory or make any increments.
In this mode rdiff-backup is similar to rsync (but usually
slower).
.TP
.B --no-compare-inode
This relatively esoteric option prevents rdiff-backup from flagging a
file as changed when its inode changes. This option may be useful if
you are backing up two different directories to the same rdiff-backup
destination directory. The downside is that hard link information may
get messed up, as the metadata file may no longer have the correct
inode information.
.TP
.B --no-compression
Disable the default gzip compression of most of the .snapshot and .diff
increment files stored in the rdiff-backup-data directory. A backup
...
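Concretely, the comparison rule that --no-compare-inode relaxes is the one
in RORPath.__eq__ (changed later in this commit). Simplified to its
essentials, with a made-up function name and with the uid/gid and atime
special cases left out, it behaves like this:

    def attribs_equal(a, b, compare_inode=1):
        """Compare two attribute dictionaries the way the mirror comparison does"""
        for key in a.keys():
            if key == 'devloc' or key == 'nlink': continue         # never compared
            if key == 'size' and a.get('type') != 'reg': continue  # size only for regular files
            if key == 'inode' and (a.get('type') != 'reg' or not compare_inode):
                continue                                           # skipped under --no-compare-inode
            if not b.has_key(key) or b[key] != a[key]: return 0
        return 1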
@@ -40,7 +40,7 @@ conn_bufsize = 98304
# This is used in rorpiter.CacheIndexable. The number represents the
# number of rpaths which may be stuck in buffers when moving over a
# remote connection.
pipeline_max_length = int(conn_bufsize / 150)
pipeline_max_length = int(conn_bufsize / 150)*2
# True if script is running as a server
server = None
@@ -171,6 +171,10 @@ security_level = "all"
# deal with paths inside of restrict_path.
restrict_path = None
# If set, a file will be marked as changed if its inode changes. See
# the man page under --no-compare-inode for more information.
compare_inode = 1
def get(name):
"""Return the value of something in this module"""
...
@@ -21,7 +21,7 @@
from __future__ import generators
import getopt, sys, re, os
from log import Log, LoggerError
from log import Log, LoggerError, ErrorLog
import Globals, Time, SetConnections, selection, robust, rpath, \
manage, backup, connection, restore, FilenameMapping, \
Security, Hardlink, regress, C
...@@ -51,7 +51,8 @@ def parse_cmdlineoptions(arglist): ...@@ -51,7 +51,8 @@ def parse_cmdlineoptions(arglist):
"exclude-regexp=", "exclude-special-files", "force", "exclude-regexp=", "exclude-special-files", "force",
"include=", "include-filelist=", "include-filelist-stdin", "include=", "include-filelist=", "include-filelist-stdin",
"include-globbing-filelist=", "include-regexp=", "include-globbing-filelist=", "include-regexp=",
"list-changed-since=", "list-increments", "no-compression", "list-changed-since=", "list-increments",
"no-compare-inode", "no-compression",
"no-compression-regexp=", "no-hard-links", "null-separator", "no-compression-regexp=", "no-hard-links", "null-separator",
"parsable-output", "print-statistics", "quoting-char=", "parsable-output", "print-statistics", "quoting-char=",
"remote-cmd=", "remote-schema=", "remove-older-than=", "remote-cmd=", "remote-schema=", "remove-older-than=",
...@@ -104,6 +105,7 @@ def parse_cmdlineoptions(arglist): ...@@ -104,6 +105,7 @@ def parse_cmdlineoptions(arglist):
restore_timestr, action = arg, "list-changed-since" restore_timestr, action = arg, "list-changed-since"
elif opt == "-l" or opt == "--list-increments": elif opt == "-l" or opt == "--list-increments":
action = "list-increments" action = "list-increments"
elif opt == "--no-compare-inode": Globals.set("compare_inode", 0)
elif opt == "--no-compression": Globals.set("compression", None) elif opt == "--no-compression": Globals.set("compression", None)
elif opt == "--no-compression-regexp": elif opt == "--no-compression-regexp":
Globals.set("no_compression_regexp_string", arg) Globals.set("no_compression_regexp_string", arg)
...@@ -219,6 +221,7 @@ def take_action(rps): ...@@ -219,6 +221,7 @@ def take_action(rps):
def cleanup(): def cleanup():
"""Do any last minute cleaning before exiting""" """Do any last minute cleaning before exiting"""
Log("Cleaning up", 6) Log("Cleaning up", 6)
if ErrorLog.isopen(): ErrorLog.close()
Log.close_logfile() Log.close_logfile()
if not Globals.server: SetConnections.CloseConnections() if not Globals.server: SetConnections.CloseConnections()
...@@ -296,6 +299,7 @@ option.""" % rpout.path) ...@@ -296,6 +299,7 @@ option.""" % rpout.path)
if not datadir.lstat(): datadir.mkdir() if not datadir.lstat(): datadir.mkdir()
if Log.verbosity > 0: if Log.verbosity > 0:
Log.open_logfile(datadir.append("backup.log")) Log.open_logfile(datadir.append("backup.log"))
ErrorLog.open(Time.curtimestr, compress = Globals.compression)
backup_warn_if_infinite_regress(rpin, rpout) backup_warn_if_infinite_regress(rpin, rpout)
def backup_warn_if_infinite_regress(rpin, rpout): def backup_warn_if_infinite_regress(rpin, rpout):
...@@ -517,10 +521,11 @@ def RemoveOlderThan(rootrp): ...@@ -517,10 +521,11 @@ def RemoveOlderThan(rootrp):
Log("Deleting increment(s) before %s" % timep, 4) Log("Deleting increment(s) before %s" % timep, 4)
times_in_secs = [inc.getinctime() for inc in times_in_secs = [inc.getinctime() for inc in
restore.get_inclist(datadir.append_path("increments"))] restore.get_inclist(Globals.rbdir.append_path("increments"))]
times_in_secs = filter(lambda t: t < time, times_in_secs) times_in_secs = filter(lambda t: t < time, times_in_secs)
if not times_in_secs: if not times_in_secs:
Log.FatalError("No increments older than %s found" % timep) Log.FatalError("No increments older than %s found, exiting."
% (timep,), 1)
times_in_secs.sort() times_in_secs.sort()
inc_pretty_time = "\n".join(map(Time.timetopretty, times_in_secs)) inc_pretty_time = "\n".join(map(Time.timetopretty, times_in_secs))
...@@ -532,7 +537,7 @@ def RemoveOlderThan(rootrp): ...@@ -532,7 +537,7 @@ def RemoveOlderThan(rootrp):
if len(times_in_secs) == 1: if len(times_in_secs) == 1:
Log("Deleting increment at time:\n" + inc_pretty_time, 3) Log("Deleting increment at time:\n" + inc_pretty_time, 3)
else: Log("Deleting increments at times:\n" + inc_pretty_time, 3) else: Log("Deleting increments at times:\n" + inc_pretty_time, 3)
manage.delete_earlier_than(datadir, time) manage.delete_earlier_than(Globals.rbdir, time)
def rom_check_dir(rootrp): def rom_check_dir(rootrp):
"""Check destination dir before RemoveOlderThan""" """Check destination dir before RemoveOlderThan"""
...@@ -571,7 +576,16 @@ def checkdest_need_check(dest_rp): ...@@ -571,7 +576,16 @@ def checkdest_need_check(dest_rp):
if not dest_rp.isdir() or not Globals.rbdir.isdir(): return None if not dest_rp.isdir() or not Globals.rbdir.isdir(): return None
curmirroot = Globals.rbdir.append("current_mirror") curmirroot = Globals.rbdir.append("current_mirror")
curmir_incs = restore.get_inclist(curmirroot) curmir_incs = restore.get_inclist(curmirroot)
if not curmir_incs: return None if not curmir_incs:
Log.FatalError(
"""Bad rdiff-backup-data dir on destination side
The rdiff-backup data directory
%s
exists, but we cannot find a valid current_mirror marker. You can
avoid this message by removing this directory; however any data in it
will be lost.
""" % (Globals.rbdir.path,))
elif len(curmir_incs) == 1: return 0 elif len(curmir_incs) == 1: return 0
else: else:
assert len(curmir_incs) == 2, "Found too many current_mirror incs!" assert len(curmir_incs) == 2, "Found too many current_mirror incs!"
...
...@@ -44,7 +44,7 @@ def open_dir_stats_file(): ...@@ -44,7 +44,7 @@ def open_dir_stats_file():
if Globals.compression: suffix = "data.gz" if Globals.compression: suffix = "data.gz"
else: suffix = "data" else: suffix = "data"
_dir_stats_rp = increment.get_inc( _dir_stats_rp = increment.get_inc(
Globals.rbdir.append("directory_statistics"), Time.curtime, suffix) Globals.rbdir.append("directory_statistics"), suffix, Time.curtime)
if _dir_stats_rp.lstat(): if _dir_stats_rp.lstat():
log.Log("Warning, statistics file %s already exists, appending" % log.Log("Warning, statistics file %s already exists, appending" %
...@@ -69,7 +69,7 @@ def close_dir_stats_file(): ...@@ -69,7 +69,7 @@ def close_dir_stats_file():
def write_session_statistics(statobj): def write_session_statistics(statobj):
"""Write session statistics into file, log""" """Write session statistics into file, log"""
stat_inc = increment.get_inc( stat_inc = increment.get_inc(
Globals.rbdir.append("session_statistics"), Time.curtime, "data") Globals.rbdir.append("session_statistics"), "data", Time.curtime)
statobj.StartTime = Time.curtime statobj.StartTime = Time.curtime
statobj.EndTime = time.time() statobj.EndTime = time.time()
...
@@ -25,7 +25,7 @@ import Globals, log, static, TempFile, rpath
def get_signature(rp):
"""Take signature of rpin file and return in file object"""
log.Log("Getting signature of %s" % rp.path, 7)
log.Log("Getting signature of %s" % rp.get_indexpath(), 7)
return librsync.SigFile(rp.open("rb"))
def get_delta_sigfileobj(sig_fileobj, rp_new):
...
@@ -172,8 +172,8 @@ def init_connection_routing(conn, conn_number, remote_cmd):
def init_connection_settings(conn):
"""Tell new conn about log settings and updated globals"""
conn.Log.setverbosity(Log.verbosity)
conn.log.Log.setverbosity(Log.verbosity)
conn.Log.setterm_verbosity(Log.term_verbosity)
conn.log.Log.setterm_verbosity(Log.term_verbosity)
for setting_name in Globals.changed_settings:
conn.Globals.set(setting_name, Globals.get(setting_name))
FilenameMapping.set_init_quote_vals()
...
@@ -49,7 +49,8 @@ def setcurtime_local(timeinseconds):
def setprevtime(timeinseconds):
"""Sets the previous inc time in prevtime and prevtimestr"""
assert timeinseconds > 0, timeinseconds
assert 0 < timeinseconds < curtime, \
"Time %s is out of bounds" % (timeinseconds,)
timestr = timetostring(timeinseconds)
for conn in Globals.connections:
conn.Time.setprevtime_local(timeinseconds, timestr)
...
...@@ -30,7 +30,7 @@ def Mirror(src_rpath, dest_rpath): ...@@ -30,7 +30,7 @@ def Mirror(src_rpath, dest_rpath):
source_rpiter = SourceS.get_source_select() source_rpiter = SourceS.get_source_select()
DestS.set_rorp_cache(dest_rpath, source_rpiter, 0) DestS.set_rorp_cache(dest_rpath, source_rpiter, 0)
dest_sigiter = DestS.get_sigs() dest_sigiter = DestS.get_sigs(dest_rpath)
source_diffiter = SourceS.get_diffs(dest_sigiter) source_diffiter = SourceS.get_diffs(dest_sigiter)
DestS.patch(dest_rpath, source_diffiter) DestS.patch(dest_rpath, source_diffiter)
...@@ -41,7 +41,7 @@ def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath): ...@@ -41,7 +41,7 @@ def Mirror_and_increment(src_rpath, dest_rpath, inc_rpath):
source_rpiter = SourceS.get_source_select() source_rpiter = SourceS.get_source_select()
DestS.set_rorp_cache(dest_rpath, source_rpiter, 1) DestS.set_rorp_cache(dest_rpath, source_rpiter, 1)
dest_sigiter = DestS.get_sigs() dest_sigiter = DestS.get_sigs(dest_rpath)
source_diffiter = SourceS.get_diffs(dest_sigiter) source_diffiter = SourceS.get_diffs(dest_sigiter)
DestS.patch_and_increment(dest_rpath, source_diffiter, inc_rpath) DestS.patch_and_increment(dest_rpath, source_diffiter, inc_rpath)
...@@ -74,23 +74,37 @@ class SourceStruct: ...@@ -74,23 +74,37 @@ class SourceStruct:
def get_diffs(cls, dest_sigiter): def get_diffs(cls, dest_sigiter):
"""Return diffs of any files with signature in dest_sigiter""" """Return diffs of any files with signature in dest_sigiter"""
source_rps = cls.source_select source_rps = cls.source_select
def get_one_diff(dest_sig): error_handler = robust.get_error_handler("ListError")
def attach_snapshot(diff_rorp, src_rp):
"""Attach file of snapshot to diff_rorp, w/ error checking"""
fileobj = robust.check_common_error(
error_handler, rpath.RPath.open, (src_rp, "rb"))
if fileobj: diff_rorp.setfile(fileobj)
else: diff_rorp.zero()
diff_rorp.set_attached_filetype('snapshot')
def attach_diff(diff_rorp, src_rp, dest_sig):
"""Attach file of diff to diff_rorp, w/ error checking"""
fileobj = robust.check_common_error(
error_handler, Rdiff.get_delta_sigrp, (dest_sig, src_rp))
if fileobj:
diff_rorp.setfile(fileobj)
diff_rorp.set_attached_filetype('diff')
else:
diff_rorp.zero()
diff_rorp.set_attached_filetype('snapshot')
for dest_sig in dest_sigiter:
src_rp = (source_rps.get(dest_sig.index) or src_rp = (source_rps.get(dest_sig.index) or
rpath.RORPath(dest_sig.index)) rpath.RORPath(dest_sig.index))
diff_rorp = src_rp.getRORPath() diff_rorp = src_rp.getRORPath()
if dest_sig.isflaglinked(): if dest_sig.isflaglinked():
diff_rorp.flaglinked(dest_sig.get_link_flag()) diff_rorp.flaglinked(dest_sig.get_link_flag())
elif dest_sig.isreg() and src_rp.isreg(): elif dest_sig.isreg() and src_rp.isreg():
diff_rorp.setfile(Rdiff.get_delta_sigrp(dest_sig, src_rp)) attach_diff(diff_rorp, src_rp, dest_sig)
diff_rorp.set_attached_filetype('diff') elif src_rp.isreg(): attach_snapshot(diff_rorp, src_rp)
else: else: diff_rorp.set_attached_filetype('snapshot')
diff_rorp.set_attached_filetype('snapshot') yield diff_rorp
if src_rp.isreg(): diff_rorp.setfile(src_rp.open("rb"))
return diff_rorp
for dest_sig in dest_sigiter:
diff = robust.check_common_error(None, get_one_diff, [dest_sig])
if diff: yield diff
static.MakeClass(SourceStruct) static.MakeClass(SourceStruct)
...@@ -127,7 +141,7 @@ class DestinationStruct: ...@@ -127,7 +141,7 @@ class DestinationStruct:
cls.CCPP = CacheCollatedPostProcess(collated, cls.CCPP = CacheCollatedPostProcess(collated,
Globals.pipeline_max_length*2) Globals.pipeline_max_length*2)
def get_sigs(cls): def get_sigs(cls, dest_base_rpath):
"""Yield signatures of any changed destination files""" """Yield signatures of any changed destination files"""
for src_rorp, dest_rorp in cls.CCPP: for src_rorp, dest_rorp in cls.CCPP:
if (src_rorp and dest_rorp and src_rorp == dest_rorp and if (src_rorp and dest_rorp and src_rorp == dest_rorp and
...@@ -142,7 +156,9 @@ class DestinationStruct: ...@@ -142,7 +156,9 @@ class DestinationStruct:
elif dest_rorp: elif dest_rorp:
dest_sig = dest_rorp.getRORPath() dest_sig = dest_rorp.getRORPath()
if dest_rorp.isreg(): if dest_rorp.isreg():
dest_sig.setfile(Rdiff.get_signature(dest_rorp)) dest_rp = dest_base_rpath.new_index(index)
assert dest_rp.isreg()
dest_sig.setfile(Rdiff.get_signature(dest_rp))
else: dest_sig = rpath.RORPath(index) else: dest_sig = rpath.RORPath(index)
yield dest_sig yield dest_sig
...@@ -250,14 +266,12 @@ class CacheCollatedPostProcess: ...@@ -250,14 +266,12 @@ class CacheCollatedPostProcess:
""" """
if not changed or success: if not changed or success:
self.statfileobj.add_source_file(source_rorp) if source_rorp: self.statfileobj.add_source_file(source_rorp)
self.statfileobj.add_dest_file(dest_rorp) if dest_rorp: self.statfileobj.add_dest_file(dest_rorp)
if success: if success:
self.statfileobj.add_changed(source_rorp, dest_rorp) self.statfileobj.add_changed(source_rorp, dest_rorp)
metadata_rorp = source_rorp metadata_rorp = source_rorp
else: else: metadata_rorp = dest_rorp
metadata_rorp = dest_rorp
if changed: self.statfileobj.add_error()
if metadata_rorp and metadata_rorp.lstat(): if metadata_rorp and metadata_rorp.lstat():
metadata.WriteMetadata(metadata_rorp) metadata.WriteMetadata(metadata_rorp)
...@@ -294,7 +308,7 @@ class PatchITRB(rorpiter.ITRBranch): ...@@ -294,7 +308,7 @@ class PatchITRB(rorpiter.ITRBranch):
contents. contents.
""" """
def __init__(self, basis_root_rp, rorp_cache): def __init__(self, basis_root_rp, CCPP):
"""Set basis_root_rp, the base of the tree to be incremented""" """Set basis_root_rp, the base of the tree to be incremented"""
self.basis_root_rp = basis_root_rp self.basis_root_rp = basis_root_rp
assert basis_root_rp.conn is Globals.local_connection assert basis_root_rp.conn is Globals.local_connection
...@@ -302,6 +316,8 @@ class PatchITRB(rorpiter.ITRBranch): ...@@ -302,6 +316,8 @@ class PatchITRB(rorpiter.ITRBranch):
statistics.StatFileObj()) statistics.StatFileObj())
self.dir_replacement, self.dir_update = None, None self.dir_replacement, self.dir_update = None, None
self.cached_rp = None self.cached_rp = None
self.CCPP = CCPP
self.error_handler = robust.get_error_handler("UpdateError")
def get_rp_from_root(self, index): def get_rp_from_root(self, index):
"""Return RPath by adding index to self.basis_root_rp""" """Return RPath by adding index to self.basis_root_rp"""
...@@ -318,19 +334,36 @@ class PatchITRB(rorpiter.ITRBranch): ...@@ -318,19 +334,36 @@ class PatchITRB(rorpiter.ITRBranch):
"""Patch base_rp with diff_rorp (case where neither is directory)""" """Patch base_rp with diff_rorp (case where neither is directory)"""
rp = self.get_rp_from_root(index) rp = self.get_rp_from_root(index)
tf = TempFile.new(rp) tf = TempFile.new(rp)
self.patch_to_temp(rp, diff_rorp, tf) if self.patch_to_temp(rp, diff_rorp, tf):
rpath.rename(tf, rp) if tf.lstat(): rpath.rename(tf, rp)
elif rp.lstat(): rp.delete()
self.CCPP.flag_success(index)
else:
tf.setdata()
if tf.lstat(): tf.delete()
def patch_to_temp(self, basis_rp, diff_rorp, new): def patch_to_temp(self, basis_rp, diff_rorp, new):
"""Patch basis_rp, writing output in new, which doesn't exist yet""" """Patch basis_rp, writing output in new, which doesn't exist yet"""
if diff_rorp.isflaglinked(): if diff_rorp.isflaglinked():
Hardlink.link_rp(diff_rorp, new, self.basis_root_rp) Hardlink.link_rp(diff_rorp, new, self.basis_root_rp)
elif diff_rorp.get_attached_filetype() == 'snapshot': elif diff_rorp.get_attached_filetype() == 'snapshot':
rpath.copy(diff_rorp, new) if diff_rorp.isspecial(): self.write_special(diff_rorp, new)
elif robust.check_common_error(self.error_handler, rpath.copy,
(diff_rorp, new)) == 0: return 0
else: else:
assert diff_rorp.get_attached_filetype() == 'diff' assert diff_rorp.get_attached_filetype() == 'diff'
Rdiff.patch_local(basis_rp, diff_rorp, new) if robust.check_common_error(self.error_handler,
Rdiff.patch_local, (basis_rp, diff_rorp, new)) == 0: return 0
if new.lstat(): rpath.copy_attribs(diff_rorp, new) if new.lstat(): rpath.copy_attribs(diff_rorp, new)
return 1
def write_special(self, diff_rorp, new):
"""Write diff_rorp (which holds special file) to new"""
eh = robust.get_error_handler("SpecialFileError")
if robust.check_common_error(eh, rpath.copy, (diff_rorp, new)) == 0:
new.setdata()
if new.lstat(): new.delete()
new.touch()
def start_process(self, index, diff_rorp): def start_process(self, index, diff_rorp):
"""Start processing directory - record information for later""" """Start processing directory - record information for later"""
...@@ -368,6 +401,7 @@ class PatchITRB(rorpiter.ITRBranch): ...@@ -368,6 +401,7 @@ class PatchITRB(rorpiter.ITRBranch):
self.base_rp.rmdir() self.base_rp.rmdir()
if self.dir_replacement.lstat(): if self.dir_replacement.lstat():
rpath.rename(self.dir_replacement, self.base_rp) rpath.rename(self.dir_replacement, self.base_rp)
self.CCPP.flag_success(self.base_rp.index)
class IncrementITRB(PatchITRB): class IncrementITRB(PatchITRB):
...@@ -393,7 +427,9 @@ class IncrementITRB(PatchITRB): ...@@ -393,7 +427,9 @@ class IncrementITRB(PatchITRB):
tf = TempFile.new(rp) tf = TempFile.new(rp)
self.patch_to_temp(rp, diff_rorp, tf) self.patch_to_temp(rp, diff_rorp, tf)
increment.Increment(tf, rp, self.get_incrp(index)) increment.Increment(tf, rp, self.get_incrp(index))
rpath.rename(tf, rp) if tf.lstat(): rpath.rename(tf, rp)
else: rp.delete()
self.CCPP.flag_success(index)
def start_process(self, index, diff_rorp): def start_process(self, index, diff_rorp):
"""Start processing directory""" """Start processing directory"""
...
...@@ -20,7 +20,8 @@ ...@@ -20,7 +20,8 @@
"""Support code for remote execution and data transfer""" """Support code for remote execution and data transfer"""
from __future__ import generators from __future__ import generators
import types, os, tempfile, cPickle, shutil, traceback, pickle, socket, sys import types, os, tempfile, cPickle, shutil, traceback, pickle, \
socket, sys, gzip
class ConnectionError(Exception): pass class ConnectionError(Exception): pass
...@@ -39,6 +40,7 @@ class Connection: ...@@ -39,6 +40,7 @@ class Connection:
""" """
def __repr__(self): return self.__str__() def __repr__(self): return self.__str__()
def __str__(self): return "Simple Connection" # override later def __str__(self): return "Simple Connection" # override later
def __nonzero__(self): return 1
class LocalConnection(Connection): class LocalConnection(Connection):
"""Local connection """Local connection
...@@ -117,7 +119,7 @@ class LowLevelPipeConnection(Connection): ...@@ -117,7 +119,7 @@ class LowLevelPipeConnection(Connection):
def _put(self, obj, req_num): def _put(self, obj, req_num):
"""Put an object into the pipe (will send raw if string)""" """Put an object into the pipe (will send raw if string)"""
Log.conn("sending", obj, req_num) log.Log.conn("sending", obj, req_num)
if type(obj) is types.StringType: self._putbuf(obj, req_num) if type(obj) is types.StringType: self._putbuf(obj, req_num)
elif isinstance(obj, connection.Connection):self._putconn(obj, req_num) elif isinstance(obj, connection.Connection):self._putconn(obj, req_num)
elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num) elif isinstance(obj, rpath.RPath): self._putrpath(obj, req_num)
...@@ -231,7 +233,7 @@ class LowLevelPipeConnection(Connection): ...@@ -231,7 +233,7 @@ class LowLevelPipeConnection(Connection):
else: else:
assert format_string == "c", header_string assert format_string == "c", header_string
result = Globals.connection_dict[int(data)] result = Globals.connection_dict[int(data)]
Log.conn("received", result, req_num) log.Log.conn("received", result, req_num)
return (req_num, result) return (req_num, result)
def _getrorpath(self, raw_rorpath_buf): def _getrorpath(self, raw_rorpath_buf):
...@@ -315,17 +317,17 @@ class PipeConnection(LowLevelPipeConnection): ...@@ -315,17 +317,17 @@ class PipeConnection(LowLevelPipeConnection):
def extract_exception(self): def extract_exception(self):
"""Return active exception""" """Return active exception"""
if Log.verbosity >= 5 or Log.term_verbosity >= 5: if log.Log.verbosity >= 5 or log.Log.term_verbosity >= 5:
Log("Sending back exception %s of type %s: \n%s" % log.Log("Sending back exception %s of type %s: \n%s" %
(sys.exc_info()[1], sys.exc_info()[0], (sys.exc_info()[1], sys.exc_info()[0],
"".join(traceback.format_tb(sys.exc_info()[2]))), 5) "".join(traceback.format_tb(sys.exc_info()[2]))), 5)
return sys.exc_info()[1] return sys.exc_info()[1]
def Server(self): def Server(self):
"""Start server's read eval return loop""" """Start server's read eval return loop"""
Globals.server = 1 Globals.server = 1
Globals.connections.append(self) Globals.connections.append(self)
Log("Starting server", 6) log.Log("Starting server", 6)
self.get_response(-1) self.get_response(-1)
def reval(self, function_string, *args): def reval(self, function_string, *args):
...@@ -510,8 +512,7 @@ class VirtualFile: ...@@ -510,8 +512,7 @@ class VirtualFile:
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \ import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
Main, rorpiter, selection, increment, statistics, manage, lazy, \ Main, rorpiter, selection, increment, statistics, manage, lazy, \
iterfile, rpath, robust, restore, manage, backup, connection, \ iterfile, rpath, robust, restore, manage, backup, connection, \
TempFile, SetConnections, librsync TempFile, SetConnections, librsync, log
from log import Log
Globals.local_connection = LocalConnection() Globals.local_connection = LocalConnection()
Globals.connections.append(Globals.local_connection) Globals.connections.append(Globals.local_connection)
...
...@@ -50,7 +50,7 @@ def Increment(new, mirror, incpref): ...@@ -50,7 +50,7 @@ def Increment(new, mirror, incpref):
def makemissing(incpref): def makemissing(incpref):
"""Signify that mirror file was missing""" """Signify that mirror file was missing"""
incrp = get_inc_ext(incpref, "missing") incrp = get_inc(incpref, "missing")
incrp.touch() incrp.touch()
return incrp return incrp
...@@ -62,16 +62,16 @@ def iscompressed(mirror): ...@@ -62,16 +62,16 @@ def iscompressed(mirror):
def makesnapshot(mirror, incpref): def makesnapshot(mirror, incpref):
"""Copy mirror to incfile, since new is quite different""" """Copy mirror to incfile, since new is quite different"""
compress = iscompressed(mirror) compress = iscompressed(mirror)
if compress: snapshotrp = get_inc_ext(incpref, "snapshot.gz") if compress: snapshotrp = get_inc(incpref, "snapshot.gz")
else: snapshotrp = get_inc_ext(incpref, "snapshot") else: snapshotrp = get_inc(incpref, "snapshot")
rpath.copy_with_attribs(mirror, snapshotrp, compress) rpath.copy_with_attribs(mirror, snapshotrp, compress)
return snapshotrp return snapshotrp
def makediff(new, mirror, incpref): def makediff(new, mirror, incpref):
"""Make incfile which is a diff new -> mirror""" """Make incfile which is a diff new -> mirror"""
compress = iscompressed(mirror) compress = iscompressed(mirror)
if compress: diff = get_inc_ext(incpref, "diff.gz") if compress: diff = get_inc(incpref, "diff.gz")
else: diff = get_inc_ext(incpref, "diff") else: diff = get_inc(incpref, "diff")
Rdiff.write_delta(new, mirror, diff, compress) Rdiff.write_delta(new, mirror, diff, compress)
rpath.copy_attribs(mirror, diff) rpath.copy_attribs(mirror, diff)
...@@ -79,18 +79,19 @@ def makediff(new, mirror, incpref): ...@@ -79,18 +79,19 @@ def makediff(new, mirror, incpref):
def makedir(mirrordir, incpref): def makedir(mirrordir, incpref):
"""Make file indicating directory mirrordir has changed""" """Make file indicating directory mirrordir has changed"""
dirsign = get_inc_ext(incpref, "dir") dirsign = get_inc(incpref, "dir")
dirsign.touch() dirsign.touch()
rpath.copy_attribs(mirrordir, dirsign) rpath.copy_attribs(mirrordir, dirsign)
return dirsign return dirsign
def get_inc(rp, time, typestr): def get_inc(rp, typestr, time = None):
"""Return increment like rp but with time and typestr suffixes """Return increment like rp but with time and typestr suffixes
To avoid any quoting, the returned rpath has empty index, and the To avoid any quoting, the returned rpath has empty index, and the
whole filename is in the base (which is not quoted). whole filename is in the base (which is not quoted).
""" """
if time is None: time = Time.prevtime
addtostr = lambda s: "%s.%s.%s" % (s, Time.timetostring(time), typestr) addtostr = lambda s: "%s.%s.%s" % (s, Time.timetostring(time), typestr)
if rp.index: if rp.index:
incrp = rp.__class__(rp.conn, rp.base, rp.index[:-1] + incrp = rp.__class__(rp.conn, rp.base, rp.index[:-1] +
...@@ -98,22 +99,7 @@ def get_inc(rp, time, typestr): ...@@ -98,22 +99,7 @@ def get_inc(rp, time, typestr):
else: else:
dirname, basename = rp.dirsplit() dirname, basename = rp.dirsplit()
incrp = rp.__class__(rp.conn, dirname, (addtostr(basename),)) incrp = rp.__class__(rp.conn, dirname, (addtostr(basename),))
assert not incrp.lstat()
return incrp return incrp
def get_inc_ext(rp, typestr, inctime = None):
"""Return increment with specified type and time t
If the file exists, then probably a previous backup has been
aborted. We then keep asking FindTime to get a time later
than the one that already has an inc file.
"""
if inctime is None: inctime = Time.prevtime
while 1:
incrp = get_inc(rp, inctime, typestr)
if not incrp.lstat(): break
else:
inctime += 1
log.Log("Warning, increment %s already exists" % (incrp.path,), 2)
return incrp
...@@ -20,7 +20,7 @@ ...@@ -20,7 +20,7 @@
"""Manage logging, displaying and recording messages with required verbosity""" """Manage logging, displaying and recording messages with required verbosity"""
import time, sys, traceback, types import time, sys, traceback, types
import Globals, static import Globals, static, re
class LoggerError(Exception): pass class LoggerError(Exception): pass
...@@ -58,9 +58,9 @@ class Logger: ...@@ -58,9 +58,9 @@ class Logger:
""" """
assert not self.log_file_open assert not self.log_file_open
rpath.conn.Log.open_logfile_local(rpath) rpath.conn.log.Log.open_logfile_local(rpath)
for conn in Globals.connections: for conn in Globals.connections:
conn.Log.open_logfile_allconn(rpath.conn) conn.log.Log.open_logfile_allconn(rpath.conn)
def open_logfile_allconn(self, log_file_conn): def open_logfile_allconn(self, log_file_conn):
"""Run on all connections to signal log file is open""" """Run on all connections to signal log file is open"""
...@@ -81,8 +81,8 @@ class Logger: ...@@ -81,8 +81,8 @@ class Logger:
"""Close logfile and inform all connections""" """Close logfile and inform all connections"""
if self.log_file_open: if self.log_file_open:
for conn in Globals.connections: for conn in Globals.connections:
conn.Log.close_logfile_allconn() conn.log.Log.close_logfile_allconn()
self.log_file_conn.Log.close_logfile_local() self.log_file_conn.log.Log.close_logfile_local()
def close_logfile_allconn(self): def close_logfile_allconn(self):
"""Run on every connection""" """Run on every connection"""
...@@ -125,7 +125,7 @@ class Logger: ...@@ -125,7 +125,7 @@ class Logger:
if self.log_file_open: if self.log_file_open:
if self.log_file_local: if self.log_file_local:
self.logfp.write(self.format(message, self.verbosity)) self.logfp.write(self.format(message, self.verbosity))
else: self.log_file_conn.Log.log_to_file(message) else: self.log_file_conn.log.Log.log_to_file(message)
def log_to_term(self, message, verbosity): def log_to_term(self, message, verbosity):
"""Write message to stdout/stderr""" """Write message to stdout/stderr"""
...@@ -150,8 +150,12 @@ class Logger: ...@@ -150,8 +150,12 @@ class Logger:
self.log_to_term("%s %s (%d): %s" % self.log_to_term("%s %s (%d): %s" %
(conn_str, direction, req_num, result_repr), 9) (conn_str, direction, req_num, result_repr), 9)
def FatalError(self, message): def FatalError(self, message, no_fatal_message = 0):
self("Fatal Error: " + message, 1) """Log a fatal error and exit"""
assert no_fatal_message == 0 or no_fatal_message == 1
if no_fatal_message: prefix_string = ""
else: prefix_string = "Fatal Error: "
self(prefix_string + message, 1)
import Main import Main
Main.cleanup() Main.cleanup()
sys.exit(1) sys.exit(1)
...@@ -196,22 +200,35 @@ class ErrorLog: ...@@ -196,22 +200,35 @@ class ErrorLog:
""" """
_log_fileobj = None _log_fileobj = None
_log_inc_rp = None _log_inc_rp = None
def open(cls, compress = 1): def open(cls, time_string, compress = 1):
"""Open the error log, prepare for writing""" """Open the error log, prepare for writing"""
if not Globals.isbackup_writer:
return Globals.backup_writer.log.ErrorLog.open(time_string,
compress)
assert not cls._log_fileobj and not cls._log_inc_rp, "log already open" assert not cls._log_fileobj and not cls._log_inc_rp, "log already open"
assert Globals.isbackup_writer
if compress: typestr = 'data.gz' if compress: typestr = 'data.gz'
else: typestr = 'data' else: typestr = 'data'
cls._log_inc_rp = Global.rbdir.append("error_log.%s.%s" % cls._log_inc_rp = Globals.rbdir.append("error_log.%s.%s" %
(Time.curtimestr, typestr)) (time_string, typestr))
assert not cls._log_inc_rp.lstat(), "Error file already exists" assert not cls._log_inc_rp.lstat(), ("""Error file %s already exists.
This is probably caused by your attempting to run two backups simultaneously
or within one second of each other. Wait a second and try again.""" %
(cls._log_inc_rp.path,))
cls._log_fileobj = cls._log_inc_rp.open("wb", compress = compress) cls._log_fileobj = cls._log_inc_rp.open("wb", compress = compress)
def isopen(cls): def isopen(cls):
"""True if the error log file is currently open""" """True if the error log file is currently open"""
return cls._log_fileobj is not None if Globals.isbackup_writer or not Globals.backup_writer:
return cls._log_fileobj is not None
else: return Globals.backup_writer.log.ErrorLog.isopen()
def write(cls, error_type, rp, exc): def write(cls, error_type, rp, exc):
"""Add line to log file indicating error exc with file rp""" """Add line to log file indicating error exc with file rp"""
if not Globals.isbackup_writer:
return Globals.backup_writer.log.ErrorLog.write(error_type,
rp, exc)
s = cls.get_log_string(error_type, rp, exc) s = cls.get_log_string(error_type, rp, exc)
Log(s, 2) Log(s, 2)
if Globals.null_separator: s += "\0" if Globals.null_separator: s += "\0"
...@@ -220,15 +237,18 @@ class ErrorLog: ...@@ -220,15 +237,18 @@ class ErrorLog:
s += "\n" s += "\n"
cls._log_fileobj.write(s) cls._log_fileobj.write(s)
def get_indexpath(cls, rp): def get_indexpath(cls, obj):
"""Return filename for logging. rp is a rpath, string, or tuple""" """Return filename for logging. rp is a rpath, string, or tuple"""
try: return rp.get_indexpath() try: return obj.get_indexpath()
except AttributeError: except AttributeError:
if type(rp) is types.TupleTypes: return "/".join(rp) if type(obj) is types.TupleType: return "/".join(obj)
else: return str(rp) else: return str(obj)
def write_if_open(cls, error_type, rp, exc): def write_if_open(cls, error_type, rp, exc):
"""Call cls.write(...) if error log open, only log otherwise""" """Call cls.write(...) if error log open, only log otherwise"""
if not Globals.isbackup_writer:
return Globals.backup_writer.log.ErrorLog.write_if_open(
error_type, rp, exc)
if cls.isopen(): cls.write(error_type, rp, exc) if cls.isopen(): cls.write(error_type, rp, exc)
else: Log(cls.get_log_string(error_type, rp, exc), 2) else: Log(cls.get_log_string(error_type, rp, exc), 2)
...@@ -240,6 +260,8 @@ class ErrorLog: ...@@ -240,6 +260,8 @@ class ErrorLog:
def close(cls): def close(cls):
"""Close the error log file""" """Close the error log file"""
if not Globals.isbackup_writer:
return Globals.backup_writer.log.ErrorLog.close()
assert not cls._log_fileobj.close() assert not cls._log_fileobj.close()
cls._log_fileobj = cls._log_inc_rp = None cls._log_fileobj = cls._log_inc_rp = None
...
...@@ -472,8 +472,6 @@ class PatchITRB(rorpiter.ITRBranch): ...@@ -472,8 +472,6 @@ class PatchITRB(rorpiter.ITRBranch):
"""Set basis_root_rp, the base of the tree to be incremented""" """Set basis_root_rp, the base of the tree to be incremented"""
self.basis_root_rp = basis_root_rp self.basis_root_rp = basis_root_rp
assert basis_root_rp.conn is Globals.local_connection assert basis_root_rp.conn is Globals.local_connection
self.statfileobj = (statistics.get_active_statfileobj() or
statistics.StatFileObj())
self.dir_replacement, self.dir_update = None, None self.dir_replacement, self.dir_update = None, None
self.cached_rp = None self.cached_rp = None
...
...@@ -19,7 +19,8 @@ ...@@ -19,7 +19,8 @@
"""Catch various exceptions given system call""" """Catch various exceptions given system call"""
import librsync, errno, signal, C, static, rpath, Globals, log, statistics import errno, signal
import librsync, C, static, rpath, Globals, log, statistics
def check_common_error(error_handler, function, args = []): def check_common_error(error_handler, function, args = []):
"""Apply function to args, if error, run error_handler on exception """Apply function to args, if error, run error_handler on exception
...@@ -34,9 +35,9 @@ def check_common_error(error_handler, function, args = []): ...@@ -34,9 +35,9 @@ def check_common_error(error_handler, function, args = []):
if catch_error(exc): if catch_error(exc):
log.Log.exception() log.Log.exception()
conn = Globals.backup_writer conn = Globals.backup_writer
if conn is not None: statistics.record_error() if conn is not None: conn.statistics.record_error()
if error_handler: return error_handler(exc, *args) if error_handler: return error_handler(exc, *args)
else: return else: return None
log.Log.exception(1, 2) log.Log.exception(1, 2)
raise raise
...@@ -46,13 +47,29 @@ def catch_error(exc): ...@@ -46,13 +47,29 @@ def catch_error(exc):
librsync.librsyncError, C.UnknownFileTypeError): librsync.librsyncError, C.UnknownFileTypeError):
if isinstance(exc, exception_class): return 1 if isinstance(exc, exception_class): return 1
if (isinstance(exc, EnvironmentError) and if (isinstance(exc, EnvironmentError) and
errno.errorcode[exc[0]] in ('EPERM', 'ENOENT', 'EACCES', 'EBUSY', # the invalid mode shows up in backups of /proc for some reason
'EEXIST', 'ENOTDIR', 'ENAMETOOLONG', (exc[0] == 'invalid mode: rb' or
'EINTR', 'ENOTEMPTY', 'EIO', 'ETXTBSY', errno.errorcode.has_key(exc[0]) and
'ESRCH', 'EINVAL')): errno.errorcode[exc[0]] in ('EPERM', 'ENOENT', 'EACCES', 'EBUSY',
'EEXIST', 'ENOTDIR', 'ENAMETOOLONG',
'EINTR', 'ENOTEMPTY', 'EIO', 'ETXTBSY',
'ESRCH', 'EINVAL'))):
return 1 return 1
return 0 return 0
def get_error_handler(error_type):
"""Return error handler function that can be used above
Function will just log error to the error_log and then return
None. First two arguments must be the exception and then an rp
(from which the filename will be extracted).
"""
def error_handler(exc, rp, *args):
log.ErrorLog.write_if_open(error_type, rp, exc)
return 0
return error_handler
def listrp(rp): def listrp(rp):
"""Like rp.listdir() but return [] if error, and sort results""" """Like rp.listdir() but return [] if error, and sort results"""
def error_handler(exc): def error_handler(exc):
...
...@@ -209,6 +209,8 @@ def rename(rp_source, rp_dest): ...@@ -209,6 +209,8 @@ def rename(rp_source, rp_dest):
if not rp_source.lstat(): rp_dest.delete() if not rp_source.lstat(): rp_dest.delete()
else: else:
if rp_dest.lstat() and rp_source.getinode() == rp_dest.getinode(): if rp_dest.lstat() and rp_source.getinode() == rp_dest.getinode():
assert 0, ("Rename over same inode: %s to %s" %
(rp_source.path, rp_dest.path))
# You can't rename one hard linked file over another # You can't rename one hard linked file over another
rp_source.delete() rp_source.delete()
else: rp_source.conn.os.rename(rp_source.path, rp_dest.path) else: rp_source.conn.os.rename(rp_source.path, rp_dest.path)
...@@ -266,24 +268,30 @@ class RORPath: ...@@ -266,24 +268,30 @@ class RORPath:
else: self.data = {'type':None} # signify empty file else: self.data = {'type':None} # signify empty file
self.file = None self.file = None
def zero(self):
"""Set inside of self to type None"""
self.data = {'type': None}
self.file = None
def __eq__(self, other): def __eq__(self, other):
"""True iff the two rorpaths are equivalent""" """True iff the two rorpaths are equivalent"""
if self.index != other.index: return None if self.index != other.index: return None
for key in self.data.keys(): # compare dicts key by key for key in self.data.keys(): # compare dicts key by key
if ((key == 'uid' or key == 'gid') and if (key == 'uid' or key == 'gid') and self.issym():
(not Globals.change_ownership or self.issym())): # Don't compare gid/uid for symlinks
# Don't compare gid/uid for symlinks or if not change_ownership
pass pass
elif key == 'atime' and not Globals.preserve_atime: pass elif key == 'atime' and not Globals.preserve_atime: pass
elif key == 'devloc' or key == 'inode' or key == 'nlink': pass elif key == 'devloc' or key == 'nlink': pass
elif key == 'size' and not self.isreg(): elif key == 'size' and not self.isreg(): pass
pass # size only matters for regular files elif key == 'inode' and (not self.isreg() or
not Globals.compare_inode): pass
elif (not other.data.has_key(key) or elif (not other.data.has_key(key) or
self.data[key] != other.data[key]): return None self.data[key] != other.data[key]): return None
return 1 return 1
def equal_verbose(self, other, check_index = 1): def equal_verbose(self, other, check_index = 1,
compare_inodes = 0, compare_ownership = 0):
"""Like __eq__, but log more information. Useful when testing""" """Like __eq__, but log more information. Useful when testing"""
if check_index and self.index != other.index: if check_index and self.index != other.index:
log.Log("Index %s != index %s" % (self.index, other.index), 2) log.Log("Index %s != index %s" % (self.index, other.index), 2)
...@@ -291,12 +299,14 @@ class RORPath: ...@@ -291,12 +299,14 @@ class RORPath:
for key in self.data.keys(): # compare dicts key by key for key in self.data.keys(): # compare dicts key by key
if ((key == 'uid' or key == 'gid') and if ((key == 'uid' or key == 'gid') and
(not Globals.change_ownership or self.issym())): (self.issym() or not compare_ownership)):
# Don't compare gid/uid for symlinks or if not change_ownership # Don't compare gid/uid for symlinks, or if told not to
pass pass
elif key == 'atime' and not Globals.preserve_atime: pass elif key == 'atime' and not Globals.preserve_atime: pass
elif key == 'devloc' or key == 'inode' or key == 'nlink': pass elif key == 'devloc' or key == 'nlink': pass
elif key == 'size' and not self.isreg(): pass elif key == 'size' and not self.isreg(): pass
elif key == 'inode' and (not self.isreg() or not compare_inodes):
pass
elif (not other.data.has_key(key) or elif (not other.data.has_key(key) or
self.data[key] != other.data[key]): self.data[key] != other.data[key]):
if not other.data.has_key(key): if not other.data.has_key(key):
...@@ -312,6 +322,10 @@ class RORPath: ...@@ -312,6 +322,10 @@ class RORPath:
"""Pretty print file statistics""" """Pretty print file statistics"""
return "Index: %s\nData: %s" % (self.index, self.data) return "Index: %s\nData: %s" % (self.index, self.data)
def summary_string(self):
"""Return summary string"""
return "%s %s" % (self.get_indexpath(), self.lstat())
def __getstate__(self): def __getstate__(self):
"""Return picklable state """Return picklable state
...@@ -373,6 +387,12 @@ class RORPath: ...@@ -373,6 +387,12 @@ class RORPath:
"""True if path is a socket""" """True if path is a socket"""
return self.data['type'] == 'sock' return self.data['type'] == 'sock'
def isspecial(self):
"""True if the file is a sock, symlink, device, or fifo"""
type = self.data['type']
return (type == 'dev' or type == 'sock' or
type == 'fifo' or type == 'sym')
def getperms(self): def getperms(self):
"""Return permission block of file""" """Return permission block of file"""
return self.data['perms'] return self.data['perms']
...@@ -662,7 +682,7 @@ class RPath(RORPath): ...@@ -662,7 +682,7 @@ class RPath(RORPath):
log.Log("Touching " + self.path, 7) log.Log("Touching " + self.path, 7)
self.conn.open(self.path, "w").close() self.conn.open(self.path, "w").close()
self.setdata() self.setdata()
assert self.isreg() assert self.isreg(), self.path
def hasfullperms(self): def hasfullperms(self):
"""Return true if current process has full permissions on the file""" """Return true if current process has full permissions on the file"""
...
...@@ -95,19 +95,11 @@ class Select: ...@@ -95,19 +95,11 @@ class Select:
""" """
if not sel_func: sel_func = self.Select if not sel_func: sel_func = self.Select
self.rpath.setdata() # this may have changed since Select init self.rpath.setdata() # this may have changed since Select init
self.iter = self.filter_readable(self.Iterate_fast(self.rpath, self.iter = self.Iterate_fast(self.rpath, sel_func)
sel_func))
self.next = self.iter.next self.next = self.iter.next
self.__iter__ = lambda: self self.__iter__ = lambda: self
return self return self
def filter_readable(self, rp_iter):
"""Yield rps in iter except the unreadable regular files"""
for rp in rp_iter:
if not rp.isreg() or rp.readable(): yield rp
else: log.ErrorLog.write_if_open("ListError", rp,
"Regular file lacks read permissions")
def Iterate_fast(self, rpath, sel_func): def Iterate_fast(self, rpath, sel_func):
"""Like Iterate, but don't recur, saving time""" """Like Iterate, but don't recur, saving time"""
def error_handler(exc, filename): def error_handler(exc, filename):
...
...@@ -338,7 +338,7 @@ def write_active_statfileobj(): ...@@ -338,7 +338,7 @@ def write_active_statfileobj():
global _active_statfileobj global _active_statfileobj
assert _active_statfileobj assert _active_statfileobj
rp_base = Globals.rbdir.append("session_statistics") rp_base = Globals.rbdir.append("session_statistics")
session_stats_rp = increment.get_inc_ext(rp_base, 'data', Time.curtime) session_stats_rp = increment.get_inc(rp_base, 'data', Time.curtime)
_active_statfileobj.finish() _active_statfileobj.finish()
_active_statfileobj.write_stats_to_rp(session_stats_rp) _active_statfileobj.write_stats_to_rp(session_stats_rp)
_active_statfileobj = None _active_statfileobj = None
...@@ -112,14 +112,7 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir): ...@@ -112,14 +112,7 @@ def InternalMirror(source_local, dest_local, src_dir, dest_dir):
src_root = rpath.RPath(Globals.local_connection, src_dir) src_root = rpath.RPath(Globals.local_connection, src_dir)
dest_root = rpath.RPath(Globals.local_connection, dest_dir) dest_root = rpath.RPath(Globals.local_connection, dest_dir)
dest_rbdir = dest_root.append("rdiff-backup-data") dest_rbdir = dest_root.append("rdiff-backup-data")
dest_incdir = dest_rbdir.append("increments")
# We need to create these directories or else failure because
# --force option not given.
if not dest_root.lstat(): dest_root.mkdir()
if not dest_rbdir.lstat(): dest_rbdir.mkdir()
if not dest_incdir.lstat(): dest_incdir.mkdir()
InternalBackup(source_local, dest_local, src_dir, dest_dir) InternalBackup(source_local, dest_local, src_dir, dest_dir)
dest_root.setdata() dest_root.setdata()
Myrm(dest_rbdir.path) Myrm(dest_rbdir.path)
...@@ -173,7 +166,7 @@ def _reset_connections(src_rp, dest_rp): ...@@ -173,7 +166,7 @@ def _reset_connections(src_rp, dest_rp):
def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
equality_func = None, exclude_rbdir = 1, equality_func = None, exclude_rbdir = 1,
ignore_tmp_files = None): ignore_tmp_files = None, compare_ownership = 0):
"""Compare src_rp and dest_rp, which can be directories """Compare src_rp and dest_rp, which can be directories
This only compares file attributes, not the actual data. This This only compares file attributes, not the actual data. This
...@@ -218,7 +211,9 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, ...@@ -218,7 +211,9 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
dsiter1, dsiter2 = src_select.set_iter(), dest_select.set_iter() dsiter1, dsiter2 = src_select.set_iter(), dest_select.set_iter()
def hardlink_equal(src_rorp, dest_rorp): def hardlink_equal(src_rorp, dest_rorp):
if not src_rorp.equal_verbose(dest_rorp): return None if not src_rorp.equal_verbose(dest_rorp,
compare_ownership = compare_ownership):
return None
if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1 if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1
Log("%s: %s" % (src_rorp.index, Hardlink.get_indicies(src_rorp, 1)), 3) Log("%s: %s" % (src_rorp.index, Hardlink.get_indicies(src_rorp, 1)), 3)
Log("%s: %s" % (dest_rorp.index, Log("%s: %s" % (dest_rorp.index,
...@@ -240,7 +235,9 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, ...@@ -240,7 +235,9 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
if dest_rorp.index[-1].endswith('.missing'): return 1 if dest_rorp.index[-1].endswith('.missing'): return 1
if compare_hardlinks: if compare_hardlinks:
if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1 if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1
elif src_rorp.equal_verbose(dest_rorp): return 1 elif src_rorp.equal_verbose(dest_rorp,
compare_ownership = compare_ownership):
return 1
Log("%s: %s" % (src_rorp.index, Hardlink.get_indicies(src_rorp, 1)), 3) Log("%s: %s" % (src_rorp.index, Hardlink.get_indicies(src_rorp, 1)), 3)
Log("%s: %s" % (dest_rorp.index, Log("%s: %s" % (dest_rorp.index,
Hardlink.get_indicies(dest_rorp, None)), 3) Hardlink.get_indicies(dest_rorp, None)), 3)
...@@ -256,7 +253,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1, ...@@ -256,7 +253,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal) else: result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
elif not exclude_rbdir: elif not exclude_rbdir:
result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal) result = lazy.Iter.equal(dsiter1, dsiter2, 1, rbdir_equal)
else: result = lazy.Iter.equal(dsiter1, dsiter2, 1) else: result = lazy.Iter.equal(dsiter1, dsiter2, 1,
lambda x, y: x.equal_verbose(y, compare_ownership = compare_ownership))
for i in dsiter1: pass # make sure all files processed anyway for i in dsiter1: pass # make sure all files processed anyway
for i in dsiter2: pass for i in dsiter2: pass
...@@ -321,6 +319,8 @@ def MirrorTest(source_local, dest_local, list_of_dirnames, ...@@ -321,6 +319,8 @@ def MirrorTest(source_local, dest_local, list_of_dirnames,
"""Mirror each of list_of_dirnames, and compare after each""" """Mirror each of list_of_dirnames, and compare after each"""
Globals.set('preserve_hardlinks', compare_hardlinks) Globals.set('preserve_hardlinks', compare_hardlinks)
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname) dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
old_force_val = Main.force
Main.force = 1
Myrm(dest_dirname) Myrm(dest_dirname)
for dirname in list_of_dirnames: for dirname in list_of_dirnames:
...@@ -331,3 +331,4 @@ def MirrorTest(source_local, dest_local, list_of_dirnames, ...@@ -331,3 +331,4 @@ def MirrorTest(source_local, dest_local, list_of_dirnames,
InternalMirror(source_local, dest_local, dirname, dest_dirname) InternalMirror(source_local, dest_local, dirname, dest_dirname)
_reset_connections(src_rp, dest_rp) _reset_connections(src_rp, dest_rp)
assert CompareRecursive(src_rp, dest_rp, compare_hardlinks) assert CompareRecursive(src_rp, dest_rp, compare_hardlinks)
Main.force = old_force_val
...@@ -42,8 +42,9 @@ class PathSetter(unittest.TestCase): ...@@ -42,8 +42,9 @@ class PathSetter(unittest.TestCase):
self.reset_schema() self.reset_schema()
def reset_schema(self): def reset_schema(self):
self.rb_schema = SourceDir + \ self.rb_schema = (SourceDir +
"/../rdiff-backup -v5 --remote-schema './chdir-wrapper2 %s' " "/../rdiff-backup -v3 --no-compare-inode "
"--remote-schema './chdir-wrapper2 %s' ")
def refresh(self, *rp_list): def refresh(self, *rp_list):
"""Reread data for the given rps""" """Reread data for the given rps"""
...@@ -215,6 +216,12 @@ class Final(PathSetter): ...@@ -215,6 +216,12 @@ class Final(PathSetter):
self.set_connections(None, None, "test2/tmp/", "../../") self.set_connections(None, None, "test2/tmp/", "../../")
self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput') self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput')
def testProcRemote2(self):
"""Test mirroring proc, this time when proc is remote, dest local"""
Myrm("testfiles/procoutput")
self.set_connections("test1/", "../", None, None)
self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput')
def testWindowsMode(self): def testWindowsMode(self):
"""Test backup with the --windows-mode option""" """Test backup with the --windows-mode option"""
old_schema = self.rb_schema old_schema = self.rb_schema
...
@@ -2,7 +2,7 @@ import os, unittest, time
from commontest import *
from rdiff_backup import Globals, Hardlink, selection, rpath
Log.setverbosity(7)
Log.setverbosity(3)
class HardlinkTest(unittest.TestCase):
"""Test cases for Hard links"""
...@@ -142,10 +142,27 @@ class HardlinkTest(unittest.TestCase): ...@@ -142,10 +142,27 @@ class HardlinkTest(unittest.TestCase):
hl2_4.hardlink(hl2_1.path) hl2_4.hardlink(hl2_1.path)
rpath.copy_attribs(hlout1_sub, hlout2_sub) rpath.copy_attribs(hlout1_sub, hlout2_sub)
# Now try backing up twice, making sure hard links are preserved
InternalBackup(1, 1, hlout1.path, output.path) InternalBackup(1, 1, hlout1.path, output.path)
out_subdir = output.append("subdir")
assert out_subdir.append("hardlink1").getinode() == \
out_subdir.append("hardlink2").getinode()
assert out_subdir.append("hardlink3").getinode() == \
out_subdir.append("hardlink4").getinode()
assert out_subdir.append("hardlink1").getinode() != \
out_subdir.append("hardlink3").getinode()
time.sleep(1) time.sleep(1)
InternalBackup(1, 1, hlout2.path, output.path) InternalBackup(1, 1, hlout2.path, output.path)
out_subdir.setdata()
assert out_subdir.append("hardlink1").getinode() == \
out_subdir.append("hardlink4").getinode()
assert out_subdir.append("hardlink2").getinode() == \
out_subdir.append("hardlink3").getinode()
assert out_subdir.append("hardlink1").getinode() != \
out_subdir.append("hardlink2").getinode()
# Now try restoring, still checking hard links.
out2 = rpath.RPath(Globals.local_connection, "testfiles/out2")
hlout1 = out2.append("hardlink1")
hlout2 = out2.append("hardlink2")
...@@ -165,7 +182,8 @@ class HardlinkTest(unittest.TestCase):
int(time.time()))
out2.setdata()
for rp in [hlout1, hlout2, hlout3, hlout4]: rp.setdata()
assert hlout1.getinode() == hlout4.getinode(), \
"%s %s" % (hlout1.path, hlout4.path)
assert hlout2.getinode() == hlout3.getinode()
assert hlout1.getinode() != hlout2.getinode()
...
...@@ -245,21 +245,15 @@ class MirrorTest(PathSetter):
def testPermSkipLocal(self):
"""Test to see if rdiff-backup will skip unreadable files"""
self.setPathnames(None, None, None, None)
Globals.change_source_perms = None
Time.setcurtime()
self.Mirror(self.one_unreadable, self.one_unreadable_out)
Globals.change_source_perms = 1
self.Mirror(self.one_unreadable, self.one_unreadable_out)
# Could add test, but for now just make sure it doesn't exit
def testPermSkipRemote(self):
"""Test skip of unreadable files remote"""
self.setPathnames('test1', '../', 'test2/tmp', '../../')
Globals.change_source_perms = None
Time.setcurtime()
self.Mirror(self.one_unreadable, self.one_unreadable_out)
Globals.change_source_perms = 1
self.Mirror(self.one_unreadable, self.one_unreadable_out)
# Could add test, but for now just make sure it doesn't exit
def refresh(self, *rps):
...@@ -294,8 +288,6 @@ class MirrorTest(PathSetter):
def deleteoutput(self):
assert not os.system("rm -rf testfiles/output*")
self.rbdir = self.rpout.append('rdiff-backup-data')
self.rpout.mkdir()
self.rbdir.mkdir()
self.reset_rps()
def reset_rps(self):
...@@ -309,8 +301,9 @@ class MirrorTest(PathSetter):
rp.setdata()
def runtest(self):
self.deleteoutput()
Time.setcurtime()
assert self.rbdir.lstat()
assert not self.rbdir.lstat()
self.Mirror(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
...@@ -328,6 +321,7 @@ class MirrorTest(PathSetter):
self.Mirror(self.inc1rp, self.rpout)
#rpath.RPath.copy_attribs(self.inc1rp, self.rpout)
assert CompareRecursive(Local.inc1rp, Local.rpout)
Myrm(Local.rpout.append("rdiff-backup-data").path)
self.Mirror(self.inc2rp, self.rpout)
assert CompareRecursive(Local.inc2rp, Local.rpout)
...@@ -335,11 +329,13 @@ class MirrorTest(PathSetter):
def Mirror(self, rpin, rpout):
"""Like backup.Mirror, but setup first, cleanup later"""
Main.force = 1
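# Mirror() now assumes a bare destination: any existing
# rdiff-backup-data directory must be removed by the caller first.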
assert not rpout.append("rdiff-backup-data").lstat()
Main.misc_setup([rpin, rpout])
Main.backup_set_select(rpin)
Main.backup_init_dirs(rpin, rpout)
backup.Mirror(rpin, rpout)
Log.close_logfile()
log.ErrorLog.close()
log.Log.close_logfile()
Hardlink.clear_dictionaries()
if __name__ == "__main__": unittest.main()
...@@ -4,42 +4,6 @@ from commontest import *
from rdiff_backup import rpath, robust, TempFile, Globals
class TempFileTest(unittest.TestCase):
"""Test creation and management of tempfiles in TempFile module"""
rp_base = rpath.RPath(Globals.local_connection,
"./testfiles/robust/testfile_base")
def testBasic(self):
"""Make a temp file, write to it, and then delete it
Also test tempfile accounting and file name prefixing.
"""
assert not TempFile._tempfiles
tf = TempFile.new(self.rp_base)
assert TempFile._tempfiles == [tf]
assert tf.dirsplit()[0] == "testfiles/robust", tf.dirsplit()[0]
assert not tf.lstat()
fp = tf.open("w")
fp.write("hello")
assert not fp.close()
fp = tf.open("r")
assert fp.read() == "hello"
assert not fp.close()
tf.delete()
assert not TempFile._tempfiles
def testRename(self):
"""Test renaming of tempfile"""
tf = TempFile.new(self.rp_base)
assert TempFile._tempfiles
tf.touch()
destination = rpath.RPath(Globals.local_connection,
"./testfiles/robust/testfile_dest")
tf.rename(destination)
assert not TempFile._tempfiles
assert destination.lstat()
destination.delete()
class RobustTest(unittest.TestCase):
"""Test robust module"""
def test_check_common_error(self):
...
...@@ -10,7 +10,7 @@ that are meant to be run as root.
Globals.set('change_source_perms', None)
Globals.counter = 0
log.Log.setverbosity(4)
log.Log.setverbosity(6)
def Run(cmd):
print "Running: ", cmd
...@@ -33,9 +33,9 @@ class NonRoot(unittest.TestCase):
"""
user = 'ben'
def make_root_dir(self):
def make_root_dirs(self):
"""Make directory createable only by root"""
rp = rpath.RPath(Globals.local_connection, "testfiles/root_out")
rp = rpath.RPath(Globals.local_connection, "testfiles/root_out1")
if rp.lstat(): Myrm(rp.path)
rp.mkdir()
rp1 = rp.append("1")
...@@ -48,39 +48,59 @@ class NonRoot(unittest.TestCase):
rp3.chown(2, 2)
rp4 = rp.append("dev")
rp4.makedev('c', 4, 28)
return rp
sp = rpath.RPath(Globals.local_connection, "testfiles/root_out2")
if sp.lstat(): Myrm(sp.path)
Run("cp -a %s %s" % (rp.path, sp.path))
rp2 = sp.append("2")
rp2.chown(2, 2)
rp3 = sp.append("3")
rp3.chown(1, 1)
assert not CompareRecursive(rp, sp, compare_ownership = 1)
return rp, sp
def backup(self, input_rp, output_rp, time):
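# Presumably --no-compare-inode is needed here because root_out2 is a
# fresh copy of root_out1 backed up into the same destination, so inode
# numbers alone would flag every file as changed.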
backup_cmd = ("rdiff-backup --no-compare-inode "
"--current-time %s %s %s" %
(time, input_rp.path, output_rp.path))
Run("su %s -c '%s'" % (self.user, backup_cmd))
def restore(self, dest_rp, restore_rp, time = None):
assert restore_rp.path == "testfiles/rest_out"
Myrm(restore_rp.path)
if time is None: time = "now"
restore_cmd = "rdiff-backup -r %s %s %s" % (time, dest_rp.path,
restore_rp.path,)
Run(restore_cmd)
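# restore() always restores into testfiles/rest_out; a numeric time
# such as 1000000 selects that earlier session, while the default
# "now" selects the most recent one.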
def test_non_root(self):
"""Main non-root -> root test"""
Myrm("testfiles/output")
input_rp = self.make_root_dir()
input_rp1, input_rp2 = self.make_root_dirs()
Globals.change_ownership = 1
output_rp = rpath.RPath(Globals.local_connection, "testfiles/output")
restore_rp = rpath.RPath(Globals.local_connection,
"testfiles/rest_out")
empty_rp = rpath.RPath(Globals.local_connection, "testfiles/empty")
backup_cmd = "rdiff-backup %s %s" % (input_rp.path, output_rp.path) self.backup(input_rp1, output_rp, 1000000)
Run("su %s -c '%s'" % (self.user, backup_cmd)) self.restore(output_rp, restore_rp)
assert CompareRecursive(input_rp1, restore_rp, compare_ownership = 1)
Myrm("testfiles/rest_out") self.backup(input_rp2, output_rp, 2000000)
restore_cmd = "rdiff-backup -r now %s %s" % (output_rp.path, self.restore(output_rp, restore_rp)
restore_rp.path,) assert CompareRecursive(input_rp2, restore_rp, compare_ownership = 1)
Run(restore_cmd)
assert CompareRecursive(input_rp, restore_rp)
backup_cmd = "rdiff-backup %s %s" % (empty_rp.path, output_rp.path) self.backup(empty_rp, output_rp, 3000000)
Run("su %s -c '%s'" % (self.user, backup_cmd)) self.restore(output_rp, restore_rp)
assert CompareRecursive(empty_rp, restore_rp, compare_ownership = 1)
self.restore(output_rp, restore_rp, 1000000)
assert CompareRecursive(input_rp1, restore_rp, compare_ownership = 1)
Myrm("testfiles/rest_out") self.restore(output_rp, restore_rp, 2000000)
Run(restore_cmd) assert CompareRecursive(input_rp2, restore_rp, compare_ownership = 1)
assert CompareRecursive(empty_rp, restore_rp)
Myrm("testfiles/rest_out")
restore_cmd = "rdiff-backup -r 1 %s %s" % (output_rp.path,
restore_rp.path,)
Run(restore_cmd)
assert CompareRecursive(input_rp, restore_rp)
if __name__ == "__main__": unittest.main() if __name__ == "__main__": unittest.main()