Commit ce46e33f authored by bescoto's avatar bescoto

Many changes - added extended attribute support and file system ability detection


git-svn-id: http://svn.savannah.nongnu.org/svn/rdiff-backup@334 2b77aa54-bcbc-44c9-a7ec-4f6cf2b41109
parent 6ed5f128
Use ctime to check whether files have been changed
Include some option to summarize space taken up
---------[ Medium term ]---------------------------------------
......
......@@ -56,23 +56,14 @@ ability to restore previous versions of that file.
.SH OPTIONS
.TP
.B -b, --backup-mode
Force backup mode even if first argument appears to be an increment file.
Force backup mode even if first argument appears to be an increment or
mirror file.
.TP
.B --calculate-average
Enter calculate average mode. The arguments should be a number of
statistics files. rdiff-backup will print the average of the listed
statistics files and exit.
.TP
.BI "--chars-to-quote " chars
If this option is set, any characters in
.I chars
present in filenames on the source side will be quoted on the
destination side, so that they do not appear in filenames on the
remote side. See
.B --quoting-char
and
.BR --windows-mode .
.TP
.B --check-destination-dir
If an rdiff-backup session fails, running rdiff-backup with this
option on the destination dir will undo the failed directory. This
......@@ -132,8 +123,7 @@ See the
section for more information.
.TP
.B --exclude-special-files
Exclude all device files, fifos, sockets, and symlinks. This option
is implied by --windows-mode.
Exclude all device files, fifos, sockets, and symlinks.
.TP
.B --force
Authorize the updating or overwriting of a destination path.
......@@ -202,7 +192,7 @@ In this mode rdiff-backup is similar to rsync (but usually
slower).
.TP
.B --no-compare-inode
This relative esoteric option prevents rdiff-backup from flagging a
This relatively esoteric option prevents rdiff-backup from flagging a
file as changed when its inode changes. This option may be useful if
you are backing up two different directories to the same rdiff-backup
destination directory. The downside is that hard link information may
......@@ -255,13 +245,6 @@ session statistics file. See the
.B STATISTICS
section for more information.
.TP
.BI "--quoting-char " char
Use the specified character for quoting characters specified to be
escaped by the
.B --chars-to-quote
option. The default is the semicolon ";". See also
.BR --windows-mode .
.TP
.BI "-r, --restore-as-of " restore_time
Restore the specified directory as it was as of
.IR restore_time .
......@@ -273,10 +256,6 @@ and see the
.B RESTORING
section for more information on restoring.
.TP
.BI "--remote-cmd " command
This command has been deprecated as of version 0.4.1. Use
--remote-schema instead.
.TP
.BI "--remote-schema " schema
Specify an alternate method of connecting to a remote computer. This
is necessary to get rdiff-backup not to use ssh for remote backups, or
......@@ -354,20 +333,6 @@ is noisiest). This determines how much is written to the log file.
.TP
.B "-V, --version"
Print the current version and exit
.TP
.B --windows-mode
This option quotes characters not allowable on windows, and does not
try to preserve ownership, hardlinks, or permissions on the
destination side. It is appropriate when backing up a normal unix
file system to a windows one such as VFS, or a file system with
similar limitations. Because metadata is stored in a separate regular
file, this option does not prevent all data from being restored.
.TP
.B --windows-restore
This option turns on windows quoting, but does not disable
permissions, hard linking, or ownership. Use this when restoring from
an rdiff-backup directory on a windows file system to a unix file
system.
.SH EXAMPLES
Simplest case---backup directory foo to directory bar, with increments
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -29,7 +29,7 @@ handle that error.)
"""
import re
import re, types
import Globals, log, rpath
max_filename_length = 255
......@@ -66,12 +66,14 @@ def set_init_quote_vals_local():
def init_quoting_regexps():
"""Compile quoting regular expressions"""
global chars_to_quote_regexp, unquoting_regexp
assert chars_to_quote and type(chars_to_quote) is types.StringType, \
"Chars to quote: '%s'" % (chars_to_quote,)
try:
chars_to_quote_regexp = \
re.compile("[%s]|%s" % (chars_to_quote, quoting_char), re.S)
unquoting_regexp = re.compile("%s[0-9]{3}" % quoting_char, re.S)
except re.error:
log.Log.FatalError("Error '%s' when processing char quote list %s" %
log.Log.FatalError("Error '%s' when processing char quote list '%s'" %
(re.error, chars_to_quote))
def quote(path):
......@@ -131,8 +133,16 @@ class QuotedRPath(rpath.RPath):
def isincfile(self):
"""Return true if path indicates increment, sets various variables"""
result = rpath.RPath.isincfile(self)
if result: self.inc_basestr = unquote(self.inc_basestr)
if not self.index: # consider the last component as quoted
dirname, basename = self.dirsplit()
temp_rp = rpath.RPath(self.conn, dirname, (unquote(basename),))
result = temp_rp.isincfile()
if result:
self.inc_basestr = unquote(temp_rp.inc_basestr)
self.inc_timestr = unquote(temp_rp.inc_timestr)
else:
result = rpath.RPath.isincfile(self)
if result: self.inc_basestr = unquote(self.inc_basestr)
return result
def get_quotedrpath(rp, separate_basename = 0):
......
......@@ -66,6 +66,14 @@ change_mirror_perms = (process_uid != 0)
# If true, try to reset the atimes of the source partition.
preserve_atime = None
# If true, save the extended attributes when backing up.
read_eas = None
# If true, preserve the extended attributes on the mirror directory
# when backing up, or write them to the restore directory. This
# implies read_eas.
write_eas = None
# This will be set as soon as the LocalConnection class loads
local_connection = None
......@@ -112,10 +120,12 @@ rbdir = None
# quoting_enabled is true if we should quote certain characters in
# filenames on the source side (see FilenameMapping for more
# info). chars_to_quote is a string whose characters should be
# quoted, and quoting_char is the character to quote with.
quoting_enabled = None
chars_to_quote = "A-Z:"
# info).
# chars_to_quote is a string whose characters should be quoted. It
# should be true if certain characters in filenames on the source side
# should be escaped (see FilenameMapping for more info).
chars_to_quote = None
quoting_char = ';'
# If true, emit output intended to be easily readable by a
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -24,7 +24,7 @@ import getopt, sys, re, os
from log import Log, LoggerError, ErrorLog
import Globals, Time, SetConnections, selection, robust, rpath, \
manage, backup, connection, restore, FilenameMapping, \
Security, Hardlink, regress, C
Security, Hardlink, regress, C, fs_abilities
action = None
......@@ -32,6 +32,9 @@ remote_cmd, remote_schema = None, None
force = None
select_opts = []
select_files = []
# These are global because they are set while we are trying to figure
# whether to restore or to backup
restore_root, restore_index, restore_root_set = None, None, 0
def parse_cmdlineoptions(arglist):
"""Parse argument list and set global preferences"""
......@@ -43,24 +46,24 @@ def parse_cmdlineoptions(arglist):
except IOError: Log.FatalError("Error opening file %s" % filename)
try: optlist, args = getopt.getopt(arglist, "blr:sv:V",
["backup-mode", "calculate-average", "chars-to-quote=",
"check-destination-dir", "current-time=", "exclude=",
"exclude-device-files", "exclude-filelist=",
"exclude-filelist-stdin", "exclude-globbing-filelist=",
"exclude-mirror=", "exclude-other-filesystems",
"exclude-regexp=", "exclude-special-files", "force",
"include=", "include-filelist=", "include-filelist-stdin",
["backup-mode", "calculate-average", "check-destination-dir",
"current-time=", "exclude=", "exclude-device-files",
"exclude-filelist=", "exclude-filelist-stdin",
"exclude-globbing-filelist=", "exclude-mirror=",
"exclude-other-filesystems", "exclude-regexp=",
"exclude-special-files", "force", "include=",
"include-filelist=", "include-filelist-stdin",
"include-globbing-filelist=", "include-regexp=",
"list-at-time=", "list-changed-since=", "list-increments",
"no-compare-inode", "no-compression",
"no-compression-regexp=", "no-file-statistics",
"no-hard-links", "null-separator", "parsable-output",
"print-statistics", "quoting-char=", "remote-cmd=",
"remote-schema=", "remove-older-than=", "restore-as-of=",
"restrict=", "restrict-read-only=", "restrict-update-only=",
"server", "ssh-no-compression", "terminal-verbosity=",
"test-server", "verbosity=", "version", "windows-mode",
"windows-restore"])
"no-hard-links", "null-separator",
"override-chars-to-quote=", "parsable-output",
"print-statistics", "remote-cmd=", "remote-schema=",
"remove-older-than=", "restore-as-of=", "restrict=",
"restrict-read-only=", "restrict-update-only=", "server",
"ssh-no-compression", "terminal-verbosity=", "test-server",
"verbosity=", "version"])
except getopt.error, e:
commandline_error("Bad commandline options: %s" % str(e))
......@@ -68,9 +71,6 @@ def parse_cmdlineoptions(arglist):
if opt == "-b" or opt == "--backup-mode": action = "backup"
elif opt == "--calculate-average": action = "calculate-average"
elif opt == "--check-destination-dir": action = "check-destination-dir"
elif opt == "--chars-to-quote":
Globals.set('chars_to_quote', arg)
Globals.set('quoting_enabled', 1)
elif opt == "--current-time":
Globals.set_integer('current_time', arg)
elif opt == "--exclude": select_opts.append((opt, arg))
......@@ -112,11 +112,10 @@ def parse_cmdlineoptions(arglist):
elif opt == "--no-file-statistics": Globals.set('file_statistics', 0)
elif opt == "--no-hard-links": Globals.set('preserve_hardlinks', 0)
elif opt == "--null-separator": Globals.set("null_separator", 1)
elif opt == "--override-chars-to-quote":
Globals.set('chars_to_quote', arg)
elif opt == "--parsable-output": Globals.set('parsable_output', 1)
elif opt == "--print-statistics": Globals.set('print_statistics', 1)
elif opt == "--quoting-char":
Globals.set('quoting_char', arg)
Globals.set('quoting_enabled', 1)
elif opt == "-r" or opt == "--restore-as-of":
restore_timestr, action = arg, "restore-as-of"
elif opt == "--remote-cmd": remote_cmd = arg
......@@ -142,55 +141,30 @@ def parse_cmdlineoptions(arglist):
print "rdiff-backup " + Globals.version
sys.exit(0)
elif opt == "-v" or opt == "--verbosity": Log.setverbosity(arg)
elif opt == "--windows-mode":
Globals.set('chars_to_quote', "^a-z0-9._ -")
Globals.set('quoting_enabled', 1)
Globals.set('preserve_hardlinks', 0)
Globals.set('change_ownership', 0)
Globals.set('change_permissions', 0)
Globals.set('fsync_directories', 0)
elif opt == '--windows-restore':
Globals.set('chars_to_quote', "^a-z0-9._ -")
Globals.set('quoting_enabled', 1)
else: Log.FatalError("Unknown option %s" % opt)
def isincfilename(path):
    """Return true if path names a (possibly quoted) increment file"""
    inc_rp = rpath.RPath(Globals.local_connection, path)
    if not Globals.quoting_enabled:
        return inc_rp.isincfile()
    # Quoting is enabled: make sure the quoting values are initialized,
    # then test the quoted version of the path instead.
    if not FilenameMapping.quoting_char:
        FilenameMapping.set_init_quote_vals()
    quoted_rp = FilenameMapping.get_quotedrpath(inc_rp, separate_basename = 1)
    return quoted_rp.isincfile()
def set_action():
"""Check arguments and try to set action"""
def check_action():
"""Check to make sure action is compatible with args"""
global action
arg_action_dict = {0: ['server'],
1: ['list-increments', 'remove-older-than',
'list-at-time', 'list-changed-since',
'check-destination-dir'],
2: ['backup', 'restore', 'restore-as-of']}
l = len(args)
if not action:
if not action: assert l == 2, args # cannot tell backup or restore yet
elif action == 'calculate-average':
if l == 0: commandline_error("No arguments given")
elif l == 1: action = "restore"
elif l == 2:
if isincfilename(args[0]): action = "restore"
else: action = "backup"
else: commandline_error("Too many arguments given")
if l == 0 and action != "server":
commandline_error("No arguments given")
if l > 0 and action == "server":
commandline_error("Too many arguments given")
if l < 2 and (action == "backup" or action == "restore-as-of"):
commandline_error("Two arguments are required (source, destination).")
if l == 2 and (action == "list-increments" or
action == "remove-older-than" or
action == "list-at-time" or
action == "list-changed-since" or
action == "check-destination-dir"):
commandline_error("Only use one argument, "
"the root of the backup directory")
if l > 2 and action != "calculate-average":
commandline_error("Too many arguments given")
elif l > 2 or action not in arg_action_dict[l]:
commandline_error("Wrong number of arguments given. See man page.")
def final_set_action(rps):
    """If no action was set earlier, choose backup or restore now"""
    global action
    if not action:
        # Exactly two paths must be present to decide between the two modes
        assert len(rps) == 2, rps
        # A recognizable rdiff-backup source directory means we are restoring
        if restore_get_root(rps[0]):
            action = "restore"
        else:
            action = "backup"
def commandline_error(message):
sys.stderr.write("Error: %s\n" % message)
......@@ -201,7 +175,6 @@ def misc_setup(rps):
"""Set default change ownership flag, umask, relay regexps"""
os.umask(077)
Time.setcurtime(Globals.current_time)
FilenameMapping.set_init_quote_vals()
SetConnections.UpdateGlobal("client_conn", Globals.local_connection)
Globals.postset_regexp('no_compression_regexp',
Globals.no_compression_regexp_string)
......@@ -216,7 +189,7 @@ def take_action(rps):
sys.exit(0)
elif action == "backup": Backup(rps[0], rps[1])
elif action == "restore": Restore(*rps)
elif action == "restore-as-of": RestoreAsOf(rps[0], rps[1])
elif action == "restore-as-of": Restore(rps[0], rps[1], 1)
elif action == "test-server": SetConnections.TestConnections()
elif action == "list-at-time": ListAtTime(rps[0])
elif action == "list-changed-since": ListChangedSince(rps[0])
......@@ -236,10 +209,11 @@ def cleanup():
def Main(arglist):
"""Start everything up!"""
parse_cmdlineoptions(arglist)
set_action()
check_action()
cmdpairs = SetConnections.get_cmd_pairs(args, remote_schema, remote_cmd)
Security.initialize(action, cmdpairs)
Security.initialize(action or "mirror", cmdpairs)
rps = map(SetConnections.cmdpair2rp, cmdpairs)
final_set_action(rps)
misc_setup(rps)
take_action(rps)
cleanup()
......@@ -247,11 +221,15 @@ def Main(arglist):
def Backup(rpin, rpout):
"""Backup, possibly incrementally, src_path to dest_path."""
if Globals.quoting_enabled:
rpout = FilenameMapping.get_quotedrpath(rpout)
SetConnections.BackupInitConnections(rpin.conn, rpout.conn)
backup_check_dirs(rpin, rpout)
backup_set_fs_globals(rpin, rpout)
if Globals.chars_to_quote:
rpout = FilenameMapping.get_quotedrpath(rpout)
SetConnections.UpdateGlobal(
'rbdir', FilenameMapping.get_quotedrpath(Globals.rbdir))
backup_set_rbdir(rpin, rpout)
backup_set_select(rpin)
backup_init_dirs(rpin, rpout)
if prevtime:
rpout.conn.Main.backup_touch_curmirror_local(rpin, rpout)
Time.setprevtime(prevtime)
......@@ -266,32 +244,39 @@ def backup_set_select(rpin):
rpin.conn.backup.SourceStruct.set_source_select(rpin, select_opts,
*select_files)
def backup_init_dirs(rpin, rpout):
"""Make sure rpin and rpout are valid, init data dir and logging"""
global datadir, incdir, prevtime
def backup_check_dirs(rpin, rpout):
"""Make sure in and out dirs exist and are directories"""
if rpout.lstat() and not rpout.isdir():
if not force: Log.FatalError("Destination %s exists and is not a "
"directory" % rpout.path)
else:
Log("Deleting %s" % rpout.path, 3)
rpout.delete()
if not rpout.lstat():
try: rpout.mkdir()
except os.error:
Log.FatalError("Unable to create directory %s" % rpout.path)
if not rpin.lstat():
Log.FatalError("Source directory %s does not exist" % rpin.path)
elif not rpin.isdir():
Log.FatalError("Source %s is not a directory" % rpin.path)
backup_warn_if_infinite_regress(rpin, rpout)
Globals.rbdir = rpout.append_path("rdiff-backup-data")
datadir = rpout.append_path("rdiff-backup-data")
SetConnections.UpdateGlobal('rbdir', datadir)
def backup_set_rbdir(rpin, rpout):
"""Initialize data dir and logging"""
global incdir, prevtime
SetConnections.UpdateGlobal('rbdir', Globals.rbdir)
checkdest_if_necessary(rpout)
incdir = datadir.append_path("increments")
incdir = Globals.rbdir.append_path("increments")
prevtime = backup_get_mirrortime()
if rpout.lstat():
if rpout.isdir() and not rpout.listdir(): # rpout is empty dir
if Globals.change_permissions:
rpout.chmod(0700) # just make sure permissions aren't too lax
elif not datadir.lstat() and not force: Log.FatalError(
assert rpout.lstat(), (rpout.path, rpout.lstat())
if rpout.isdir() and not rpout.listdir(): # rpout is empty dir
if Globals.change_permissions:
rpout.chmod(0700) # just make sure permissions aren't too lax
elif not Globals.rbdir.lstat() and not force: Log.FatalError(
"""Destination directory
%s
......@@ -301,17 +286,12 @@ rdiff-backup like this could mess up what is currently in it. If you
want to update or overwrite it, run rdiff-backup with the --force
option.""" % rpout.path)
if not rpout.lstat():
try: rpout.mkdir()
except os.error:
Log.FatalError("Unable to create directory %s" % rpout.path)
if not datadir.lstat(): datadir.mkdir()
inc_base = datadir.append_path("increments")
if not Globals.rbdir.lstat(): Globals.rbdir.mkdir()
inc_base = Globals.rbdir.append_path("increments")
if not inc_base.lstat(): inc_base.mkdir()
if Log.verbosity > 0:
Log.open_logfile(datadir.append("backup.log"))
Log.open_logfile(Globals.rbdir.append("backup.log"))
ErrorLog.open(Time.curtimestr, compress = Globals.compression)
backup_warn_if_infinite_regress(rpin, rpout)
def backup_warn_if_infinite_regress(rpin, rpout):
"""Warn user if destination area contained in source area"""
......@@ -336,6 +316,25 @@ def backup_get_mirrortime():
if mirror_rps: return mirror_rps[0].getinctime()
else: return None
def backup_set_fs_globals(rpin, rpout):
    """Use fs_abilities to set the globals that depend on filesystem"""
    update = SetConnections.UpdateGlobal
    # Source side: probe read-only filesystem abilities
    src_fsa = fs_abilities.FSAbilities().init_readonly(rpin)
    update('read_acls', src_fsa.acls)
    if src_fsa.eas: rpin.get_ea()
    update('read_eas', src_fsa.eas)
    # Destination side: probe read-write abilities in the data directory
    dest_fsa = fs_abilities.FSAbilities().init_readwrite(
        Globals.rbdir, override_chars_to_quote = Globals.chars_to_quote)
    update('preserve_hardlinks', dest_fsa.hardlinks)
    update('fsync_directories', dest_fsa.fsync_dirs)
    update('write_acls', dest_fsa.acls)
    # Only write EAs to the mirror when they can also be read at the source
    update('write_eas', Globals.read_eas and dest_fsa.eas)
    update('change_ownership', dest_fsa.ownership)
    update('chars_to_quote', dest_fsa.chars_to_quote)
    if Globals.chars_to_quote:
        for conn in Globals.connections:
            conn.FilenameMapping.set_init_quote_vals()
def backup_touch_curmirror_local(rpin, rpout):
"""Make a file like current_mirror.time.data to record time
......@@ -367,40 +366,56 @@ def backup_remove_curmirror_local():
older_inc.delete()
def Restore(src_rp, dest_rp = None):
def Restore(src_rp, dest_rp, restore_as_of = None):
"""Main restoring function
Here src_rp should be an increment file, and if dest_rp is
missing it defaults to the base of the increment.
Here src_rp should be the source file (either an increment or
mirror file), dest_rp should be the target rp to be written.
"""
rpin, rpout = restore_check_paths(src_rp, dest_rp)
restore_common(rpin, rpout, rpin.getinctime())
def RestoreAsOf(rpin, target):
"""Secondary syntax for restore operation
rpin - RPath of mirror file to restore (not nec. with correct index)
target - RPath of place to put restored file
"""
rpin, rpout = restore_check_paths(rpin, target, 1)
try: time = Time.genstrtotime(restore_timestr)
except Time.TimeException, exc: Log.FatalError(str(exc))
restore_common(rpin, target, time)
def restore_common(rpin, target, time):
"""Restore operation common to Restore and RestoreAsOf"""
if target.conn.os.getuid() == 0:
SetConnections.UpdateGlobal('change_ownership', 1)
mirror_root, index = restore_get_root(rpin)
restore_check_backup_dir(mirror_root)
mirror = mirror_root.new_index(index)
inc_rpath = datadir.append_path('increments', index)
restore_set_select(mirror_root, target)
restore_start_log(rpin, target, time)
restore.Restore(mirror, inc_rpath, target, time)
Log("Restore ended", 4)
if not restore_root_set: assert restore_get_root(src_rp)
restore_check_paths(src_rp, dest_rp, restore_as_of)
restore_set_fs_globals(Globals.rbdir)
src_rp = restore_init_quoting(src_rp)
restore_check_backup_dir(restore_root, src_rp, restore_as_of)
if restore_as_of:
try: time = Time.genstrtotime(restore_timestr)
except Time.TimeException, exc: Log.FatalError(str(exc))
else: time = src_rp.getinctime()
inc_rpath = Globals.rbdir.append_path('increments', restore_index)
restore_set_select(restore_root, dest_rp)
restore_start_log(src_rp, dest_rp, time)
restore.Restore(restore_root.new_index(restore_index),
inc_rpath, dest_rp, time)
Log("Restore finished", 4)
def restore_init_quoting(src_rp):
    """Change rpaths into quoted versions of themselves if necessary"""
    global restore_root
    if not Globals.chars_to_quote:
        return src_rp  # no quoting required, keep paths as they are
    # Initialize quoting values on every connection before quoting any path
    for conn in Globals.connections:
        conn.FilenameMapping.set_init_quote_vals()
    quote = FilenameMapping.get_quotedrpath
    restore_root = quote(restore_root)
    SetConnections.UpdateGlobal('rbdir', quote(Globals.rbdir))
    return quote(src_rp)
def restore_set_fs_globals(target):
    """Use fs_abilities to set the globals that depend on filesystem"""
    update = SetConnections.UpdateGlobal
    # Probe the restore target; its ACL/EA support governs reads and writes
    target_fsa = fs_abilities.FSAbilities().init_readwrite(target, 0)
    for setting in ('read_acls', 'write_acls'):
        update(setting, target_fsa.acls)
    for setting in ('read_eas', 'write_eas'):
        update(setting, target_fsa.eas)
    if Globals.read_eas: target.get_ea()
    update('preserve_hardlinks', target_fsa.hardlinks)
    update('change_ownership', target_fsa.ownership)
    # Quoting comes from the mirror side unless it was already overridden
    mirror_fsa = fs_abilities.FSAbilities().init_readwrite(Globals.rbdir)
    if Globals.chars_to_quote is None:
        update('chars_to_quote', mirror_fsa.chars_to_quote or "")
def restore_set_select(mirror_rp, target):
"""Set the selection iterator on mirror side from command line args
......@@ -416,7 +431,7 @@ def restore_set_select(mirror_rp, target):
def restore_start_log(rpin, target, time):
"""Open restore log file, log initial message"""
try: Log.open_logfile(datadir.append("restore.log"))
try: Log.open_logfile(Globals.rbdir.append("restore.log"))
except LoggerError, e: Log("Warning, " + str(e), 2)
# Log following message at file verbosity 3, but term verbosity 4
......@@ -430,34 +445,29 @@ def restore_check_paths(rpin, rpout, restoreasof = None):
if not restoreasof:
if not rpin.lstat():
Log.FatalError("Source file %s does not exist" % rpin.path)
if Globals.quoting_enabled:
rpin = FilenameMapping.get_quotedrpath(rpin, 1)
if not rpin.isincfile():
Log.FatalError("""File %s does not look like an increment file.
Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % rpin.path)
if not rpout: rpout = rpath.RPath(Globals.local_connection,
rpin.getincbase_str())
if rpout.lstat() and not force:
if not force and rpout.lstat() and (not rpout.isdir() or rpout.listdir()):
Log.FatalError("Restore target %s already exists, "
"specify --force to overwrite." % rpout.path)
return rpin, rpout
def restore_check_backup_dir(rpin):
def restore_check_backup_dir(mirror_root, src_rp, restore_as_of):
"""Make sure backup dir root rpin is in consistent state"""
result = checkdest_need_check(rpin)
if not restore_as_of and not src_rp.isincfile():
Log.FatalError("""File %s does not look like an increment file.
Try restoring from an increment file (the filenames look like
"foobar.2001-09-01T04:49:04-07:00.diff").""" % src_rp.path)
result = checkdest_need_check(mirror_root)
if result is None:
Log.FatalError("%s does not appear to be an rdiff-backup directory."
% (rpin.path,))
% (Globals.rbdir.path,))
elif result == 1: Log.FatalError(
"Previous backup to %s seems to have failed."
"Rerun rdiff-backup with --check-destination-dir option to revert"
"directory to state before unsuccessful session." % (rpin.path,))
"Previous backup to %s seems to have failed.\nRerun rdiff-backup "
"rdiff-with --check-destination-dir option to revert directory "
"to state before unsuccessful session." % (mirror_root.path,))
def restore_get_root(rpin):
"""Return (mirror root, index) and set the data dir
"""Set data dir, restore_root and index, or return None if fail
The idea here is to keep backing up on the path until we find
a directory that contains "rdiff-backup-data". That is the
......@@ -470,7 +480,7 @@ def restore_get_root(rpin):
funny way, using symlinks or somesuch.
"""
global datadir
global restore_root, restore_index
if rpin.isincfile(): relpath = rpin.getincbase().path
else: relpath = rpin.path
pathcomps = os.path.join(rpin.conn.os.getcwd(), relpath).split("/")
......@@ -482,23 +492,25 @@ def restore_get_root(rpin):
if (parent_dir.isdir() and
"rdiff-backup-data" in parent_dir.listdir()): break
i = i-1
else: Log.FatalError("Unable to find rdiff-backup-data directory")
if not Globals.quoting_enabled: rootrp = parent_dir
else: rootrp = FilenameMapping.get_quotedrpath(parent_dir)
Log("Using mirror root directory %s" % rootrp.path, 6)
else: return None
datadir = rootrp.append_path("rdiff-backup-data")
SetConnections.UpdateGlobal('rbdir', datadir)
if not datadir.isdir():
restore_root = parent_dir
Log("Using mirror root directory %s" % restore_root.path, 6)
SetConnections.UpdateGlobal('rbdir',
restore_root.append_path("rdiff-backup-data"))
if not Globals.rbdir.isdir():
Log.FatalError("Unable to read rdiff-backup-data directory %s" %
datadir.path)
Globals.rbdir.path)
from_datadir = tuple(pathcomps[i:])
if not from_datadir or from_datadir[0] != "rdiff-backup-data":
return (rootrp, from_datadir) # in mirror, not increments
assert from_datadir[1] == "increments"
return (rootrp, from_datadir[2:])
restore_index = from_datadir # in mirror, not increments
else:
assert (from_datadir[1] == "increments" or
(len(from_datadir) == 2 and
from_datadir[1].startswith('increments'))), from_datadir
restore_index = from_datadir[2:]
return 1
def ListIncrements(rp):
......@@ -555,7 +567,7 @@ def rom_check_dir(rootrp):
rootrp.append_path("rdiff-backup-data"))
if not Globals.rbdir.isdir():
Log.FatalError("Unable to open rdiff-backup-data dir %s" %
(datadir.path,))
(Globals.rbdir.path,))
checkdest_if_necessary(rootrp)
......@@ -597,6 +609,9 @@ def CheckDest(dest_rp):
def checkdest_need_check(dest_rp):
"""Return None if no dest dir found, 1 if dest dir needs check, 0 o/w"""
if not dest_rp.isdir() or not Globals.rbdir.isdir(): return None
if Globals.rbdir.listdir() == ['chars_to_quote']:
# This may happen the first backup just after we test for quoting
return None
curmirroot = Globals.rbdir.append("current_mirror")
curmir_incs = restore.get_inclist(curmirroot)
if not curmir_incs:
......
......@@ -76,8 +76,9 @@ def set_security_level(action, cmdpairs):
rdir = tempfile.gettempdir()
elif islocal(cp1):
sec_level = "read-only"
rdir = Main.restore_get_root(rpath.RPath(Globals.local_connection,
getpath(cp1)))[0].path
Main.restore_get_root(rpath.RPath(Globals.local_connection,
getpath(cp1)))
rdir = Main.restore_root.path
else:
assert islocal(cp2)
sec_level = "all"
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -27,7 +27,7 @@ the related connections.
import os
from log import Log
import Globals, FilenameMapping, connection, rpath
import Globals, connection, rpath
# This is the schema that determines how rdiff-backup will open a
# pipe to the remote system. If the file is given as A::B, %s will
......@@ -178,7 +178,6 @@ def init_connection_settings(conn):
conn.log.Log.setterm_verbosity(Log.term_verbosity)
for setting_name in Globals.changed_settings:
conn.Globals.set(setting_name, Globals.get(setting_name))
FilenameMapping.set_init_quote_vals()
def init_connection_remote(conn_number):
"""Run on server side to tell self that have given conn_number"""
......@@ -203,8 +202,6 @@ def BackupInitConnections(reading_conn, writing_conn):
writing_conn.Globals.set("isbackup_writer", 1)
UpdateGlobal("backup_reader", reading_conn)
UpdateGlobal("backup_writer", writing_conn)
if writing_conn.os.getuid() == 0 and Globals.change_ownership != 0:
UpdateGlobal('change_ownership', 1)
def CloseConnections():
"""Close all connections. Run by client"""
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -22,7 +22,8 @@
from __future__ import generators
import errno
import Globals, metadata, rorpiter, TempFile, Hardlink, robust, increment, \
rpath, static, log, selection, Time, Rdiff, statistics, iterfile
rpath, static, log, selection, Time, Rdiff, statistics, iterfile, \
eas_acls
def Mirror(src_rpath, dest_rpath):
"""Turn dest_rpath into a copy of src_rpath"""
......@@ -122,16 +123,27 @@ class DestinationStruct:
destination except rdiff-backup-data directory.
"""
if use_metadata:
metadata_iter = metadata.GetMetadata_at_time(Globals.rbdir,
Time.prevtime)
def get_basic_iter():
"""Returns iterator of basic metadata"""
metadata_iter = metadata.MetadataFile.get_objects_at_time(
Globals.rbdir, Time.prevtime)
if metadata_iter: return metadata_iter
log.Log("Warning: Metadata file not found.\n"
"Metadata will be read from filesystem.", 2)
sel = selection.Select(rpath)
sel.parse_rbdir_exclude()
return sel.set_iter()
def get_iter_from_fs():
"""Get the combined iterator from the filesystem"""
sel = selection.Select(rpath)
sel.parse_rbdir_exclude()
return sel.set_iter()
if use_metadata:
if Globals.read_eas:
rorp_iter = eas_acls.ExtendedAttributesFile.\
get_combined_iter_at_time(Globals.rbdir, Time.prevtime)
else: rorp_iter = get_basic_iter()
if rorp_iter: return rorp_iter
return get_iter_from_fs()
def set_rorp_cache(cls, baserp, source_iter, for_increment):
"""Initialize cls.CCPP, the destination rorp cache
......@@ -243,7 +255,8 @@ class CacheCollatedPostProcess:
self.cache_size = cache_size
self.statfileobj = statistics.init_statfileobj()
if Globals.file_statistics: statistics.FileStats.init()
metadata.OpenMetadata()
metadata.MetadataFile.open_file()
if Globals.read_eas: eas_acls.ExtendedAttributesFile.open_file()
# the following should map indicies to lists
# [source_rorp, dest_rorp, changed_flag, success_flag, increment]
......@@ -317,7 +330,10 @@ class CacheCollatedPostProcess:
metadata_rorp = source_rorp
else: metadata_rorp = None
if metadata_rorp and metadata_rorp.lstat():
metadata.WriteMetadata(metadata_rorp)
metadata.MetadataFile.write_object(metadata_rorp)
if Globals.read_eas and not metadata_rorp.get_ea().empty():
eas_acls.ExtendedAttributesFile.write_object(
metadata_rorp.get_ea())
if Globals.file_statistics:
statistics.FileStats.update(source_rorp, dest_rorp, changed, inc)
......@@ -359,7 +375,8 @@ class CacheCollatedPostProcess:
def close(self):
"""Process the remaining elements in the cache"""
while self.cache_indicies: self.shorten_cache()
metadata.CloseMetadata()
metadata.MetadataFile.close_file()
if Globals.read_eas: eas_acls.ExtendedAttributesFile.close_file()
if Globals.print_statistics: statistics.print_active_stats()
if Globals.file_statistics: statistics.FileStats.close()
statistics.write_active_statfileobj()
......
......@@ -22,6 +22,11 @@
from __future__ import generators
import types, os, tempfile, cPickle, shutil, traceback, pickle, \
socket, sys, gzip
# The following EA and ACL modules may be used if available
try: import xattr
except ImportError: pass
try: import posix1e
except ImportError: pass
class ConnectionError(Exception): pass
......@@ -513,7 +518,7 @@ class VirtualFile:
import Globals, Time, Rdiff, Hardlink, FilenameMapping, C, Security, \
Main, rorpiter, selection, increment, statistics, manage, lazy, \
iterfile, rpath, robust, restore, manage, backup, connection, \
TempFile, SetConnections, librsync, log, regress
TempFile, SetConnections, librsync, log, regress, fs_abilities
Globals.local_connection = LocalConnection()
Globals.connections.append(Globals.local_connection)
......
# Copyright 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
# rdiff-backup is free software; you can redistribute it and/or modify
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# rdiff-backup is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rdiff-backup; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""Store and retrieve extended attributes and access control lists
Not all file systems will have EAs and ACLs, but if they do, store
this information in separate files in the rdiff-backup-data directory,
called extended_attributes.<time>.snapshot and
access_control_lists.<time>.snapshot.
"""
from __future__ import generators
import base64, errno, re
import static, Globals, metadata, connection, rorpiter, log
class ExtendedAttributes:
    """Hold a file's extended attribute information

    Maps EA names to string values for the file at self.index.  All
    filesystem access goes through an rpath's connection, so remote
    files work transparently.
    """
    def __init__(self, index, attr_dict = None):
        """Initialize EA object with no attributes"""
        self.index = index  # tuple of path components relative to backup root
        # NOTE: a caller-supplied attr_dict is stored by reference, not copied
        if attr_dict is None: self.attr_dict = {}
        else: self.attr_dict = attr_dict
    def __eq__(self, ea):
        """Equal if all attributes and index are equal"""
        assert isinstance(ea, ExtendedAttributes)
        return ea.index == self.index and ea.attr_dict == self.attr_dict
    def __ne__(self, ea): return not self.__eq__(ea)
    def get_indexpath(self): return self.index and '/'.join(self.index) or '.'
    def read_from_rp(self, rp):
        """Set the extended attributes from an rpath"""
        try: attr_list = rp.conn.xattr.listxattr(rp.path)
        except IOError, exc:
            # EOPNOTSUPP: filesystem has no EA support -- treat as empty
            if exc[0] == errno.EOPNOTSUPP: return # if not sup, consider empty
            raise
        for attr in attr_list:
            try: self.attr_dict[attr] = rp.conn.xattr.getxattr(rp.path, attr)
            except IOError, exc:
                # File probably modified while reading, just continue
                if exc[0] == errno.ENODATA: continue
                elif exc[0] == errno.ENOENT: break
                else: raise
    def clear_rp(self, rp):
        """Delete all the extended attributes in rpath"""
        for name in rp.conn.xattr.listxattr(rp.path):
            rp.conn.xattr.removexattr(rp.path, name)
    def write_to_rp(self, rp):
        """Write extended attributes to rpath rp"""
        # Clear first so attributes deleted since the backup disappear
        self.clear_rp(rp)
        for (name, value) in self.attr_dict.iteritems():
            rp.conn.xattr.setxattr(rp.path, name, value)
    def get(self, name):
        """Return attribute attached to given name"""
        return self.attr_dict[name]
    def set(self, name, value = ""):
        """Set given name to given value.  Does not write to disk"""
        self.attr_dict[name] = value
    def delete(self, name):
        """Delete value associated with given name"""
        del self.attr_dict[name]
    def empty(self):
        """Return true if no extended attributes are set"""
        return not self.attr_dict
def compare_rps(rp1, rp2):
    """Return true if rp1 and rp2 carry identical extended attributes"""
    read_eas = []
    for rp in (rp1, rp2):
        ea = ExtendedAttributes(rp.index)
        ea.read_from_rp(rp)
        read_eas.append(ea)
    return read_eas[0] == read_eas[1]
def EA2Record(ea):
    """Convert ExtendedAttributes object to text record

    Format mimics getfattr output: a "# file: <path>" header line,
    then one line per attribute.  Empty values are written as a bare
    name; non-empty values are base64 encoded with a "0s" marker.
    """
    str_list = ['# file: %s' % ea.get_indexpath()]
    for (name, val) in ea.attr_dict.iteritems():
        if not val: str_list.append(name)
        else:
            # strip the newlines base64 inserts every 76 characters
            encoded_val = base64.encodestring(val).replace('\n', '')
            str_list.append('%s=0s%s' % (name, encoded_val))
    return '\n'.join(str_list)+'\n'
def Record2EA(record):
    """Convert text record to ExtendedAttributes object"""
    lines = record.split('\n')
    header = lines.pop(0)
    if header[:8] != "# file: ":
        raise metadata.ParsingError("Bad record beginning: " + header[:8])
    filename = header[8:]
    if filename == '.': index = ()
    else: index = tuple(filename.split('/'))
    ea = ExtendedAttributes(index)
    for line in lines:
        line = line.strip()
        if not line: continue
        assert line[0] != '#', line
        eq_pos = line.find('=')
        if eq_pos < 0:
            # Bare name means an attribute with an empty value
            ea.set(line)
            continue
        name, tail = line[:eq_pos], line[eq_pos+1:]
        assert tail[:2] == '0s', \
               "Currently only base64 encoding supported"
        ea.set(name, base64.decodestring(tail[2:]))
    return ea
def quote_path(path):
    """Quote a path for use in EA/ACL records.

    Currently a no-op: quoting is disabled until the quoting style of
    getfattr/setfattr settles, at which point this should mirror it.
    """
    return path
class EAExtractor(metadata.FlatExtractor):
    """Iterate ExtendedAttributes objects from the EA information file"""
    record_boundary_regexp = re.compile(r"\n# file:")
    record_to_object = staticmethod(Record2EA)
    def get_index_re(self, index):
        """Return regexp matching the start of the record for index"""
        if index: indexpath = '/'.join(index)
        else: indexpath = '.'
        # No quoting yet, because of a bug in getfacl/setfacl.
        # Replace this once that bug is fixed.
        return re.compile(r"(^|\n)(# file: %s\n)" % indexpath)
class ExtendedAttributesFile(metadata.FlatFile):
    """Store/retrieve EAs from extended_attributes file"""
    _prefix = "extended_attributes"
    _extractor = EAExtractor
    _object_to_record = staticmethod(EA2Record)
    def get_combined_iter_at_time(cls, rbdir, rest_time,
                                  restrict_index = None):
        """Return an iter of rorps with extended attributes added

        Collates the mirror_metadata rorps at rest_time with the EA
        records of the same time; a rorp with no matching EA record
        gets an empty ExtendedAttributes object.  Returns None when
        no metadata is available at rest_time.
        """
        def join_eas(basic_iter, ea_iter):
            """Join basic_iter with ea iter"""
            collated = rorpiter.CollateIterators(basic_iter, ea_iter)
            for rorp, ea in collated:
                # Every EA record must correspond to some metadata rorp
                assert rorp, (rorp, (ea.index, ea.attr_dict), rest_time)
                if not ea: ea = ExtendedAttributes(rorp.index)
                rorp.set_ea(ea)
                yield rorp
        # NOTE(review): metadata is read from Globals.rbdir but EAs from
        # the rbdir parameter -- confirm callers always pass Globals.rbdir
        basic_iter = metadata.MetadataFile.get_objects_at_time(
            Globals.rbdir, rest_time, restrict_index)
        if not basic_iter: return None
        ea_iter = cls.get_objects_at_time(rbdir, rest_time, restrict_index)
        if not ea_iter:
            # A missing EA file is non-fatal; act as if every EA is empty
            log.Log("Warning: Extended attributes file not found", 2)
            ea_iter = iter([])
        return join_eas(basic_iter, ea_iter)

static.MakeClass(ExtendedAttributesFile)
# Copyright 2002 Ben Escoto
# Copyright 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -50,12 +50,14 @@ class FSAbilities:
Only self.acls and self.eas are set.
"""
self.root_rp = rp
self.read_only = 1
self.set_eas(rp, 0)
self.set_acls(rp)
return self
def init_readwrite(self, rbdir, use_ctq_file = 1):
def init_readwrite(self, rbdir, use_ctq_file = 1,
override_chars_to_quote = None):
"""Set variables using fs tested at rp_base
This method creates a temp directory in rp_base and writes to
......@@ -69,17 +71,21 @@ class FSAbilities:
file in directory.
"""
assert rbdir.isdir()
if not rbdir.isdir():
assert not rbdir.lstat(), (rbdir.path, rbdir.lstat())
rbdir.mkdir()
self.root_rp = rbdir
self.read_only = 0
subdir = TempFile.new_in_dir(rbdir)
subdir = rbdir.conn.TempFile.new_in_dir(rbdir)
subdir.mkdir()
self.set_ownership(subdir)
self.set_hardlinks(subdir)
self.set_fsync_dirs(subdir)
self.set_eas(subdir, 1)
self.set_acls(subdir)
self.set_chars_to_quote(subdir)
if override_chars_to_quote is None: self.set_chars_to_quote(subdir)
else: self.chars_to_quote = override_chars_to_quote
if use_ctq_file: self.compare_chars_to_quote(rbdir)
subdir.delete()
return self
......@@ -95,19 +101,14 @@ class FSAbilities:
fp.write(self.chars_to_quote)
assert not fp.close()
def get_old_chars():
fp = ctq_rp.open("rb")
old_chars = fp.read()
assert not fp.close()
return old_chars
if not ctq_rp.lstat(): write_new_chars()
else:
old_chars = get_old_chars()
old_chars = ctq_rp.get_data()
if old_chars != self.chars_to_quote:
if self.chars_to_quote == "":
log.Log("Warning: File system no longer needs quoting, "
"but will retain for backwards compatibility.", 2)
self.chars_to_quote = old_chars
else: log.FatalError("""New quoting requirements
This may be caused when you copy an rdiff-backup directory from a
......@@ -127,7 +128,7 @@ rdiff-backup-data/chars_to_quote.
except (IOError, OSError), exc:
if exc[0] == errno.EPERM:
log.Log("Warning: ownership cannot be changed on filesystem "
"at device %s" % (testdir.getdevloc(),), 2)
"at %s" % (self.root_rp.path,), 2)
self.ownership = 0
else: raise
else: self.ownership = 1
......@@ -143,21 +144,15 @@ rdiff-backup-data/chars_to_quote.
assert hl_source.getinode() == hl_dest.getinode()
except (IOError, OSError), exc:
if exc[0] in (errno.EOPNOTSUPP, errno.EPERM):
log.Log("Warning: hard linking not supported by filesystem %s"
% (testdir.getdevloc(),), 2)
log.Log("Warning: hard linking not supported by filesystem "
"at %s" % (self.root_rp.path,), 2)
self.hardlinks = 0
else: raise
else: self.hardlinks = 1
def set_fsync_dirs(self, testdir):
"""Set self.fsync_dirs if directories can be fsync'd"""
try: testdir.fsync()
except (IOError, OSError), exc:
log.Log("Warning: Directories on file system at %s are not "
"fsyncable.\nAssuming it's unnecessary." %
(testdir.getdevloc(),), 2)
self.fsync_dirs = 0
else: self.fsync_dirs = 1
self.fsync_dirs = testdir.conn.fs_abilities.test_fsync_local(testdir)
def set_chars_to_quote(self, subdir):
"""Set self.chars_to_quote by trying to write various paths"""
......@@ -189,7 +184,7 @@ rdiff-backup-data/chars_to_quote.
def sanity_check():
"""Make sure basic filenames writable"""
for filename in ['5-_ a']:
for filename in ['5-_ a.']:
rp = subdir.append(filename)
rp.touch()
assert rp.lstat()
......@@ -198,57 +193,70 @@ rdiff-backup-data/chars_to_quote.
sanity_check()
if is_case_sensitive():
if supports_unusual_chars(): self.chars_to_quote = ""
else: self.chars_to_quote = "^A-Za-z0-9_ -"
else: self.chars_to_quote = "^A-Za-z0-9_ -."
else:
if supports_unusual_chars(): self.chars_to_quote = "A-Z;"
else: self.chars_to_quote = "^a-z0-9_ -"
else: self.chars_to_quote = "^a-z0-9_ -."
def set_acls(self, rp):
"""Set self.acls based on rp. Does not write. Needs to be local"""
assert Globals.local_connection is rp.conn
assert rp.lstat()
try: import posix1e
except ImportError:
log.Log("Warning: Unable to import module posix1e from pylibacl "
"package.\nACLs not supported on device %s" %
(rp.getdevloc(),), 2)
self.acls = 0
return
try: posix1e.ACL(file=rp.path)
except IOError, exc:
if exc[0] == errno.EOPNOTSUPP:
log.Log("Warning: ACLs appear not to be supported by "
"filesystem on device %s" % (rp.getdevloc(),), 2)
self.acls = 0
else: raise
else: self.acls = 1
self.acls = rp.conn.fs_abilities.test_acls_local(rp)
def set_eas(self, rp, write):
"""Set extended attributes from rp. Run locally.
Tests writing if write is true.
"""
assert Globals.local_connection is rp.conn
assert rp.lstat()
try: import xattr
except ImportError:
log.Log("Warning: Unable to import module xattr. ACLs not "
"supported on device %s" % (rp.getdevloc(),), 2)
self.eas = 0
return
try:
xattr.listxattr(rp.path)
if write:
xattr.setxattr(rp.path, "user.test", "test val")
assert xattr.getxattr(rp.path, "user.test") == "test val"
except IOError, exc:
if exc[0] == errno.EOPNOTSUPP:
log.Log("Warning: Extended attributes not supported by "
"filesystem on device %s" % (rp.getdevloc(),), 2)
self.eas = 0
else: raise
else: self.eas = 1
"""Set extended attributes from rp. Tests writing if write is true."""
self.eas = rp.conn.fs_abilities.test_eas_local(rp, write)
def test_eas_local(rp, write):
"""Test ea support. Must be called locally. Usedy by set_eas above."""
assert Globals.local_connection is rp.conn
assert rp.lstat()
try: import xattr
except ImportError:
log.Log("Warning: Unable to import module xattr. ACLs not "
"supported on filesystem at %s" % (rp.path,), 2)
return 0
try:
xattr.listxattr(rp.path)
if write:
xattr.setxattr(rp.path, "user.test", "test val")
assert xattr.getxattr(rp.path, "user.test") == "test val"
except IOError, exc:
if exc[0] == errno.EOPNOTSUPP:
log.Log("Warning: Extended attributes not supported by "
"filesystem at %s" % (rp.path,), 2)
return 0
else: raise
else: return 1
def test_acls_local(rp):
    """Test acl support.  Call locally.  Does not write.

    Returns 1 if the filesystem holding rp supports POSIX ACLs via
    the posix1e module, 0 otherwise.
    """
    assert Globals.local_connection is rp.conn
    assert rp.lstat()
    try: import posix1e
    except ImportError:
        log.Log("Warning: Unable to import module posix1e from pylibacl "
                "package.\nACLs not supported on filesystem at %s" %
                (rp.path,), 2)
        return 0
    # Reading the file's ACL is enough to detect support
    try: posix1e.ACL(file=rp.path)
    except IOError, exc:
        if exc[0] == errno.EOPNOTSUPP:
            log.Log("Warning: ACLs appear not to be supported by "
                    "filesystem at %s" % (rp.path,), 2)
            return 0
        else: raise
    else: return 1
def test_fsync_local(rp):
    """Test fsyncing directories locally

    Returns 1 if the directory rp can be fsync'd, 0 if the attempt
    raises (some filesystems cannot fsync directories).
    """
    assert rp.conn is Globals.local_connection
    try: rp.fsync()
    except (IOError, OSError), exc:
        log.Log("Warning: Directories on file system at %s are not "
                "fsyncable.\nAssuming it's unnecessary." % (rp.path,), 2)
        return 0
    else: return 1
......@@ -56,7 +56,7 @@ field names and values.
from __future__ import generators
import re, gzip, os
import log, Globals, rpath, Time, robust, increment
import log, Globals, rpath, Time, robust, increment, static
class ParsingError(Exception):
    """Raised when bad or unparsable record data is encountered"""
......@@ -165,16 +165,14 @@ def unquote_path(quoted_string):
return re.sub("\\\\n|\\\\\\\\", replacement_func, quoted_string)
def write_rorp_iter_to_file(rorp_iter, file):
    """Write a record for each RORP in rorp_iter to open file object"""
    file.writelines(map(RORP2Record, rorp_iter))
class rorp_extractor:
"""Controls iterating rorps from metadata file"""
class FlatExtractor:
"""Controls iterating objects from flat file"""
# The following two should be set in subclasses
record_boundary_regexp = None # Matches beginning of next record
record_to_object = None # Function that converts text record to object
def __init__(self, fileobj):
self.fileobj = fileobj # holds file object we are reading from
self.buf = "" # holds the next part of the file
self.record_boundary_regexp = re.compile("\\nFile")
self.at_end = 0 # True if we are at the end of the file
self.blocksize = 32 * 1024
......@@ -191,12 +189,13 @@ class rorp_extractor:
else: self.buf += newbuf
def iterate(self):
"""Return iterator over all records"""
"""Return iterator that yields all objects with records"""
while 1:
next_pos = self.get_next_pos()
try: yield Record2RORP(self.buf[:next_pos])
try: yield self.record_to_object(self.buf[:next_pos])
except ParsingError, e:
log.Log("Error parsing metadata file: %s" % (e,), 2)
if self.at_end: break # Ignore whitespace/bad records at end
log.Log("Error parsing flat file: %s" % (e,), 2)
if self.at_end: break
self.buf = self.buf[next_pos:]
assert not self.close()
......@@ -209,15 +208,7 @@ class rorp_extractor:
"""
assert not self.buf or self.buf.endswith("\n")
if not index: indexpath = "."
else: indexpath = "/".join(index)
# Must double all backslashes, because they will be
# reinterpreted. For instance, to search for index \n
# (newline), it will be \\n (backslash n) in the file, so the
# regular expression is "File \\\\n\\n" (File two backslash n
# backslash n)
double_quote = re.sub("\\\\", "\\\\\\\\", indexpath)
begin_re = re.compile("(^|\\n)(File %s\\n)" % (double_quote,))
begin_re = self.get_index_re(index)
while 1:
m = begin_re.search(self.buf)
if m:
......@@ -229,18 +220,28 @@ class rorp_extractor:
self.at_end = 1
return
def get_index_re(self, index):
"""Return regular expression used to find index.
Override this in sub classes. The regular expression's second
group needs to start at the beginning of the record that
contains information about the object with the given index.
"""
assert 0, "Just a placeholder, must override this in subclasses"
def iterate_starting_with(self, index):
"""Iterate records whose index starts with given index"""
"""Iterate objects whose index starts with given index"""
self.skip_to_index(index)
if self.at_end: return
while 1:
next_pos = self.get_next_pos()
try: rorp = Record2RORP(self.buf[:next_pos])
try: obj = self.record_to_object(self.buf[:next_pos])
except ParsingError, e:
log.Log("Error parsing metadata file: %s" % (e,), 2)
else:
if rorp.index[:len(index)] != index: break
yield rorp
if obj.index[:len(index)] != index: break
yield obj
if self.at_end: break
self.buf = self.buf[next_pos:]
assert not self.close()
......@@ -249,73 +250,116 @@ class rorp_extractor:
"""Return value of closing associated file"""
return self.fileobj.close()
class RorpExtractor(FlatExtractor):
    """Iterate rorps out of a mirror_metadata file"""
    record_boundary_regexp = re.compile(r"\nFile")
    record_to_object = staticmethod(Record2RORP)
    def get_index_re(self, index):
        """Return regexp locating the rorp record with the given index"""
        if index: indexpath = '/'.join(index)
        else: indexpath = '.'
        # Paths are stored escaped (a newline appears as backslash-n),
        # so every backslash must be doubled before embedding the path
        # in a regular expression.
        escaped = indexpath.replace("\\", "\\\\")
        return re.compile("(^|\\n)(File %s\\n)" % (escaped,))
# Module-level state for the metadata writing interface below
metadata_rp = None       # rpath of the currently open metadata file
metadata_fileobj = None  # its open file object; None when closed
metadata_record_buffer = [] # Use this because gzip writes are slow
def OpenMetadata(rp = None, compress = 1):
    """Open the Metadata file for writing, return metadata fileobj

    If rp is not given, a new mirror_metadata snapshot file is created
    in the rdiff-backup-data dir, gzipped when compress is true.
    """
    global metadata_rp, metadata_fileobj
    assert not metadata_fileobj, "Metadata file already open"
    if rp: metadata_rp = rp
    else:
        if compress: typestr = 'snapshot.gz'
        else: typestr = 'snapshot'
        metadata_rp = Globals.rbdir.append("mirror_metadata.%s.%s" %
                                           (Time.curtimestr, typestr))
    metadata_fileobj = metadata_rp.open("wb", compress = compress)
def WriteMetadata(rorp):
    """Write metadata of rorp to file

    Records are buffered because individual gzip writes are slow; the
    buffer is flushed every 100 records and again by CloseMetadata().
    """
    global metadata_fileobj, metadata_record_buffer
    metadata_record_buffer.append(RORP2Record(rorp))
    if len(metadata_record_buffer) >= 100: write_metadata_buffer()
def write_metadata_buffer():
    """Flush all buffered records to the open metadata file"""
    global metadata_record_buffer
    metadata_fileobj.write("".join(metadata_record_buffer))
    metadata_record_buffer = []
def CloseMetadata():
    """Close the metadata file

    Flushes any buffered records, fsyncs so the metadata is safely on
    disk, and returns the result of closing the file object.
    """
    global metadata_rp, metadata_fileobj
    assert metadata_fileobj, "Metadata file not open"
    if metadata_record_buffer: write_metadata_buffer()
    # GzipFile objects have no fileno(); fall back to the descriptor
    # of the underlying file object
    try: fileno = metadata_fileobj.fileno() # will not work if GzipFile
    except AttributeError: fileno = metadata_fileobj.fileobj.fileno()
    os.fsync(fileno)
    result = metadata_fileobj.close()
    metadata_fileobj = None
    metadata_rp.setdata()
    return result
def GetMetadata(rp, restrict_index = None, compressed = None):
    """Return iterator of metadata from given metadata file rp

    If compressed is None, infer compression from the increment type
    or a .gz suffix.  With restrict_index, only yield rorps whose
    index starts with it.
    """
    if compressed is None:
        if rp.isincfile():
            compressed = rp.inc_compressed
            assert rp.inc_type == "data" or rp.inc_type == "snapshot"
        else: compressed = rp.get_indexpath().endswith(".gz")
    fileobj = rp.open("rb", compress = compressed)
    if restrict_index is None: return rorp_extractor(fileobj).iterate()
    else: return rorp_extractor(fileobj).iterate_starting_with(restrict_index)
def GetMetadata_at_time(rbdir, time, restrict_index = None, rblist = None):
"""Scan through rbdir, finding metadata file at given time, iterate
If rdlist is given, use that instead of listing rddir. Time here
is exact, we don't take the next one older or anything. Returns
None if no matching metadata found.
class FlatFile:
"""Manage a flat (probably text) file containing info on various files
This is used for metadata information, and possibly EAs and ACLs.
The main read interface is as an iterator. The storage format is
a flat, probably compressed file, so random access is not
recommended.
"""
if rblist is None: rblist = map(lambda x: rbdir.append(x),
robust.listrp(rbdir))
for rp in rblist:
if (rp.isincfile() and
(rp.getinctype() == "data" or rp.getinctype() == "snapshot") and
rp.getincbase_str() == "mirror_metadata"):
if rp.getinctime() == time: return GetMetadata(rp, restrict_index)
return None
_prefix = None # Set this to real prefix when subclassing
_rp, _fileobj = None, None
# Buffering may be useful because gzip writes are slow
_buffering_on = 1
_record_buffer, _max_buffer_size = None, 100
_extractor = FlatExtractor # Set to class that iterates objects
def open_file(cls, rp = None, compress = 1):
    """Open file for writing.  Use cls._rp if rp not given.

    When no rp is supplied, a new <prefix>.<time>.snapshot[.gz] file
    is created in the rdiff-backup-data directory.
    """
    assert not cls._fileobj, "Flatfile already open"
    cls._record_buffer = []
    if rp: cls._rp = rp
    else:
        if compress: typestr = 'snapshot.gz'
        else: typestr = 'snapshot'
        cls._rp = Globals.rbdir.append(
            "%s.%s.%s" % (cls._prefix, Time.curtimestr, typestr))
    cls._fileobj = cls._rp.open("wb", compress = compress)
def write_object(cls, object):
    """Convert one object to record and write to file

    Records are buffered (gzip writes are slow) and flushed once
    cls._max_buffer_size records accumulate; close_file() flushes
    whatever remains.
    """
    # NOTE(review): parameter name shadows the builtin 'object'
    record = cls._object_to_record(object)
    if cls._buffering_on:
        cls._record_buffer.append(record)
        if len(cls._record_buffer) >= cls._max_buffer_size:
            cls._fileobj.write("".join(cls._record_buffer))
            cls._record_buffer = []
    else: cls._fileobj.write(record)
def close_file(cls):
    """Close file, for when any writing is done

    Flushes buffered records, fsyncs the file so the data is safely
    on disk, and returns the result of closing the file object.
    """
    assert cls._fileobj, "File already closed"
    if cls._buffering_on and cls._record_buffer:
        cls._fileobj.write("".join(cls._record_buffer))
        cls._record_buffer = []
    # GzipFile objects have no fileno(); fall back to the descriptor
    # of the underlying file object
    try: fileno = cls._fileobj.fileno() # will not work if GzipFile
    except AttributeError: fileno = cls._fileobj.fileobj.fileno()
    os.fsync(fileno)
    result = cls._fileobj.close()
    cls._fileobj = None
    cls._rp.setdata()
    return result
def get_objects(cls, restrict_index = None, compressed = None):
    """Return iterator of objects read from the records in cls._rp

    cls._rp must already point at the file to read.  If compressed is
    None, infer compression from the increment type or a .gz suffix.
    With restrict_index, only yield objects whose index starts with it.
    """
    assert cls._rp, "Must have rp set before get_objects can be used"
    if compressed is None:
        if cls._rp.isincfile():
            compressed = cls._rp.inc_compressed
            assert (cls._rp.inc_type == 'data' or
                    cls._rp.inc_type == 'snapshot'), cls._rp.inc_type
        else: compressed = cls._rp.get_indexpath().endswith('.gz')
    fileobj = cls._rp.open('rb', compress = compressed)
    # Bug fix: local was named 're', shadowing the imported re module
    extractor = cls._extractor(fileobj)
    if restrict_index is None: return extractor.iterate()
    else: return extractor.iterate_starting_with(restrict_index)
def get_objects_at_time(cls, rbdir, time, restrict_index = None,
                        rblist = None):
    """Scan through rbdir, finding data at given time, iterate

    If rblist is given, use that instead of listing rbdir.  Time
    here is exact, we don't take the next one older or anything.
    Returns None if no file matching prefix is found.
    """
    if rblist is None:
        rblist = map(lambda x: rbdir.append(x), robust.listrp(rbdir))
    for rp in rblist:
        if (rp.isincfile() and
            (rp.getinctype() == "data" or rp.getinctype() == "snapshot")
            and rp.getincbase_str() == cls._prefix):
            if rp.getinctime() == time:
                # Remember the matching file so get_objects can open it
                cls._rp = rp
                return cls.get_objects(restrict_index)
    return None
static.MakeClass(FlatFile)
class MetadataFile(FlatFile):
    """Store/retrieve metadata from mirror_metadata as rorps"""
    _prefix = "mirror_metadata"  # basename of files in rdiff-backup-data
    _extractor = RorpExtractor   # parses text records back into rorps
    _object_to_record = staticmethod(RORP2Record)
......@@ -124,7 +124,8 @@ def iterate_raw_rfs(mirror_rp, inc_rp):
def yield_metadata():
"""Iterate rorps from metadata file, if any are available"""
metadata_iter = metadata.GetMetadata_at_time(Globals.rbdir, regress_time)
metadata_iter = metadata.MetadataFile.get_objects_at_time(Globals.rbdir,
regress_time)
if metadata_iter: return metadata_iter
log.Log.FatalError("No metadata for time %s found, cannot regress"
% Time.timetopretty(regress_time))
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -22,7 +22,7 @@
from __future__ import generators
import tempfile, os, cStringIO
import Globals, Time, Rdiff, Hardlink, rorpiter, selection, rpath, \
log, static, robust, metadata, statistics, TempFile
log, static, robust, metadata, statistics, TempFile, eas_acls
# This should be set to selection.Select objects over the source and
......@@ -154,8 +154,13 @@ class MirrorStruct:
"""
if rest_time is None: rest_time = _rest_time
metadata_iter = metadata.GetMetadata_at_time(Globals.rbdir,
rest_time, restrict_index = cls.mirror_base.index)
if Globals.write_eas:
metadata_iter = eas_acls.ExtendedAttributesFile.\
get_combined_iter_at_time(
Globals.rbdir, rest_time, restrict_index = cls.mirror_base.index)
else:
metadata_iter = metadata.MetadataFile.get_objects_at_time(
Globals.rbdir, rest_time, restrict_index = cls.mirror_base.index)
if metadata_iter: rorp_iter = metadata_iter
elif require_metadata: log.Log.FatalError("Mirror metadata not found")
else:
......
# Copyright 2002 Ben Escoto
# Copyright 2002, 2003 Ben Escoto
#
# This file is part of rdiff-backup.
#
......@@ -156,6 +156,7 @@ def copy_attribs(rpin, rpout):
if Globals.change_ownership: apply(rpout.chown, rpin.getuidgid())
if Globals.change_permissions: rpout.chmod(rpin.getperms())
if not rpin.isdev(): rpout.setmtime(rpin.getmtime())
if Globals.write_eas: rpout.write_ea(rpin.get_ea())
def cmp_attribs(rp1, rp2):
"""True if rp1 has the same file attributes as rp2
......@@ -301,7 +302,8 @@ class RORPath:
return 1
def equal_verbose(self, other, check_index = 1,
compare_inodes = 0, compare_ownership = 0):
compare_inodes = 0, compare_ownership = 0,
compare_eas = 0):
"""Like __eq__, but log more information. Useful when testing"""
if check_index and self.index != other.index:
log.Log("Index %s != index %s" % (self.index, other.index), 2)
......@@ -318,6 +320,7 @@ class RORPath:
elif key == 'size' and not self.isreg(): pass
elif key == 'inode' and (not self.isreg() or not compare_inodes):
pass
elif key == 'ea' and not compare_eas: pass
elif (not other.data.has_key(key) or
self.data[key] != other.data[key]):
if not other.data.has_key(key):
......@@ -512,6 +515,14 @@ class RORPath:
self.index)
self.file_already_open = None
def set_ea(self, ea):
    """Cache extended attributes object in the data dictionary.

    Only records the value; nothing is written to the filesystem.
    """
    self.data['ea'] = ea
def get_ea(self):
    """Return the cached extended attributes object"""
    return self.data['ea']
class RPath(RORPath):
"""Remote Path class - wrapper around a possibly non-local pathname
......@@ -546,7 +557,7 @@ class RPath(RORPath):
else: self.path = "/".join((base,) + index)
self.file = None
if data or base is None: self.data = data
else: self.data = self.conn.C.make_file_dict(self.path)
else: self.setdata()
def __str__(self):
return "Path: %s\nIndex: %s\nData: %s" % (self.path, self.index,
......@@ -571,6 +582,7 @@ class RPath(RORPath):
def setdata(self):
    """Set data dictionary using C extension"""
    self.data = self.conn.C.make_file_dict(self.path)
    # get_ea() caches the file's extended attributes into self.data
    if Globals.read_eas and self.lstat(): self.get_ea()
def make_file_dict_old(self):
"""Create the data dictionary"""
......@@ -727,7 +739,7 @@ class RPath(RORPath):
log.Log("Deleting %s" % self.path, 7)
if self.isdir():
try: self.rmdir()
except os.error: shutil.rmtree(self.path)
except os.error: self.conn.shutil.rmtree(self.path)
else: self.conn.os.unlink(self.path)
self.setdata()
......@@ -929,6 +941,24 @@ class RPath(RORPath):
assert not fp.close()
return s
def get_ea(self):
    """Return extended attributes object, setting if necessary

    On the first call the EAs are read from the filesystem and
    cached in self.data; later calls return the cached object.
    """
    try: ea = self.data['ea']
    except KeyError:
        ea = eas_acls.ExtendedAttributes(self.index)
        if not self.issym():
            # Don't read from symlinks because they will be
            # followed.  Update this when llistxattr,
            # etc. available
            ea.read_from_rp(self)
        self.data['ea'] = ea
    return ea
def write_ea(self, ea):
    """Apply extended attributes ea to this path and cache them"""
    ea.write_to_rp(self)
    self.data['ea'] = ea
class RPathFileHook:
"""Look like a file, but add closing hook"""
......@@ -945,3 +975,4 @@ class RPathFileHook:
self.closing_thunk()
return result
import eas_acls # Put at end to avoid regress
......@@ -3,7 +3,7 @@ import os, sys
from rdiff_backup.log import Log
from rdiff_backup.rpath import RPath
from rdiff_backup import Globals, Hardlink, SetConnections, Main, \
selection, lazy, Time, rpath
selection, lazy, Time, rpath, eas_acls
RBBin = "../rdiff-backup"
SourceDir = "../rdiff_backup"
......@@ -143,7 +143,7 @@ def InternalRestore(mirror_local, dest_local, mirror_dir, dest_dir, time):
if inc: Main.Restore(get_increment_rp(mirror_rp, time), dest_rp)
else: # use alternate syntax
Main.restore_timestr = str(time)
Main.RestoreAsOf(mirror_rp, dest_rp)
Main.Restore(mirror_rp, dest_rp, restore_as_of = 1)
Main.cleanup()
def get_increment_rp(mirror_rp, time):
......@@ -166,7 +166,8 @@ def _reset_connections(src_rp, dest_rp):
def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
equality_func = None, exclude_rbdir = 1,
ignore_tmp_files = None, compare_ownership = 0):
ignore_tmp_files = None, compare_ownership = 0,
compare_eas = 0):
"""Compare src_rp and dest_rp, which can be directories
This only compares file attributes, not the actual data. This
......@@ -178,8 +179,8 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
src_rp.setdata()
dest_rp.setdata()
Log("Comparing %s and %s, hardlinks %s" % (src_rp.path, dest_rp.path,
compare_hardlinks), 3)
Log("Comparing %s and %s, hardlinks %s, eas %s" %
(src_rp.path, dest_rp.path, compare_hardlinks, compare_eas), 3)
src_select = selection.Select(src_rp)
dest_select = selection.Select(dest_rp)
......@@ -214,11 +215,17 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
if not src_rorp.equal_verbose(dest_rorp,
compare_ownership = compare_ownership):
return None
if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1
Log("%s: %s" % (src_rorp.index, Hardlink.get_indicies(src_rorp, 1)), 3)
Log("%s: %s" % (dest_rorp.index,
Hardlink.get_indicies(dest_rorp, None)), 3)
return None
if not Hardlink.rorp_eq(src_rorp, dest_rorp):
Log("%s: %s" % (src_rorp.index,
Hardlink.get_indicies(src_rorp, 1)), 3)
Log("%s: %s" % (dest_rorp.index,
Hardlink.get_indicies(dest_rorp, None)), 3)
return None
if compare_eas and not eas_acls.compare_rps(src_rorp, dest_rorp):
Log("Different EAs in files %s and %s" %
(src_rorp.get_indexpath(), dest_rorp.get_indexpath()), 3)
return None
return 1
def rbdir_equal(src_rorp, dest_rorp):
"""Like hardlink_equal, but make allowances for data directories"""
......@@ -233,6 +240,10 @@ def CompareRecursive(src_rp, dest_rp, compare_hardlinks = 1,
if dest_rorp.index[-1].endswith('gz'): return 1
# Don't compare .missing increments because they don't matter
if dest_rorp.index[-1].endswith('.missing'): return 1
if compare_eas and not eas_acls.compare_rps(src_rorp, dest_rorp):
Log("Different EAs in files %s and %s" %
(src_rorp.get_indexpath(), dest_rorp.get_indexpath()))
return None
if compare_hardlinks:
if Hardlink.rorp_eq(src_rorp, dest_rorp): return 1
elif src_rorp.equal_verbose(dest_rorp,
......@@ -272,7 +283,8 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
compare_hardlinks = 1,
dest_dirname = "testfiles/output",
restore_dirname = "testfiles/rest_out",
compare_backups = 1):
compare_backups = 1,
compare_eas = 0):
"""Test backing up/restoring of a series of directories
The dirnames correspond to a single directory at different times.
......@@ -282,6 +294,8 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
"""
Globals.set('preserve_hardlinks', compare_hardlinks)
Globals.set('write_eas', compare_eas)
Globals.set('read_eas', compare_eas)
time = 10000
dest_rp = rpath.RPath(Globals.local_connection, dest_dirname)
restore_rp = rpath.RPath(Globals.local_connection, restore_dirname)
......@@ -296,7 +310,8 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
time += 10000
_reset_connections(src_rp, dest_rp)
if compare_backups:
assert CompareRecursive(src_rp, dest_rp, compare_hardlinks)
assert CompareRecursive(src_rp, dest_rp, compare_hardlinks,
compare_eas = compare_eas)
time = 10000
for dirname in list_of_dirnames[:-1]:
......@@ -305,7 +320,7 @@ def BackupRestoreSeries(source_local, dest_local, list_of_dirnames,
InternalRestore(dest_local, source_local, dest_dirname,
restore_dirname, time)
src_rp = rpath.RPath(Globals.local_connection, dirname)
assert CompareRecursive(src_rp, restore_rp)
assert CompareRecursive(src_rp, restore_rp, compare_eas = compare_eas)
# Restore should default back to newest time older than it
# with a backup then.
......
import unittest, os, time
from commontest import *
from rdiff_backup.eas_acls import *
from rdiff_backup import Globals, rpath, Time
# Scratch directory shared by the tests below; wiped and recreated by
# EATest.make_temp() before each test that needs it.
tempdir = rpath.RPath(Globals.local_connection, "testfiles/output")
class EATest(unittest.TestCase):
	"""Test extended attributes

	Exercises reading/writing EAs on the filesystem, converting them
	to and from text records, streaming them through
	ExtendedAttributesFile, and full backup/restore series.
	"""
	# Representative attribute set: empty value, short text, raw binary
	# bytes, and a long multi-line value that forces base64 wrapping.
	sample_ea = ExtendedAttributes(
		(), {'user.empty': '',
			 'user.not_empty': 'foobar',
			 'user.third': 'hello',
			 'user.binary': chr(0) + chr(1) + chr(2) + chr(140) + '/="',
			 'user.multiline': """This is a fairly long extended attribute.
Encoding it will require several lines of
base64.""" + chr(177) * 300})
	empty_ea = ExtendedAttributes(())
	# Variants derived from sample_ea: one with a key removed, one with
	# a value changed, an empty one, and one holding a throwaway key.
	ea1 = ExtendedAttributes(('1',), sample_ea.attr_dict.copy())
	ea1.delete('user.not_empty')
	ea2 = ExtendedAttributes(('2',), sample_ea.attr_dict.copy())
	ea2.set('user.third', 'Another random attribute')
	ea3 = ExtendedAttributes(('3',))
	ea4 = ExtendedAttributes(('4',), {'user.deleted': 'File to be deleted'})
	ea_testdir1 = rpath.RPath(Globals.local_connection, "testfiles/ea_test1")
	ea_testdir2 = rpath.RPath(Globals.local_connection, "testfiles/ea_test2")

	def make_temp(self):
		"""Ensure testfiles/output exists as a fresh, empty directory"""
		if tempdir.lstat():
			tempdir.delete()
		tempdir.mkdir()

	def testBasic(self):
		"""Round-trip extended attributes through the filesystem"""
		self.make_temp()
		fresh = ExtendedAttributes(())
		fresh.read_from_rp(tempdir)
		# A brand-new directory carries no attributes at all
		assert not fresh.attr_dict
		assert not fresh == self.sample_ea
		assert fresh != self.sample_ea
		assert fresh == self.empty_ea

		self.sample_ea.write_to_rp(tempdir)
		fresh.read_from_rp(tempdir)
		assert fresh.attr_dict == self.sample_ea.attr_dict, \
			   (fresh.attr_dict, self.sample_ea.attr_dict)
		assert fresh == self.sample_ea

	def testRecord(self):
		"""Convert EAs to a text record and parse them back"""
		reparsed = Record2EA(EA2Record(self.sample_ea))
		if reparsed == self.sample_ea: return
		# Mismatch -- pin down exactly which part differs before failing
		got_names = reparsed.attr_dict.keys()
		want_names = self.sample_ea.attr_dict.keys()
		got_names.sort()
		want_names.sort()
		assert got_names == want_names, (got_names, want_names)
		for name in got_names:
			assert self.sample_ea.get(name) == reparsed.get(name), \
				   (self.sample_ea.get(name), reparsed.get(name))
		assert self.sample_ea.index == reparsed.index, \
			   (self.sample_ea.index, reparsed.index)
		assert 0, "We shouldn't have gotten this far"

	def make_backup_dirs(self):
		"""Create testfiles/ea_test[12] populated with EA-carrying files"""
		for dirrp in (self.ea_testdir1, self.ea_testdir2):
			if dirrp.lstat(): dirrp.delete()

		self.ea_testdir1.mkdir()
		children1 = [self.ea_testdir1.append(name)
					 for name in ('1', '2', '3', '4')]
		for rp in children1: rp.touch()
		self.sample_ea.write_to_rp(self.ea_testdir1)
		self.ea1.write_to_rp(children1[0])
		self.ea2.write_to_rp(children1[1])
		# children1[2] deliberately gets no EAs
		self.ea4.write_to_rp(children1[3])

		self.ea_testdir2.mkdir()
		children2 = [self.ea_testdir2.append(name) for name in ('1', '2', '3')]
		for rp in children2: rp.touch()
		self.ea3.write_to_rp(self.ea_testdir2)
		self.sample_ea.write_to_rp(children2[0])
		self.ea1.write_to_rp(children2[1])
		self.ea2.write_to_rp(children2[2])

	def testIterate(self):
		"""Stream several EA records to a file and read them back"""
		self.make_backup_dirs()
		subrps = [self.ea_testdir1.append(name) for name in ('1', '2', '3')]

		# Record the EAs of the directory and its first three children
		Globals.rbdir = tempdir
		Time.setcurtime(10000)
		ExtendedAttributesFile.open_file()
		for rp in [self.ea_testdir1] + subrps:
			ea = ExtendedAttributes(rp.index)
			ea.read_from_rp(rp)
			ExtendedAttributesFile.write_object(ea)
		ExtendedAttributesFile.close_file()

		# Reread and compare each record against what was written above
		ea_iter = ExtendedAttributesFile.get_objects_at_time(tempdir, 10000)
		assert ea_iter, "No extended_attributes.<time> file found"
		for expected in (self.sample_ea, self.ea1, self.ea2, self.ea3):
			assert ea_iter.next() == expected
		try: ea_iter.next()
		except StopIteration: pass
		else: assert 0, "Expected end to iterator"

	def testSeriesLocal(self):
		"""Test backing up and restoring directories with EAs locally"""
		self.make_backup_dirs()
		series = ['testfiles/ea_test1', 'testfiles/empty',
				  'testfiles/ea_test2', 'testfiles/ea_test1']
		BackupRestoreSeries(1, 1, series, compare_eas = 1)

	def testSeriesRemote(self):
		"""Test backing up, restoring directories with EA remotely"""
		self.make_backup_dirs()
		series = ['testfiles/ea_test1', 'testfiles/ea_test2',
				  'testfiles/empty', 'testfiles/ea_test1']
		BackupRestoreSeries(None, None, series, compare_eas = 1)
if __name__ == "__main__": unittest.main()
......@@ -35,7 +35,7 @@ class Local:
vft2_in = get_local_rp('vft2_out')
timbar_in = get_local_rp('increment1/timbar.pyc')
timbar_out = get_local_rp('../timbar.pyc') # in cur directory
timbar_out = get_local_rp('timbar.pyc') # in cur directory
wininc2 = get_local_rp('win-increment2')
wininc3 = get_local_rp('win-increment3')
......@@ -105,7 +105,7 @@ class PathSetter(unittest.TestCase):
"""Remove any temp directories created by previous tests"""
assert not os.system(MiscDir + '/myrm testfiles/output* '
'testfiles/restoretarget* testfiles/vft_out '
'timbar.pyc testfiles/vft2_out')
'testfiles/timbar.pyc testfiles/vft2_out')
def runtest(self):
self.delete_tmpdirs()
......@@ -155,7 +155,7 @@ class PathSetter(unittest.TestCase):
timbar_paths = self.getinc_paths("timbar.pyc.",
"testfiles/output/rdiff-backup-data/increments")
self.exec_rb(None, timbar_paths[0])
self.exec_rb(None, timbar_paths[0], 'testfiles/timbar.pyc')
self.refresh(Local.timbar_in, Local.timbar_out)
assert Local.timbar_in.equal_loose(Local.timbar_out)
......@@ -242,7 +242,7 @@ class Final(PathSetter):
self.exec_rb(None, '../../../../../../proc', 'testfiles/procoutput')
def testWindowsMode(self):
"""Test backup with the --windows-mode option
"""Test backup with quoting enabled
We need to delete from the increment? directories long file
names, because quoting adds too many extra letters.
......@@ -260,30 +260,35 @@ class Final(PathSetter):
delete_long(Local.wininc3)
old_schema = self.rb_schema
self.rb_schema = old_schema + " --windows-mode "
self.rb_schema = old_schema+" --override-chars-to-quote '^a-z0-9_ -.' "
self.set_connections(None, None, None, None)
self.delete_tmpdirs()
# Back up increment2, this contains a file with colons
self.exec_rb(20000, 'testfiles/win-increment2', 'testfiles/output')
self.rb_schema = old_schema # Quoting setting should now be saved
time.sleep(1)
# Back up increment3
self.exec_rb(30000, 'testfiles/win-increment3', 'testfiles/output')
# Start restore
self.rb_schema = old_schema + ' --windows-restore '
Globals.time_separator = "_"
# Start restore of increment 2
Globals.chars_to_quote = '^a-z0-9_ -.'
inc_paths = self.getinc_paths("increments.",
"testfiles/output/rdiff-backup-data", 1)
Globals.time_separator = ":"
Globals.chars_to_quote = None
assert len(inc_paths) == 1, inc_paths
# Restore increment2
self.exec_rb(None, inc_paths[0], 'testfiles/restoretarget2')
assert CompareRecursive(Local.wininc2, Local.rpout2,
compare_hardlinks = 0)
# Restore increment 3 again, using different syntax
self.rb_schema = old_schema + '-r 30000 '
self.exec_rb(None, 'testfiles/output', 'testfiles/restoretarget3')
assert CompareRecursive(Local.wininc3, Local.rpout3,
compare_hardlinks = 0)
self.rb_schema = old_schema
# Now check to make sure no ":" in output directory
popen_fp = os.popen("find testfiles/output -name '*:*' | wc")
wc_output = popen_fp.read()
......
......@@ -40,13 +40,16 @@ class MetadataTest(unittest.TestCase):
def testIterator(self):
"""Test writing RORPs to file and iterating them back"""
def write_rorp_iter_to_file(rorp_iter, file):
for rorp in rorp_iter: file.write(RORP2Record(rorp))
l = self.get_rpaths()
fp = cStringIO.StringIO()
write_rorp_iter_to_file(iter(l), fp)
fp.seek(0)
cstring = fp.read()
fp.seek(0)
outlist = list(rorp_extractor(fp).iterate())
outlist = list(RorpExtractor(fp).iterate())
assert len(l) == len(outlist), (len(l), len(outlist))
for i in range(len(l)):
if not l[i].equal_verbose(outlist[i]):
......@@ -65,18 +68,19 @@ class MetadataTest(unittest.TestCase):
rpath_iter = selection.Select(rootrp).set_iter()
start_time = time.time()
OpenMetadata(temprp)
for rp in rpath_iter: WriteMetadata(rp)
CloseMetadata()
MetadataFile.open_file(temprp)
for rp in rpath_iter: MetadataFile.write_object(rp)
MetadataFile.close_file()
print "Writing metadata took %s seconds" % (time.time() - start_time)
return temprp
def testSpeed(self):
"""Test testIterator on 10000 files"""
temprp = self.write_metadata_to_temp()
MetadataFile._rp = temprp
start_time = time.time(); i = 0
for rorp in GetMetadata(temprp): i += 1
for rorp in MetadataFile.get_objects(): i += 1
print "Reading %s metadata entries took %s seconds." % \
(i, time.time() - start_time)
......@@ -98,11 +102,35 @@ class MetadataTest(unittest.TestCase):
"""
temprp = self.write_metadata_to_temp()
MetadataFile._rp = temprp
start_time = time.time(); i = 0
for rorp in GetMetadata(temprp, ("subdir3", "subdir10")): i += 1
for rorp in MetadataFile.get_objects(("subdir3", "subdir10")): i += 1
print "Reading %s metadata entries took %s seconds." % \
(i, time.time() - start_time)
assert i == 51
def test_write(self):
"""Test writing to metadata file, then reading back contents"""
global tempdir
temprp = tempdir.append("write_test.gz")
if temprp.lstat(): temprp.delete()
self.make_temp()
rootrp = rpath.RPath(Globals.local_connection,
"testfiles/various_file_types")
dirlisting = rootrp.listdir()
dirlisting.sort()
rps = map(rootrp.append, dirlisting)
assert not temprp.lstat()
MetadataFile.open_file(temprp)
for rp in rps: MetadataFile.write_object(rp)
MetadataFile.close_file()
assert temprp.lstat()
reread_rps = list(MetadataFile.get_objects())
assert len(reread_rps) == len(rps), (len(reread_rps), len(rps))
for i in range(len(reread_rps)):
assert reread_rps[i] == rps[i], i
if __name__ == "__main__": unittest.main()
......@@ -12,7 +12,7 @@ testfiles
Globals.set('change_source_perms', 1)
Globals.counter = 0
log.Log.setverbosity(3)
log.Log.setverbosity(7)
def get_local_rp(extension):
return rpath.RPath(Globals.local_connection, "testfiles/" + extension)
......@@ -172,16 +172,12 @@ class IncrementTest1(unittest.TestCase):
hl2.hardlink(hl1.path)
Myrm(Local.rpout.path)
old_settings = (Globals.quoting_enabled, Globals.chars_to_quote,
Globals.quoting_char)
Globals.quoting_enabled = 1
old_chars = Globals.chars_to_quote
Globals.chars_to_quote = 'A-Z'
Globals.quoting_char = ';'
InternalBackup(1, 1, hldir.path, Local.rpout.path, current_time = 1)
InternalBackup(1, 1, "testfiles/empty", Local.rpout.path,
current_time = 10000)
(Globals.quoting_enabled, Globals.chars_to_quote,
Globals.quoting_char) = old_settings
Globals.chars_to_quote = old_chars
def test_long_socket(self):
"""Test backing up a directory with long sockets in them
......@@ -389,8 +385,10 @@ class MirrorTest(PathSetter):
Main.force = 1
assert not rpout.append("rdiff-backup-data").lstat()
Main.misc_setup([rpin, rpout])
Main.backup_check_dirs(rpin, rpout)
Main.backup_set_fs_globals(rpin, rpout)
Main.backup_set_rbdir(rpin, rpout)
Main.backup_set_select(rpin)
Main.backup_init_dirs(rpin, rpout)
backup.Mirror(rpin, rpout)
log.ErrorLog.close()
log.Log.close_logfile()
......
......@@ -143,22 +143,6 @@ class RestoreTest(unittest.TestCase):
"testfiles/output", 5000)
assert CompareRecursive(inc1_rp, target_rp, compare_hardlinks = 0)
# def testRestoreCorrupt(self):
# """Test restoring a partially corrupt archive
#
# The problem here is that a directory is missing from what is
# to be restored, but because the previous backup was aborted in
# the middle, some of the files in that directory weren't marked
# as .missing.
#
# """
# Myrm("testfiles/output")
# InternalRestore(1, 1, "testfiles/restoretest4", "testfiles/output",
# 10000)
# assert os.lstat("testfiles/output")
# self.assertRaises(OSError, os.lstat, "testfiles/output/tmp")
# self.assertRaises(OSError, os.lstat, "testfiles/output/rdiff-backup")
def testRestoreNoincs(self):
"""Test restoring a directory with no increments, just mirror"""
Myrm("testfiles/output")
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment