Commit bae0f9c1 authored by James Fargher, committed by Mayra Cabrera

Stop passing name up to Files backup task

Now that we pass through the name of the archive to be created, there's
no need to explicitly give a name.
parent 00c452e9
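In practice, a Files-based task no longer carries its own name; the archive name is implied by the destination path the manager hands to #dump. A minimal sketch using the artifacts values from this diff (the backup_id variable is illustrative):

# Before: the task passed an explicit name up to Backup::Files
Backup::Files.new(progress, 'artifacts', JobArtifactUploader.root, excludes: ['tmp'])

# After: no name argument; the archive name comes from the destination path
files = Backup::Files.new(progress, JobArtifactUploader.root, excludes: ['tmp'])
files.dump(File.join(Gitlab.config.backup.path, 'artifacts.tar.gz'), backup_id)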
# frozen_string_literal: true
module Backup
class Artifacts < Backup::Files
def initialize(progress)
super(progress, 'artifacts', JobArtifactUploader.root, excludes: ['tmp'])
end
override :human_name
def human_name
_('artifacts')
end
end
end
# frozen_string_literal: true
module Backup
class Builds < Backup::Files
def initialize(progress)
super(progress, 'builds', Settings.gitlab_ci.builds_path)
end
override :human_name
def human_name
_('builds')
end
end
end
@@ -134,11 +134,6 @@ module Backup
MSG
end
override :human_name
def human_name
_('database')
end
protected
def database
......
@@ -9,12 +9,11 @@ module Backup
DEFAULT_EXCLUDE = 'lost+found'
attr_reader :name, :excludes
attr_reader :excludes
def initialize(progress, name, app_files_dir, excludes: [])
def initialize(progress, app_files_dir, excludes: [])
super(progress)
@name = name
@app_files_dir = app_files_dir
@excludes = [DEFAULT_EXCLUDE].concat(excludes)
end
@@ -55,7 +54,7 @@ module Backup
override :restore
def restore(backup_tarball)
backup_existing_files_dir
backup_existing_files_dir(backup_tarball)
cmd_list = [%w[gzip -cd], %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
status_list, output = run_pipeline!(cmd_list, in: backup_tarball)
@@ -73,11 +72,13 @@ module Backup
end
end
def backup_existing_files_dir
def backup_existing_files_dir(backup_tarball)
name = File.basename(backup_tarball, '.tar.gz')
timestamped_files_path = File.join(Gitlab.config.backup.path, "tmp", "#{name}.#{Time.now.to_i}")
if File.exist?(app_files_realpath)
# Move all files in the existing repos directory except . and .. to
# repositories.old.<timestamp> directory
# repositories.<timestamp> directory
FileUtils.mkdir_p(timestamped_files_path, mode: 0700)
files = Dir.glob(File.join(app_files_realpath, "*"), File::FNM_DOTMATCH) - [File.join(app_files_realpath, "."), File.join(app_files_realpath, "..")]
begin
......
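For illustration, the restore path now derives the temporary staging directory from the tarball it is given rather than from the removed @name. With a hypothetical tarball path:

backup_tarball = '/var/opt/gitlab/backups/pages.tar.gz'   # hypothetical path
name = File.basename(backup_tarball, '.tar.gz')           # => "pages"
File.join(Gitlab.config.backup.path, 'tmp', "#{name}.#{Time.now.to_i}")
# => e.g. "<backup_path>/tmp/pages.1650000000"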
# frozen_string_literal: true
module Backup
class Lfs < Backup::Files
def initialize(progress)
super(progress, 'lfs', Settings.lfs.storage_path)
end
override :human_name
def human_name
_('lfs objects')
end
end
end
@@ -5,68 +5,34 @@ module Backup
FILE_NAME_SUFFIX = '_gitlab_backup.tar'
MANIFEST_NAME = 'backup_information.yml'
# pages used to deploy tmp files to this path
# if some of these files are still there, we don't need them in the backup
LEGACY_PAGES_TMP_PATH = '@pages.tmp'
TaskDefinition = Struct.new(
:enabled, # `true` if the task can be used. Treated as `true` when not specified.
:human_name, # Name of the task used for logging.
:destination_path, # Where the task should put its backup file/dir.
:destination_optional, # `true` if the destination might not exist on a successful backup.
:cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
:task,
keyword_init: true
)
) do
def enabled?
enabled.nil? || enabled
end
end
attr_reader :progress
def initialize(progress, definitions: nil)
@progress = progress
force = ENV['force'] == 'yes'
@incremental = Feature.feature_flags_available? &&
Feature.enabled?(:incremental_repository_backup, default_enabled: :yaml) &&
Gitlab::Utils.to_boolean(ENV['INCREMENTAL'], default: false)
@definitions = definitions || {
'db' => TaskDefinition.new(
destination_path: 'db/database.sql.gz',
cleanup_path: 'db',
task: Database.new(progress, force: force)
),
'repositories' => TaskDefinition.new(
destination_path: 'repositories',
destination_optional: true,
task: Repositories.new(progress, strategy: repository_backup_strategy)
),
'uploads' => TaskDefinition.new(
destination_path: 'uploads.tar.gz',
task: Uploads.new(progress)
),
'builds' => TaskDefinition.new(
destination_path: 'builds.tar.gz',
task: Builds.new(progress)
),
'artifacts' => TaskDefinition.new(
destination_path: 'artifacts.tar.gz',
task: Artifacts.new(progress)
),
'pages' => TaskDefinition.new(
destination_path: 'pages.tar.gz',
task: Pages.new(progress)
),
'lfs' => TaskDefinition.new(
destination_path: 'lfs.tar.gz',
task: Lfs.new(progress)
),
'terraform_state' => TaskDefinition.new(
destination_path: 'terraform_state.tar.gz',
task: TerraformState.new(progress)
),
'registry' => TaskDefinition.new(
destination_path: 'registry.tar.gz',
task: Registry.new(progress)
),
'packages' => TaskDefinition.new(
destination_path: 'packages.tar.gz',
task: Packages.new(progress)
)
}.freeze
@definitions = definitions || build_definitions
end
def create
@@ -102,22 +68,22 @@ module Backup
build_backup_information
unless definition.task.enabled
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
unless definition.enabled?
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
return
end
if skipped?(task_name)
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "[SKIPPED]".color(:cyan)
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "[SKIPPED]".color(:cyan)
return
end
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue)
puts_time "Dumping #{definition.human_name} ... ".color(:blue)
definition.task.dump(File.join(Gitlab.config.backup.path, definition.destination_path), backup_id)
puts_time "Dumping #{definition.task.human_name} ... ".color(:blue) + "done".color(:green)
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "done".color(:green)
rescue Backup::DatabaseBackupError, Backup::FileBackupError => e
puts_time "Dumping #{definition.task.human_name} failed: #{e.message}".color(:red)
puts_time "Dumping #{definition.human_name} failed: #{e.message}".color(:red)
end
def restore
@@ -146,12 +112,12 @@ module Backup
def run_restore_task(task_name)
definition = @definitions[task_name]
unless definition.task.enabled
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
unless definition.enabled?
puts_time "Restoring #{definition.human_name} ... ".color(:blue) + "[DISABLED]".color(:cyan)
return
end
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue)
puts_time "Restoring #{definition.human_name} ... ".color(:blue)
warning = definition.task.pre_restore_warning
if warning.present?
@@ -161,7 +127,7 @@ module Backup
definition.task.restore(File.join(Gitlab.config.backup.path, definition.destination_path))
puts_time "Restoring #{definition.task.human_name} ... ".color(:blue) + "done".color(:green)
puts_time "Restoring #{definition.human_name} ... ".color(:blue) + "done".color(:green)
warning = definition.task.post_restore_warning
if warning.present?
@@ -176,6 +142,82 @@ module Backup
private
def build_definitions
{
'db' => TaskDefinition.new(
human_name: _('database'),
destination_path: 'db/database.sql.gz',
cleanup_path: 'db',
task: build_db_task
),
'repositories' => TaskDefinition.new(
human_name: _('repositories'),
destination_path: 'repositories',
destination_optional: true,
task: build_repositories_task
),
'uploads' => TaskDefinition.new(
human_name: _('uploads'),
destination_path: 'uploads.tar.gz',
task: build_files_task(File.join(Gitlab.config.uploads.storage_path, 'uploads'), excludes: ['tmp'])
),
'builds' => TaskDefinition.new(
human_name: _('builds'),
destination_path: 'builds.tar.gz',
task: build_files_task(Settings.gitlab_ci.builds_path)
),
'artifacts' => TaskDefinition.new(
human_name: _('artifacts'),
destination_path: 'artifacts.tar.gz',
task: build_files_task(JobArtifactUploader.root, excludes: ['tmp'])
),
'pages' => TaskDefinition.new(
human_name: _('pages'),
destination_path: 'pages.tar.gz',
task: build_files_task(Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
),
'lfs' => TaskDefinition.new(
human_name: _('lfs objects'),
destination_path: 'lfs.tar.gz',
task: build_files_task(Settings.lfs.storage_path)
),
'terraform_state' => TaskDefinition.new(
human_name: _('terraform states'),
destination_path: 'terraform_state.tar.gz',
task: build_files_task(Settings.terraform_state.storage_path, excludes: ['tmp'])
),
'registry' => TaskDefinition.new(
enabled: Gitlab.config.registry.enabled,
human_name: _('container registry images'),
destination_path: 'registry.tar.gz',
task: build_files_task(Settings.registry.path)
),
'packages' => TaskDefinition.new(
human_name: _('packages'),
destination_path: 'packages.tar.gz',
task: build_files_task(Settings.packages.storage_path, excludes: ['tmp'])
)
}.freeze
end
def build_db_task
force = ENV['force'] == 'yes'
Database.new(progress, force: force)
end
def build_repositories_task
max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence
max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence
strategy = Backup::GitalyBackup.new(progress, incremental: incremental?, max_parallelism: max_concurrency, storage_parallelism: max_storage_concurrency)
Repositories.new(progress, strategy: strategy)
end
def build_files_task(app_files_dir, excludes: [])
Files.new(progress, app_files_dir, excludes: excludes)
end
def incremental?
@incremental
end
@@ -394,7 +436,7 @@ module Backup
end
def enabled_task?(task_name)
@definitions[task_name].task.enabled
@definitions[task_name].enabled?
end
def backup_file?(file)
@@ -500,12 +542,6 @@ module Backup
Gitlab.config.backup.upload.connection&.provider&.downcase == 'google'
end
def repository_backup_strategy
max_concurrency = ENV['GITLAB_BACKUP_MAX_CONCURRENCY'].presence
max_storage_concurrency = ENV['GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY'].presence
Backup::GitalyBackup.new(progress, incremental: incremental?, max_parallelism: max_concurrency, storage_parallelism: max_storage_concurrency)
end
def puts_time(msg)
progress.puts "#{Time.now} -- #{msg}"
Gitlab::BackupLogger.info(message: "#{Rainbow.uncolor(msg)}")
......
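Taken together, the Manager changes move human_name and enabled off the task objects and into TaskDefinition, whose enabled? predicate treats a missing value as true. A sketch using the registry definition from this diff (files_task stands in for build_files_task(Settings.registry.path)):

definition = Backup::Manager::TaskDefinition.new(
  enabled: Gitlab.config.registry.enabled,      # only the registry task sets this
  human_name: _('container registry images'),   # used in the "Dumping ..." log lines
  destination_path: 'registry.tar.gz',
  task: files_task
)

definition.enabled?   # => Gitlab.config.registry.enabled
# Definitions that omit enabled: return true, since enabled.nil? || enabled.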
# frozen_string_literal: true
module Backup
class Packages < Backup::Files
def initialize(progress)
super(progress, 'packages', Settings.packages.storage_path, excludes: ['tmp'])
end
override :human_name
def human_name
_('packages')
end
end
end
# frozen_string_literal: true
module Backup
class Pages < Backup::Files
# pages used to deploy tmp files to this path
# if some of these files are still there, we don't need them in the backup
LEGACY_PAGES_TMP_PATH = '@pages.tmp'
def initialize(progress)
super(progress, 'pages', Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
end
override :human_name
def human_name
_('pages')
end
end
end
# frozen_string_literal: true
module Backup
class Registry < Backup::Files
def initialize(progress)
super(progress, 'registry', Settings.registry.path)
end
override :human_name
def human_name
_('container registry images')
end
override :enabled
def enabled
Gitlab.config.registry.enabled
end
end
end
@@ -33,11 +33,6 @@ module Backup
restore_object_pools
end
override :human_name
def human_name
_('repositories')
end
private
attr_reader :strategy
......
@@ -6,11 +6,6 @@ module Backup
@progress = progress
end
# human readable task name used for logging
def human_name
raise NotImplementedError
end
# dump task backup to `path`
#
# @param [String] path fully qualified backup task destination
@@ -32,11 +27,6 @@ module Backup
def post_restore_warning
end
# returns `true` when the task should be used
def enabled
true
end
private
attr_reader :progress
......
# frozen_string_literal: true
module Backup
class TerraformState < Backup::Files
def initialize(progress)
super(progress, 'terraform_state', Settings.terraform_state.storage_path, excludes: ['tmp'])
end
override :human_name
def human_name
_('terraform states')
end
end
end
# frozen_string_literal: true
module Backup
class Uploads < Backup::Files
def initialize(progress)
super(progress, 'uploads', File.join(Gitlab.config.uploads.storage_path, "uploads"), excludes: ['tmp'])
end
override :human_name
def human_name
_('uploads')
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Backup::Artifacts do
let(:progress) { StringIO.new }
subject(:backup) { described_class.new(progress) }
describe '#dump' do
before do
allow(File).to receive(:realpath).with('/var/gitlab-artifacts').and_return('/var/gitlab-artifacts')
allow(File).to receive(:realpath).with('/var/gitlab-artifacts/..').and_return('/var')
allow(JobArtifactUploader).to receive(:root) { '/var/gitlab-artifacts' }
end
it 'excludes tmp from backup tar' do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/gitlab-artifacts -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump('artifacts.tar.gz', 'backup_id')
end
end
end
@@ -39,7 +39,7 @@ RSpec.describe Backup::Files do
end
describe '#restore' do
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
subject { described_class.new(progress, '/var/gitlab-registry') }
let(:timestamp) { Time.utc(2017, 3, 22) }
@@ -110,7 +110,7 @@ RSpec.describe Backup::Files do
end
describe '#dump' do
subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) }
before do
allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
@@ -176,7 +176,7 @@ RSpec.describe Backup::Files do
end
describe '#exclude_dirs' do
subject { described_class.new(progress, 'pages', '/var/gitlab-pages', excludes: ['@pages.tmp']) }
subject { described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp']) }
it 'prepends a leading dot slash to tar excludes' do
expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
@@ -188,7 +188,7 @@ RSpec.describe Backup::Files do
end
describe '#run_pipeline!' do
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
subject { described_class.new(progress, '/var/gitlab-registry') }
it 'executes an Open3.pipeline for cmd_list' do
expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
@@ -222,7 +222,7 @@ RSpec.describe Backup::Files do
end
describe '#pipeline_succeeded?' do
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
subject { described_class.new(progress, '/var/gitlab-registry') }
it 'returns true if both tar and gzip succeeded' do
expect(
@@ -262,7 +262,7 @@ RSpec.describe Backup::Files do
end
describe '#tar_ignore_non_success?' do
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
subject { described_class.new(progress, '/var/gitlab-registry') }
context 'if `tar` command exits with 1 exitstatus' do
it 'returns true' do
@@ -310,7 +310,7 @@ RSpec.describe Backup::Files do
end
describe '#noncritical_warning?' do
subject { described_class.new(progress, 'registry', '/var/gitlab-registry') }
subject { described_class.new(progress, '/var/gitlab-registry') }
it 'returns true if given text matches noncritical warnings list' do
expect(
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Backup::Lfs do
let(:progress) { StringIO.new }
subject(:backup) { described_class.new(progress) }
describe '#dump' do
before do
allow(File).to receive(:realpath).and_call_original
allow(File).to receive(:realpath).with('/var/lfs-objects').and_return('/var/lfs-objects')
allow(File).to receive(:realpath).with('/var/lfs-objects/..').and_return('/var')
allow(Settings.lfs).to receive(:storage_path).and_return('/var/lfs-objects')
end
it 'uses the correct lfs dir in tar command', :aggregate_failures do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found -C /var/lfs-objects -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump('lfs.tar.gz', 'backup_id')
end
end
end
@@ -22,8 +22,8 @@ RSpec.describe Backup::Manager do
describe '#run_create_task' do
let(:enabled) { true }
let(:task) { instance_double(Backup::Task, human_name: 'my task', enabled: enabled) }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
let(:task) { instance_double(Backup::Task) }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, destination_path: 'my_task.tar.gz', human_name: 'my task') } }
it 'calls the named task' do
expect(task).to receive(:dump)
@@ -58,12 +58,10 @@ RSpec.describe Backup::Manager do
let(:enabled) { true }
let(:pre_restore_warning) { nil }
let(:post_restore_warning) { nil }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, destination_path: 'my_task.tar.gz') } }
let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, human_name: 'my task', destination_path: 'my_task.tar.gz') } }
let(:backup_information) { {} }
let(:task) do
instance_double(Backup::Task,
human_name: 'my task',
enabled: enabled,
pre_restore_warning: pre_restore_warning,
post_restore_warning: post_restore_warning)
end
@@ -158,12 +156,12 @@ RSpec.describe Backup::Manager do
}
end
let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true) }
let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true) }
let(:task1) { instance_double(Backup::Task) }
let(:task2) { instance_double(Backup::Task) }
let(:definitions) do
{
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz')
}
end
@@ -735,12 +733,12 @@ RSpec.describe Backup::Manager do
end
describe '#restore' do
let(:task1) { instance_double(Backup::Task, human_name: 'task 1', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
let(:task2) { instance_double(Backup::Task, human_name: 'task 2', enabled: true, pre_restore_warning: nil, post_restore_warning: nil) }
let(:task1) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
let(:task2) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
let(:definitions) do
{
'task1' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'task2.tar.gz')
'task1' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'task 1', destination_path: 'task1.tar.gz'),
'task2' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'task 2', destination_path: 'task2.tar.gz')
}
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.shared_examples 'backup object' do |setting|
let(:progress) { StringIO.new }
let(:backup_path) { "/var/#{setting}" }
subject(:backup) { described_class.new(progress) }
describe '#dump' do
before do
allow(File).to receive(:realpath).and_call_original
allow(File).to receive(:realpath).with(backup_path).and_return(backup_path)
allow(File).to receive(:realpath).with("#{backup_path}/..").and_return('/var')
allow(Settings.send(setting)).to receive(:storage_path).and_return(backup_path)
end
it 'uses the correct storage dir in tar command and excludes tmp', :aggregate_failures do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%W(blabla-tar --exclude=lost+found --exclude=./tmp -C #{backup_path} -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump('backup_object.tar.gz', 'backup_id')
end
end
end
RSpec.describe Backup::Packages do
it_behaves_like 'backup object', 'packages'
end
RSpec.describe Backup::TerraformState do
it_behaves_like 'backup object', 'terraform_state'
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Backup::Pages do
let(:progress) { StringIO.new }
subject { described_class.new(progress) }
before do
allow(File).to receive(:realpath).with("/var/gitlab-pages").and_return("/var/gitlab-pages")
allow(File).to receive(:realpath).with("/var/gitlab-pages/..").and_return("/var")
end
describe '#dump' do
it 'excludes tmp from backup tar' do
allow(Gitlab.config.pages).to receive(:path) { '/var/gitlab-pages' }
expect(subject).to receive(:tar).and_return('blabla-tar')
expect(subject).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(subject).to receive(:pipeline_succeeded?).and_return(true)
subject.dump('pages.tar.gz', 'backup_id')
end
end
end
@@ -7,12 +7,6 @@ RSpec.describe Backup::Task do
subject { described_class.new(progress) }
describe '#human_name' do
it 'must be implemented by the subclass' do
expect { subject.human_name }.to raise_error(NotImplementedError)
end
end
describe '#dump' do
it 'must be implemented by the subclass' do
expect { subject.dump('some/path', 'backup_id') }.to raise_error(NotImplementedError)
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Backup::Uploads do
let(:progress) { StringIO.new }
subject(:backup) { described_class.new(progress) }
describe '#dump' do
before do
allow(File).to receive(:realpath).and_call_original
allow(File).to receive(:realpath).with('/var/uploads').and_return('/var/uploads')
allow(File).to receive(:realpath).with('/var/uploads/..').and_return('/var')
allow(Gitlab.config.uploads).to receive(:storage_path) { '/var' }
end
it 'excludes tmp from backup tar' do
expect(backup).to receive(:tar).and_return('blabla-tar')
expect(backup).to receive(:run_pipeline!).with([%w(blabla-tar --exclude=lost+found --exclude=./tmp -C /var/uploads -cf - .), 'gzip -c -1'], any_args).and_return([[true, true], ''])
expect(backup).to receive(:pipeline_succeeded?).and_return(true)
backup.dump('uploads.tar.gz', 'backup_id')
end
end
end
@@ -235,19 +235,19 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
db_backup_error = Backup::DatabaseBackupError.new(config, db_file_name)
where(:backup_class, :rake_task, :error) do
Backup::Database | 'gitlab:backup:db:create' | db_backup_error
Backup::Builds | 'gitlab:backup:builds:create' | file_backup_error
Backup::Uploads | 'gitlab:backup:uploads:create' | file_backup_error
Backup::Artifacts | 'gitlab:backup:artifacts:create' | file_backup_error
Backup::Pages | 'gitlab:backup:pages:create' | file_backup_error
Backup::Lfs | 'gitlab:backup:lfs:create' | file_backup_error
Backup::Registry | 'gitlab:backup:registry:create' | file_backup_error
Backup::Database | 'gitlab:backup:db:create' | db_backup_error
Backup::Files | 'gitlab:backup:builds:create' | file_backup_error
Backup::Files | 'gitlab:backup:uploads:create' | file_backup_error
Backup::Files | 'gitlab:backup:artifacts:create' | file_backup_error
Backup::Files | 'gitlab:backup:pages:create' | file_backup_error
Backup::Files | 'gitlab:backup:lfs:create' | file_backup_error
Backup::Files | 'gitlab:backup:registry:create' | file_backup_error
end
with_them do
before do
expect_next_instance_of(backup_class) do |instance|
expect(instance).to receive(:dump).and_raise(error)
allow_next_instance_of(backup_class) do |instance|
allow(instance).to receive(:dump).and_raise(error)
end
end
......