Commit 3612e267 authored by Sean McGivern's avatar Sean McGivern

Merge branch 'upload-migration-task' into 'master'

Migrate uploads rake task

Closes #4704

See merge request gitlab-org/gitlab-ee!4215
parents 79f6765f 4b8f95e5
class Appearance < ActiveRecord::Base
include CacheMarkdownField
include AfterCommitQueue
include ObjectStorage::BackgroundMove
cache_markdown_field :description
cache_markdown_field :new_project_guidelines
......
......@@ -11,13 +11,7 @@ module Ci
mount_uploader :file, JobArtifactUploader
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
end
end
delegate :open, :exists?, to: :file
delegate :exists?, :open, to: :file
enum file_type: {
archive: 1,
......
......@@ -3,6 +3,7 @@ module Avatarable
included do
prepend ShadowMethods
include ObjectStorage::BackgroundMove
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
......
......@@ -7,16 +7,8 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader
after_save if: :file_changed?, on: [:create, :update] do
run_after_commit do
file.schedule_migration_to_object_storage
end
end
def project_allowed_access?(project)
projects.exists?(project.lfs_storage_project.id)
end
......
......@@ -36,8 +36,8 @@ class Upload < ActiveRecord::Base
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader
uploader_class.new(model, mount_point, **uploader_context).tap do |uploader|
def build_uploader(mounted_as = nil)
uploader_class.new(model, mounted_as || mount_point).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
......@@ -54,6 +54,12 @@ class Upload < ActiveRecord::Base
}.compact
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
private
def delete_file!
......@@ -64,12 +70,6 @@ class Upload < ActiveRecord::Base
checksum.nil? && local? && exist?
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
......
......@@ -17,6 +17,10 @@ class FileUploader < GitlabUploader
after :remove, :prune_store_dir
# FileUploader do not run in a model transaction, so we can simply
# enqueue a job after the :store hook.
after :store, :schedule_background_upload
def self.root
File.join(options.storage_path, 'uploads')
end
......
......@@ -121,6 +121,10 @@
- geo:geo_repositories_clean_up
- geo:geo_repository_destroy
- object_storage_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- admin_emails
- elastic_batch_project_indexer
- elastic_commit_indexer
......@@ -131,7 +135,6 @@
- geo_project_sync
- geo_repository_shard_sync
- ldap_group_sync
- object_storage_upload
- project_update_repository_storage
- rebase
- repository_update_mirror
......
---
title: Add object storage migration task for uploads.
merge_request: 4215
author:
type: added
......@@ -85,3 +85,4 @@
- [elastic_commit_indexer, 1]
- [export_csv, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
......@@ -14,6 +14,8 @@ module EE
DAST_FILE = 'gl-dast-report.json'.freeze
included do
include ObjectStorage::BackgroundMove
scope :codequality, -> { where(name: %w[codequality codeclimate]) }
scope :performance, -> { where(name: %w[performance deploy]) }
scope :sast, -> { where(name: 'sast') }
......
......@@ -7,6 +7,8 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
......
......@@ -7,7 +7,11 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
end
def local_store?
......
......@@ -2,6 +2,10 @@ module EE
module Note
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
end
def for_epic?
noteable.is_a?(Epic)
end
......
......@@ -8,7 +8,7 @@ require 'carrierwave/storage/fog'
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStoreUnavailable = Class.new(StandardError)
ObjectStorageUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
......@@ -21,7 +21,7 @@ module ObjectStorage
extend ActiveSupport::Concern
prepended do |base|
raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
raise "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(::RecordsUploads::Concern)
end
......@@ -50,6 +50,15 @@ module ObjectStorage
super
end
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
upload.class.to_s,
mounted_as,
upload.id)
end
private
def current_upload_satisfies?(paths, model)
......@@ -63,6 +72,33 @@ module ObjectStorage
end
end
# Add support for automatic background uploading after the file is stored.
#
module BackgroundMove
extend ActiveSupport::Concern
def background_upload(mount_points = [])
return unless mount_points.any?
run_after_commit do
mount_points.each { |mount| send(mount).schedule_background_upload } # rubocop:disable GitlabSecurity/PublicSend
end
end
def changed_mounts
self.class.uploaders.select do |mount, uploader_class|
mounted_as = uploader_class.serialization_column(self.class, mount)
mount if send(:"#{mounted_as}_changed?") # rubocop:disable GitlabSecurity/PublicSend
end.keys
end
included do
after_save on: [:create, :update] do
background_upload(changed_mounts)
end
end
end
module Concern
extend ActiveSupport::Concern
......@@ -97,6 +133,10 @@ module ObjectStorage
def licensed?
License.feature_available?(:object_storage)
end
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
end
def file_storage?
......@@ -183,13 +223,13 @@ module ObjectStorage
raise e
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.class.licensed?
return unless self.file_storage?
def schedule_background_upload(*args)
return unless schedule_background_upload?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
ObjectStorage::BackgroundMoveWorker.perform_async(self.class.name,
model.class.name,
mounted_as,
model.id)
end
def fog_directory
......@@ -211,7 +251,7 @@ module ObjectStorage
def verify_license!(_file)
return if file_storage?
raise 'Object Storage feature is missing' unless self.class.licensed?
raise(ObjectStorageUnavailable, 'Object Storage feature is missing') unless self.class.licensed?
end
def exists?
......@@ -231,6 +271,13 @@ module ObjectStorage
private
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.class.licensed? &&
self.file_storage?
end
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
......@@ -238,7 +285,7 @@ module ObjectStorage
end
def serialization_column
model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
self.class.serialization_column(model.class, mounted_as)
end
# Returns the column where the 'store' is saved
......
# Concern for setting Sidekiq settings for the various GitLab ObjectStorage workers.
#
# Including this concern places the worker's queue under the shared
# `object_storage` Sidekiq namespace (queues become `object_storage:<name>`).
module ObjectStorageQueue
  extend ActiveSupport::Concern

  included do
    queue_namespace :object_storage
  end
end
module ObjectStorage
  # Sidekiq worker that moves a single locally-stored file to remote object
  # storage. The subject may be an uploader-mounting model (LfsObject,
  # Ci::Build, ...) or an Upload record itself.
  class BackgroundMoveWorker
    include ApplicationWorker
    include ObjectStorageQueue

    sidekiq_options retry: 5

    # @param uploader_class_name [String] name of the uploader class to migrate
    # @param subject_class_name [String] name of the model class owning the file
    # @param file_field [String, Symbol, nil] mount point of the uploader on the model
    # @param subject_id [Integer] id of the model record
    def perform(uploader_class_name, subject_class_name, file_field, subject_id)
      klass = uploader_class_name.constantize
      record_class = subject_class_name.constantize

      # Only migrate when the uploader actually supports object storage and
      # the instance is licensed/configured for background uploads.
      eligible = klass < ObjectStorage::Concern &&
        klass.object_store_enabled? &&
        klass.licensed? &&
        klass.background_upload_enabled?
      return unless eligible

      record = record_class.find(subject_id)
      build_uploader(record, file_field&.to_sym).migrate!(ObjectStorage::Store::REMOTE)
    end

    # Resolve the uploader either from an Upload record or from the mounted
    # uploader on the model.
    def build_uploader(subject, mount_point)
      if subject.is_a?(Upload)
        subject.build_uploader(mount_point)
      else
        subject.send(mount_point) # rubocop:disable GitlabSecurity/PublicSend
      end
    end
  end
end
# frozen_string_literal: true
# rubocop:disable Metrics/LineLength
# rubocop:disable Style/Documentation

module ObjectStorage
  # Sidekiq worker that migrates a batch of Upload records to a target store.
  # Batches are enqueued by the `gitlab:uploads:migrate` rake task via
  # {.enqueue!}.
  class MigrateUploadsWorker
    include ApplicationWorker
    include ObjectStorageQueue

    # Raised when a batch mixes uploader/model types or references an
    # unknown mount point; such a job must not be retried.
    SanityCheckError = Class.new(StandardError)

    # Worker-local copy of the Upload model so the worker does not depend on
    # the app model's callbacks/validations changing underneath it.
    class Upload < ActiveRecord::Base
      # Upper limit for foreground checksum processing
      CHECKSUM_THRESHOLD = 100.megabytes

      belongs_to :model, polymorphic: true # rubocop:disable Cop/PolymorphicAssociations

      validates :size, presence: true
      validates :path, presence: true
      validates :model, presence: true
      validates :uploader, presence: true

      before_save :calculate_checksum!, if: :foreground_checksummable?
      after_commit :schedule_checksum, if: :checksummable?

      scope :stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
      scope :stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }

      def self.hexdigest(path)
        Digest::SHA256.file(path).hexdigest
      end

      # Absolute filesystem path of a locally stored upload.
      # @raise [ObjectStorage::RemoteStoreError] when the file is remote.
      def absolute_path
        raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
        return path unless relative_path?

        uploader_class.absolute_path(self)
      end

      def calculate_checksum!
        self.checksum = nil
        return unless checksummable?

        self.checksum = self.class.hexdigest(absolute_path)
      end

      # Build the uploader instance backing this record, optionally mounted
      # under +mounted_as+ (falls back to the uploader's own default).
      def build_uploader(mounted_as = nil)
        uploader_class.new(model, mounted_as).tap do |uploader|
          uploader.upload = self
          uploader.retrieve_from_store!(identifier)
        end
      end

      def exist?
        File.exist?(absolute_path)
      end

      # A nil store predates object storage support and means "local".
      def local?
        return true if store.nil?

        store == ObjectStorage::Store::LOCAL
      end

      private

      def checksummable?
        checksum.nil? && local? && exist?
      end

      def foreground_checksummable?
        checksummable? && size <= CHECKSUM_THRESHOLD
      end

      def schedule_checksum
        UploadChecksumWorker.perform_async(id)
      end

      def relative_path?
        !path.start_with?('/')
      end

      def identifier
        File.basename(path)
      end

      def uploader_class
        Object.const_get(uploader)
      end
    end

    # Per-upload outcome of a migration attempt.
    class MigrationResult
      attr_reader :upload
      attr_accessor :error

      def initialize(upload, error = nil)
        @upload, @error = upload, error
      end

      def success?
        error.nil?
      end

      def to_s
        success? ? "Migration successful." : "Error while migrating #{upload.id}: #{error.message}"
      end
    end

    # Logging/reporting helpers mixed into the worker.
    module Report
      class MigrationFailures < StandardError
        attr_reader :errors

        def initialize(errors)
          @errors = errors
        end

        def message
          errors.map(&:message).join("\n")
        end
      end

      # Log a summary and all failures; raise when any upload failed so the
      # job is marked as failed.
      def report!(results)
        success, failures = results.partition(&:success?)

        Rails.logger.info header(success, failures)
        Rails.logger.warn failures(failures)

        raise MigrationFailures.new(failures.map(&:error)) if failures.any?
      end

      def header(success, failures)
        "Migrated #{success.count}/#{success.count + failures.count} files."
      end

      def failures(failures)
        # Join with a double-quoted "\n" so entries are separated by real
        # newlines; the previous single-quoted '\n' printed a literal
        # backslash-n between failures.
        failures.map { |f| "\t#{f}" }.join("\n")
      end
    end

    include Report

    def self.enqueue!(uploads, mounted_as, to_store)
      sanity_check!(uploads, mounted_as)

      perform_async(uploads.ids, mounted_as, to_store)
    end

    # We need to be sure all the uploads are for the same uploader and model type
    # and that the mount point exists if provided.
    #
    def self.sanity_check!(uploads, mounted_as)
      upload = uploads.first
      uploader_class = upload.uploader.constantize
      model_class = upload.model_type.constantize

      uploader_types = uploads.map(&:uploader).uniq
      model_types = uploads.map(&:model_type).uniq
      model_has_mount = mounted_as.nil? || model_class.uploaders[mounted_as] == uploader_class

      raise(SanityCheckError, "Multiple uploaders found: #{uploader_types}") unless uploader_types.count == 1
      raise(SanityCheckError, "Multiple model types found: #{model_types}") unless model_types.count == 1
      raise(SanityCheckError, "Mount point #{mounted_as} not found in #{model_class}.") unless model_has_mount
    end

    def perform(ids, mounted_as, to_store)
      @mounted_as = mounted_as&.to_sym
      @to_store = to_store

      uploads = Upload.preload(:model).where(id: ids)

      sanity_check!(uploads)
      results = migrate(uploads)

      report!(results)
    rescue SanityCheckError => e
      # do not retry: the job is insane
      Rails.logger.warn "#{self.class}: Sanity check error (#{e.message})"
    end

    def sanity_check!(uploads)
      self.class.sanity_check!(uploads, @mounted_as)
    end

    def build_uploaders(uploads)
      uploads.map { |upload| upload.build_uploader(@mounted_as) }
    end

    def migrate(uploads)
      build_uploaders(uploads).map(&method(:process_uploader))
    end

    # Migrate one uploader, capturing any exception in the result instead of
    # aborting the whole batch.
    def process_uploader(uploader)
      MigrationResult.new(uploader.upload).tap do |result|
        begin
          uploader.migrate!(@to_store)
        rescue => e
          result.error = e
        end
      end
    end
  end
end
# @Deprecated - remove once the `object_storage_upload` queue is empty
# The queue has been renamed `object_storage:object_storage_background_upload`
#
class ObjectStorageUploadWorker
include ApplicationWorker
......@@ -15,8 +18,5 @@ class ObjectStorageUploadWorker
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
namespace :gitlab do
  namespace :uploads do
    desc 'GitLab | Uploads | Migrate the uploaded files to object storage'
    task :migrate, [:uploader_class, :model_class, :mounted_as] => :environment do |task, args|
      # Batch size is tunable via the BATCH environment variable (default 200).
      batch_size = ENV.fetch('BATCH', 200).to_i
      # Instance variables are shared with the helper methods defined below.
      @to_store = ObjectStorage::Store::REMOTE
      # Strip a leading ':' so both `avatar` and `:avatar` are accepted as arg.
      @mounted_as = args.mounted_as&.gsub(':', '')&.to_sym
      @uploader_class = args.uploader_class.constantize
      @model_class = args.model_class.constantize

      uploads.each_batch(of: batch_size, &method(:enqueue_batch)) # rubocop: disable Cop/InBatches
    end

    # Enqueue one background migration job per batch; a failed sanity check
    # skips the batch but lets the loop continue.
    def enqueue_batch(batch, index)
      job = ObjectStorage::MigrateUploadsWorker.enqueue!(batch,
                                                         @mounted_as,
                                                         @to_store)
      puts "Enqueued job ##{index}: #{job}"
    rescue ObjectStorage::MigrateUploadsWorker::SanityCheckError => e
      # continue for the next batch
      puts "Could not enqueue batch (#{batch.ids}) #{e.message}".color(:red)
    end

    # Scope of Upload records still needing migration for the requested
    # uploader/model pair (i.e. not already in the target store).
    def uploads
      Upload.class_eval { include EachBatch } unless Upload < EachBatch

      Upload
        .where.not(store: @to_store)
        .where(uploader: @uploader_class.to_s,
               model_type: @model_class.base_class.sti_name)
    end
  end
end
namespace :gitlab do
  namespace :uploads do
    desc 'GitLab | Uploads | Check integrity of uploaded files'
    task check: :environment do
      puts 'Checking integrity of uploaded files'

      uploads_batches do |batch|
        batch.each do |upload|
          puts "- Checking file (#{upload.id}): #{upload.absolute_path}".color(:green)

          if upload.exist?
            check_checksum(upload)
          else
            puts " * File does not exist on the file system".color(:red)
          end
        end
      end

      puts 'Done!'
    end

    # Batch size for iterating uploads; override via the BATCH env var.
    def batch_size
      ENV.fetch('BATCH', 200).to_i
    end

    def calculate_checksum(absolute_path)
      Digest::SHA256.file(absolute_path).hexdigest
    end

    # Recompute the file's SHA256 and warn when it differs from the stored one.
    def check_checksum(upload)
      checksum = calculate_checksum(upload.absolute_path)

      if checksum != upload.checksum
        puts " * File checksum (#{checksum}) does not match the one in the database (#{upload.checksum})".color(:red)
      end
    end

    # Yield batches of all uploads; ID_FROM/ID_TO env vars bound the id range.
    def uploads_batches(&block)
      Upload.all.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO']) do |relation| # rubocop: disable Cop/InBatches
        yield relation
      end
    end
  end
end
require_relative 'helpers.rb'

namespace :gitlab do
  namespace :uploads do
    desc 'GitLab | Uploads | Check integrity of uploaded files'
    task check: :environment do
      # Pull in batch_size/check_checksum/uploads_batches from UploadTaskHelpers.
      include UploadTaskHelpers

      puts 'Checking integrity of uploaded files'

      uploads_batches do |batch|
        batch.each do |upload|
          begin
            puts "- Checking file (#{upload.id}): #{upload.absolute_path}".color(:green)

            if upload.exist?
              check_checksum(upload)
            else
              puts " * File does not exist on the file system".color(:red)
            end
          rescue ObjectStorage::RemoteStoreError
            # absolute_path raises for remote files; they cannot be checked here.
            puts "- File (#{upload.id}): File is stored remotely, skipping".color(:yellow)
          end
        end
      end

      puts 'Done!'
    end
  end
end
# Shared helpers for the gitlab:uploads rake tasks.
module UploadTaskHelpers
  # Batch size for iterating uploads; override via the BATCH env var.
  def batch_size
    ENV.fetch('BATCH', 200).to_i
  end

  # SHA256 hex digest of the file at +absolute_path+.
  def calculate_checksum(absolute_path)
    digest = Digest::SHA256.file(absolute_path)
    digest.hexdigest
  end

  # Recompute the file's checksum and warn when it differs from the one
  # stored on the upload record.
  def check_checksum(upload)
    actual = calculate_checksum(upload.absolute_path)
    return if actual == upload.checksum

    puts " * File checksum (#{actual}) does not match the one in the database (#{upload.checksum})".color(:red)
  end

  # Yield batches of all uploads; ID_FROM/ID_TO env vars bound the id range.
  def uploads_batches(&block)
    Upload.all.in_batches(of: batch_size, start: ENV['ID_FROM'], finish: ENV['ID_TO'], &block) # rubocop: disable Cop/InBatches
  end
end
......@@ -34,7 +34,7 @@ describe LfsObject do
end
end
describe '#schedule_migration_to_object_storage' do
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
......@@ -47,7 +47,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -61,7 +61,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -73,7 +73,7 @@ describe LfsObject do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -86,7 +86,7 @@ describe LfsObject do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......
require 'rake_helper'

# Spec for the gitlab:uploads:migrate rake task: verifies the task splits the
# uploads into batches and enqueues one MigrateUploadsWorker job per batch.
describe 'gitlab:uploads:migrate rake tasks' do
  let!(:projects) { create_list(:project, 10, :with_avatar) }
  let(:model_class) { Project }
  let(:uploader_class) { AvatarUploader }
  let(:mounted_as) { :avatar }
  let(:batch_size) { 3 }

  before do
    stub_env('BATCH', batch_size.to_s)
    stub_uploads_object_storage(uploader_class)
    Rake.application.rake_require 'tasks/gitlab/uploads/migrate'

    allow(ObjectStorage::MigrateUploadsWorker).to receive(:perform_async)
  end

  def run
    args = [uploader_class.to_s, model_class.to_s, mounted_as].compact
    run_rake_task("gitlab:uploads:migrate", *args)
  end

  it 'enqueue jobs in batch' do
    # 10 uploads with BATCH=3 => ceil(10/3) = 4 enqueued batches.
    expect(ObjectStorage::MigrateUploadsWorker).to receive(:enqueue!).exactly(4).times

    run
  end
end
require 'spec_helper'

# Spec for the background-move worker, exercised against every supported
# subject type: LFS objects, legacy build artifacts, job artifacts, and
# uploads (addressed either via the model or via the Upload record).
describe ObjectStorage::BackgroundMoveWorker do
  let(:local) { ObjectStorage::Store::LOCAL }
  let(:remote) { ObjectStorage::Store::REMOTE }

  def perform
    described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
  end

  context 'for LFS' do
    let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
    let(:uploader_class) { LfsObjectUploader }
    let(:subject_class) { LfsObject }
    let(:file_field) { :file }
    let(:subject_id) { lfs_object.id }

    context 'when object storage is enabled' do
      before do
        stub_lfs_object_storage(background_upload: true)
      end

      it 'uploads object to storage' do
        expect { perform }.to change { lfs_object.reload.file_store }.from(local).to(remote)
      end

      context 'when background upload is disabled' do
        before do
          allow(Gitlab.config.lfs.object_store).to receive(:background_upload) { false }
        end

        it 'is skipped' do
          expect { perform }.not_to change { lfs_object.reload.file_store }
        end
      end
    end

    context 'when object storage is disabled' do
      before do
        stub_lfs_object_storage(enabled: false)
      end

      it "doesn't migrate files" do
        perform

        expect(lfs_object.reload.file_store).to eq(local)
      end
    end
  end

  context 'for legacy artifacts' do
    let(:build) { create(:ci_build, :legacy_artifacts) }
    let(:uploader_class) { LegacyArtifactUploader }
    let(:subject_class) { Ci::Build }
    let(:file_field) { :artifacts_file }
    let(:subject_id) { build.id }

    context 'when local storage is used' do
      let(:store) { local }

      context 'and remote storage is defined' do
        before do
          stub_artifacts_object_storage(background_upload: true)
        end

        it "migrates file to remote storage" do
          perform

          expect(build.reload.artifacts_file_store).to eq(remote)
        end

        context 'for artifacts_metadata' do
          let(:file_field) { :artifacts_metadata }

          it 'migrates metadata to remote storage' do
            perform

            expect(build.reload.artifacts_metadata_store).to eq(remote)
          end
        end
      end
    end
  end

  context 'for job artifacts' do
    let(:artifact) { create(:ci_job_artifact, :archive) }
    let(:uploader_class) { JobArtifactUploader }
    let(:subject_class) { Ci::JobArtifact }
    let(:file_field) { :file }
    let(:subject_id) { artifact.id }

    context 'when local storage is used' do
      let(:store) { local }

      context 'and remote storage is defined' do
        before do
          stub_artifacts_object_storage(background_upload: true)
        end

        it "migrates file to remote storage" do
          perform

          expect(artifact.reload.file_store).to eq(remote)
        end
      end
    end
  end

  context 'for uploads' do
    let!(:project) { create(:project, :with_avatar) }
    let(:uploader_class) { AvatarUploader }
    let(:file_field) { :avatar }

    context 'when local storage is used' do
      let(:store) { local }

      context 'and remote storage is defined' do
        before do
          stub_uploads_object_storage(uploader_class, background_upload: true)
        end

        # The worker accepts the mounting model as subject...
        describe 'supports using the model' do
          let(:subject_class) { project.class }
          let(:subject_id) { project.id }

          it "migrates file to remote storage" do
            perform

            expect(project.reload.avatar.file_storage?).to be_falsey
          end
        end

        # ...or the Upload record directly.
        describe 'supports using the Upload' do
          let(:subject_class) { Upload }
          let(:subject_id) { project.avatar.upload.id }

          it "migrates file to remote storage" do
            perform

            expect(project.reload.avatar.file_storage?).to be_falsey
          end
        end
      end
    end
  end
end
require 'spec_helper'

# Spec for the batch upload-migration worker: enqueue guarding, batch sanity
# checks, actual migration, and reporting output.
describe ObjectStorage::MigrateUploadsWorker, :sidekiq do
  shared_context 'sanity_check! fails' do
    before do
      expect(described_class).to receive(:sanity_check!).and_raise(described_class::SanityCheckError)
    end
  end

  let!(:projects) { create_list(:project, 10, :with_avatar) }
  let(:uploads) { Upload.all }
  let(:mounted_as) { :avatar }
  let(:to_store) { ObjectStorage::Store::REMOTE }

  before do
    stub_uploads_object_storage(AvatarUploader)
  end

  describe '.enqueue!' do
    def enqueue!
      described_class.enqueue!(uploads, mounted_as, to_store)
    end

    it 'is guarded by .sanity_check!' do
      expect(described_class).to receive(:perform_async)
      expect(described_class).to receive(:sanity_check!)

      enqueue!
    end

    context 'sanity_check! fails' do
      include_context 'sanity_check! fails'

      it 'does not enqueue a job' do
        expect(described_class).not_to receive(:perform_async)

        expect { enqueue! }.to raise_error(described_class::SanityCheckError)
      end
    end
  end

  describe '.sanity_check!' do
    shared_examples 'raises a SanityCheckError' do
      let(:mount_point) { nil }

      it do
        expect { described_class.sanity_check!(uploads, mount_point) }
          .to raise_error(described_class::SanityCheckError)
      end
    end

    context 'uploader types mismatch' do
      let!(:outlier) { create(:upload, uploader: 'FileUploader') }

      include_examples 'raises a SanityCheckError'
    end

    context 'model types mismatch' do
      let!(:outlier) { create(:upload, model_type: 'Potato') }

      include_examples 'raises a SanityCheckError'
    end

    context 'mount point not found' do
      include_examples 'raises a SanityCheckError' do
        let(:mount_point) { :potato }
      end
    end
  end

  describe '#perform' do
    def perform
      described_class.new.perform(uploads.ids, mounted_as, to_store)
    rescue ObjectStorage::MigrateUploadsWorker::Report::MigrationFailures
      # swallow
    end

    # Asserts the logger output for a run with the given success/failure split.
    shared_examples 'outputs correctly' do |success: 0, failures: 0|
      total = success + failures

      if success > 0
        it 'outputs the reports' do
          expect(Rails.logger).to receive(:info).with(%r{Migrated #{success}/#{total} files})

          perform
        end
      end

      if failures > 0
        it 'outputs upload failures' do
          expect(Rails.logger).to receive(:warn).with(/Error .* I am a teapot/)

          perform
        end
      end
    end

    it_behaves_like 'outputs correctly', success: 10

    it 'migrates files' do
      perform

      aggregate_failures do
        projects.each do |project|
          expect(project.reload.avatar.upload.local?).to be_falsey
        end
      end
    end

    context 'migration is unsuccessful' do
      before do
        allow_any_instance_of(ObjectStorage::Concern).to receive(:migrate!).and_raise(CarrierWave::UploadError, "I am a teapot.")
      end

      it_behaves_like 'outputs correctly', failures: 10
    end
  end
end
......@@ -6,4 +6,17 @@ FactoryBot.define do
description "Open source software to collaborate on code"
new_project_guidelines "Custom project guidelines"
end
trait :with_logo do
logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_header_logo do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :with_logos do
with_logo
with_header_logo
end
end
......@@ -18,14 +18,14 @@ describe Ci::JobArtifact do
describe 'callbacks' do
subject { create(:ci_job_artifact, :archive) }
describe '#schedule_migration_to_object_storage' do
describe '#schedule_background_upload' do
context 'when object storage is disabled' do
before do
stub_artifacts_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -39,7 +39,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
......@@ -51,7 +51,7 @@ describe Ci::JobArtifact do
end
it 'does not schedule the migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......@@ -64,7 +64,7 @@ describe Ci::JobArtifact do
end
it 'schedules the model for migration' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
......
......@@ -1030,7 +1030,7 @@ describe 'Git LFS API and storage' do
context 'with object storage disabled' do
it "doesn't attempt to migrate file to object storage" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
put_finalize(with_tempfile: true)
end
......@@ -1042,7 +1042,7 @@ describe 'Git LFS API and storage' do
end
it 'schedules migration of file to object storage' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('LfsObjectUploader', 'LfsObject', :file, kind_of(Numeric))
put_finalize(with_tempfile: true)
end
......
require 'rake_helper'
describe 'gitlab:uploads:check rake tasks' do
let!(:upload) { create(:upload, path: Rails.root.join('spec/fixtures/banana_sample.gif')) }
before do
Rake.application.rake_require 'tasks/gitlab/uploads/check'
end
it 'outputs the integrity check for each uploaded file' do
expect { run_rake_task('gitlab:uploads:check') }.to output(/Checking file \(#{upload.id}\): #{Regexp.quote(upload.absolute_path)}/).to_stdout
end
it 'errors out about missing files on the file system' do
create(:upload)
expect { run_rake_task('gitlab:uploads:check') }.to output(/File does not exist on the file system/).to_stdout
end
it 'errors out about invalid checksum' do
upload.update_column(:checksum, '01a3156db2cf4f67ec823680b40b7302f89ab39179124ad219f94919b8a1769e')
expect { run_rake_task('gitlab:uploads:check') }.to output(/File checksum \(9e697aa09fe196909813ee36103e34f721fe47a5fdc8aac0e4e4ac47b9b38282\) does not match the one in the database \(#{upload.checksum}\)/).to_stdout
end
end
require 'rake_helper'
describe 'gitlab:uploads rake tasks' do
describe 'check' do
let!(:upload) { create(:upload, path: Rails.root.join('spec/fixtures/banana_sample.gif')) }
before do
Rake.application.rake_require 'tasks/gitlab/uploads'
end
it 'outputs the integrity check for each uploaded file' do
expect { run_rake_task('gitlab:uploads:check') }.to output(/Checking file \(#{upload.id}\): #{Regexp.quote(upload.absolute_path)}/).to_stdout
end
it 'errors out about missing files on the file system' do
create(:upload)
expect { run_rake_task('gitlab:uploads:check') }.to output(/File does not exist on the file system/).to_stdout
end
it 'errors out about invalid checksum' do
upload.update_column(:checksum, '01a3156db2cf4f67ec823680b40b7302f89ab39179124ad219f94919b8a1769e')
expect { run_rake_task('gitlab:uploads:check') }.to output(/File checksum \(9e697aa09fe196909813ee36103e34f721fe47a5fdc8aac0e4e4ac47b9b38282\) does not match the one in the database \(#{upload.checksum}\)/).to_stdout
end
end
end
......@@ -26,7 +26,7 @@ describe LfsObjectUploader do
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -38,7 +38,7 @@ describe LfsObjectUploader do
end
it 'is scheduled to run after creation' do
expect(ObjectStorageUploadWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with(described_class.name, 'LfsObject', :file, kind_of(Numeric))
lfs_object
end
......@@ -50,7 +50,7 @@ describe LfsObjectUploader do
end
it 'is skipped' do
expect(ObjectStorageUploadWorker).not_to receive(:perform_async)
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
......@@ -67,7 +67,7 @@ describe LfsObjectUploader do
end
it 'can store file remotely' do
allow(ObjectStorageUploadWorker).to receive(:perform_async)
allow(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async)
store_file(lfs_object)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment