Commit 32c801eb authored by Sean McGivern

Merge branch '4163-move-uploads-to-object-storage' into 'master'

Move uploads to object storage

Closes #4163

See merge request gitlab-org/gitlab-ee!3867
parents b11b6d3e 71f4ae4a
module UploadsActions
include Gitlab::Utils::StrongMemoize
UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze
def create
link_to_file = UploadService.new(model, params[:file], uploader_class).execute
@@ -17,34 +19,71 @@ module UploadsActions
end
end
# This should either
# - send the file directly
# - or redirect to its URL
#
def show
return render_404 unless uploader.exists?
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true
if uploader.file_storage?
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true
send_file uploader.file.path, disposition: disposition
send_file uploader.file.path, disposition: disposition
else
redirect_to uploader.url
end
end
private
def uploader_class
raise NotImplementedError
end
def upload_mount
mounted_as = params[:mounted_as]
mounted_as if UPLOAD_MOUNTS.include?(mounted_as)
end
def uploader_mounted?
upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil?
end
def uploader
strong_memoize(:uploader) do
return if show_model.nil?
if uploader_mounted?
model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
else
build_uploader_from_upload || build_uploader_from_params
end
end
end
file_uploader = FileUploader.new(show_model, params[:secret])
file_uploader.retrieve_from_store!(params[:filename])
def build_uploader_from_upload
return nil unless params[:secret] && params[:filename]
file_uploader
end
upload_path = uploader_class.upload_path(params[:secret], params[:filename])
upload = Upload.find_by(uploader: uploader_class.to_s, path: upload_path)
upload&.build_uploader
end
def build_uploader_from_params
uploader = uploader_class.new(model, params[:secret])
uploader.retrieve_from_store!(params[:filename])
uploader
end
def image_or_video?
uploader && uploader.exists? && uploader.image_or_video?
end
def uploader_class
FileUploader
def find_model
nil
end
def model
strong_memoize(:model) { find_model }
end
end
@@ -7,29 +7,23 @@ class Groups::UploadsController < Groups::ApplicationController
private
def show_model
strong_memoize(:show_model) do
group_id = params[:group_id]
Group.find_by_full_path(group_id)
end
def upload_model_class
Group
end
def authorize_upload_file!
render_404 unless can?(current_user, :upload_file, group)
def uploader_class
NamespaceFileUploader
end
def uploader
strong_memoize(:uploader) do
file_uploader = uploader_class.new(show_model, params[:secret])
file_uploader.retrieve_from_store!(params[:filename])
file_uploader
end
end
def find_model
return @group if @group
def uploader_class
NamespaceFileUploader
group_id = params[:group_id]
Group.find_by_full_path(group_id)
end
alias_method :model, :group
def authorize_upload_file!
render_404 unless can?(current_user, :upload_file, group)
end
end
@@ -61,7 +61,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
def store_file(oid, size, tmp_file)
# Define tmp_file_path early because we use it in "ensure"
tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file)
tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
object = LfsObject.find_or_create_by(oid: oid, size: size)
file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
......
class Projects::UploadsController < Projects::ApplicationController
include UploadsActions
# These will kick you out if you don't have access.
skip_before_action :project, :repository,
if: -> { action_name == 'show' && image_or_video? }
@@ -8,14 +9,20 @@ class Projects::UploadsController < Projects::ApplicationController
private
def show_model
strong_memoize(:show_model) do
namespace = params[:namespace_id]
id = params[:project_id]
def upload_model_class
Project
end
Project.find_by_full_path("#{namespace}/#{id}")
end
def uploader_class
FileUploader
end
alias_method :model, :project
def find_model
return @project if @project
namespace = params[:namespace_id]
id = params[:project_id]
Project.find_by_full_path("#{namespace}/#{id}")
end
end
class UploadsController < ApplicationController
include UploadsActions
UnknownUploadModelError = Class.new(StandardError)
MODEL_CLASSES = {
"user" => User,
"project" => Project,
"note" => Note,
"group" => Group,
"appearance" => Appearance,
"personal_snippet" => PersonalSnippet,
nil => PersonalSnippet
}.freeze
rescue_from UnknownUploadModelError, with: :render_404
skip_before_action :authenticate_user!
before_action :upload_mount_satisfied?
before_action :find_model
before_action :authorize_access!, only: [:show]
before_action :authorize_create_access!, only: [:create]
private
def uploader_class
PersonalFileUploader
end
def find_model
return nil unless params[:id]
return render_404 unless upload_model && upload_mount
@model = upload_model.find(params[:id])
upload_model_class.find(params[:id])
end
def authorize_access!
@@ -53,55 +68,17 @@ class UploadsController < ApplicationController
end
end
def upload_model
upload_models = {
"user" => User,
"project" => Project,
"note" => Note,
"group" => Group,
"appearance" => Appearance,
"personal_snippet" => PersonalSnippet
}
upload_models[params[:model]]
end
def upload_mount
return true unless params[:mounted_as]
upload_mounts = %w(avatar attachment file logo header_logo)
if upload_mounts.include?(params[:mounted_as])
params[:mounted_as]
end
def upload_model_class
MODEL_CLASSES[params[:model]] || raise(UnknownUploadModelError)
end
def uploader
return @uploader if defined?(@uploader)
case model
when nil
@uploader = PersonalFileUploader.new(nil, params[:secret])
@uploader.retrieve_from_store!(params[:filename])
when PersonalSnippet
@uploader = PersonalFileUploader.new(model, params[:secret])
@uploader.retrieve_from_store!(params[:filename])
else
@uploader = @model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
redirect_to @uploader.url unless @uploader.file_storage?
end
@uploader
def upload_model_class_has_mounts?
upload_model_class < CarrierWave::Mount::Extension
end
def uploader_class
PersonalFileUploader
end
def upload_mount_satisfied?
return true unless upload_model_class_has_mounts?
def model
@model ||= find_model
upload_model_class.uploader_options.has_key?(upload_mount)
end
end
@@ -11,6 +11,7 @@ class Appearance < ActiveRecord::Base
mount_uploader :logo, AttachmentUploader
mount_uploader :header_logo, AttachmentUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
CACHE_KEY = 'current_appearance'.freeze
......
@@ -49,7 +49,7 @@ module Ci
end
scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
scope :ref_protected, -> { where(protected: true) }
......
module Avatarable
extend ActiveSupport::Concern
included do
prepend ShadowMethods
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
mount_uploader :avatar, AvatarUploader
end
module ShadowMethods
def avatar_url(**args)
# We use avatar_path instead of overriding avatar_url because of carrierwave.
# See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
avatar_path(only_path: args.fetch(:only_path, true)) || super
end
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, "only images allowed"
end
end
def avatar_path(only_path: true)
return unless self[:avatar].present?
......
@@ -34,26 +34,21 @@ class Group < Namespace
has_many :ldap_group_links, foreign_key: 'group_id', dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :hooks, dependent: :destroy, class_name: 'GroupHook' # rubocop:disable Cop/ActiveRecordDependent
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
# We cannot simply set `has_many :audit_events, as: :entity, dependent: :destroy`
# here since Group inherits from Namespace, the entity_type would be set to `Namespace`.
has_many :audit_events, -> { where(entity_type: Group) }, foreign_key: 'entity_id'
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validate :visibility_level_allowed_by_projects
validate :visibility_level_allowed_by_sub_groups
validate :visibility_level_allowed_by_parent
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
validates :two_factor_grace_period, presence: true, numericality: { greater_than_or_equal_to: 0 }
validates :repository_size_limit,
numericality: { only_integer: true, greater_than_or_equal_to: 0, allow_nil: true }
mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
after_create :post_create_hook
after_destroy :post_destroy_hook
after_save :update_two_factor_requirement
@@ -133,12 +128,6 @@ class Group < Namespace
visibility_level_allowed_by_sub_groups?(level)
end
def avatar_url(**args)
# We use avatar_path instead of overriding avatar_url because of carrierwave.
# See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
avatar_path(args)
end
def lfs_enabled?
return false unless Gitlab.config.lfs.enabled
return Gitlab.config.lfs.enabled if self[:lfs_enabled].nil?
@@ -211,12 +200,6 @@ class Group < Namespace
owners.include?(user) && owners.size == 1
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, "only images allowed"
end
end
def human_ldap_access
Gitlab::Access.options_with_owner.key ldap_access
end
......
@@ -7,7 +7,7 @@ class LfsObject < ActiveRecord::Base
validates :oid, presence: true, uniqueness: true
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
mount_uploader :file, LfsObjectUploader
......
@@ -91,6 +91,7 @@ class Note < ActiveRecord::Base
end
end
# @deprecated attachments are handled by the MarkdownUploader
mount_uploader :attachment, AttachmentUploader
# Scopes
......
@@ -261,9 +261,6 @@ class Project < ActiveRecord::Base
validates :star_count, numericality: { greater_than_or_equal_to: 0 }
validate :check_limit, on: :create
validate :check_repository_path_availability, on: :update, if: ->(project) { project.renamed? }
validate :avatar_type,
if: ->(project) { project.avatar.present? && project.avatar_changed? }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
validate :visibility_level_allowed_by_group
validate :visibility_level_allowed_as_fork
validate :check_wiki_path_conflict
@@ -271,7 +268,6 @@ class Project < ActiveRecord::Base
presence: true,
inclusion: { in: ->(_object) { Gitlab.config.repositories.storages.keys } }
mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
# Scopes
@@ -933,20 +929,12 @@ class Project < ActiveRecord::Base
issues_tracker.to_param == 'jira'
end
def avatar_type
unless self.avatar.image?
self.errors.add :avatar, 'only images allowed'
end
end
def avatar_in_git
repository.avatar
end
def avatar_url(**args)
# We use avatar_path instead of overriding avatar_url because of carrierwave.
# See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
avatar_path(args) || (Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git)
Gitlab::Routing.url_helpers.project_avatar_url(self) if avatar_in_git
end
# For compatibility with old code
......
@@ -9,50 +9,52 @@ class Upload < ActiveRecord::Base
validates :model, presence: true
validates :uploader, presence: true
before_save :calculate_checksum, if: :foreground_checksum?
after_commit :schedule_checksum, unless: :foreground_checksum?
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
def self.remove_path(path)
where(path: path).destroy_all
end
def self.record(uploader)
remove_path(uploader.relative_path)
create(
size: uploader.file.size,
path: uploader.relative_path,
model: uploader.model,
uploader: uploader.class.to_s
)
end
def self.hexdigest(absolute_path)
return unless File.exist?(absolute_path)
Digest::SHA256.file(absolute_path).hexdigest
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
uploader_class.absolute_path(self)
end
def calculate_checksum
return unless exist?
def calculate_checksum!
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader
uploader_class.new(model).tap do |uploader|
uploader.upload = self
uploader.retrieve_from_store!(identifier)
end
end
def exist?
File.exist?(absolute_path)
end
private
def foreground_checksum?
size <= CHECKSUM_THRESHOLD
def checksummable?
checksum.nil? && local? && exist?
end
def local?
return true if store.nil?
store == ObjectStorage::Store::LOCAL
end
def foreground_checksummable?
checksummable? && size <= CHECKSUM_THRESHOLD
end
def schedule_checksum
@@ -63,6 +65,10 @@ class Upload < ActiveRecord::Base
!path.start_with?('/')
end
def identifier
File.basename(path)
end
def uploader_class
Object.const_get(uploader)
end
......
@@ -139,6 +139,7 @@ class User < ActiveRecord::Base
has_many :assigned_merge_requests, dependent: :nullify, foreign_key: :assignee_id, class_name: "MergeRequest" # rubocop:disable Cop/ActiveRecordDependent
has_many :custom_attributes, class_name: 'UserCustomAttribute'
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
#
# Validations
@@ -161,12 +162,10 @@ class User < ActiveRecord::Base
validate :namespace_uniq, if: :username_changed?
validate :namespace_move_dir_allowed, if: :username_changed?
validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
validate :unique_email, if: :email_changed?
validate :owns_notification_email, if: :notification_email_changed?
validate :owns_public_email, if: :public_email_changed?
validate :signup_domain_valid?, on: :create, if: ->(user) { !user.created_by_id }
validates :avatar, file_size: { maximum: 200.kilobytes.to_i }
before_validation :sanitize_attrs
before_validation :set_notification_email, if: :email_changed?
@@ -229,9 +228,6 @@ class User < ActiveRecord::Base
end
end
mount_uploader :avatar, AvatarUploader
has_many :uploads, as: :model, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
# Scopes
scope :admins, -> { where(admin: true) }
scope :blocked, -> { with_states(:blocked, :ldap_blocked) }
@@ -545,12 +541,6 @@ class User < ActiveRecord::Base
end
end
def avatar_type
unless avatar.image?
errors.add :avatar, "only images allowed"
end
end
def unique_email
if !emails.exists?(email: email) && Email.exists?(email: email)
errors.add(:email, 'has already been taken')
@@ -878,9 +868,7 @@ class User < ActiveRecord::Base
end
def avatar_url(size: nil, scale: 2, **args)
# We use avatar_path instead of overriding avatar_url because of carrierwave.
# See https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/11001/diffs#note_28659864
avatar_path(args) || GravatarService.new.execute(email, size, scale, username: username)
GravatarService.new.execute(email, size, scale, username: username)
end
def primary_email_verified?
......
@@ -16,9 +16,9 @@ module Projects
@old_path = project.full_path
@new_path = project.disk_path
origin = FileUploader.dynamic_path_segment(project)
origin = FileUploader.absolute_base_dir(project)
project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
target = FileUploader.dynamic_path_segment(project)
target = FileUploader.absolute_base_dir(project)
result = move_folder!(origin, target)
project.save!
......
class AttachmentUploader < GitlabUploader
include RecordsUploads
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper
storage :file
private
def store_dir
"#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
def dynamic_segment
File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
end
end
class AvatarUploader < GitlabUploader
include RecordsUploads
include UploaderHelper
storage :file
def store_dir
"#{base_dir}/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
end
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
def exists?
model.avatar.file && model.avatar.file.present?
end
# We set move_to_store and move_to_cache to 'false' to prevent stealing
# the avatar file from a project when forking it.
# https://gitlab.com/gitlab-org/gitlab-ce/issues/26158
def move_to_store
false
end
@@ -22,4 +15,10 @@ class AvatarUploader < GitlabUploader
def move_to_cache
false
end
private
def dynamic_segment
File.join(model.class.to_s.underscore, mounted_as.to_s, model.id.to_s)
end
end
@@ -21,13 +21,11 @@ class FileMover
end
def update_markdown
updated_text = model.read_attribute(update_field).gsub(temp_file_uploader.to_markdown, uploader.to_markdown)
updated_text = model.read_attribute(update_field)
.gsub(temp_file_uploader.markdown_link, uploader.markdown_link)
model.update_attribute(update_field, updated_text)
true
rescue
revert
false
end
......
# This class breaks the actual CarrierWave concept.
# Every uploader should use a base_dir that is model agnostic so we can build
# back URLs from base_dir-relative paths saved in the `Upload` model.
#
# As the `.base_dir` is model dependent and **not** saved in the upload model (see #upload_path)
# there is no way to build back the correct file path without the model, which defies
# the CarrierWave way of storing files.
#
class FileUploader < GitlabUploader
include RecordsUploads
include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
MARKDOWN_PATTERN = %r{\!?\[.*?\]\(/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*?)\)}
DYNAMIC_PATH_PATTERN = %r{(?<secret>\h{32})/(?<identifier>.*)}
storage :file
attr_accessor :model
def self.root
File.join(options.storage_path, 'uploads')
end
def self.absolute_path(upload_record)
def self.absolute_path(upload)
File.join(
self.dynamic_path_segment(upload_record.model),
upload_record.path
absolute_base_dir(upload.model),
upload.path # already contains the dynamic_segment, see #upload_path
)
end
# Not using `GitlabUploader.base_dir` because all project namespaces are in
# the `public/uploads` dir.
#
def self.base_dir
root_dir
def self.base_dir(model)
model_path_segment(model)
end
# used in migrations and import/exports
def self.absolute_base_dir(model)
File.join(root, base_dir(model))
end
# Returns the part of `store_dir` that can change based on the model's current
@@ -29,63 +46,94 @@ class FileUploader < GitlabUploader
# model - Object that responds to `full_path` and `disk_path`
#
# Returns a String without a trailing slash
def self.dynamic_path_segment(model)
def self.model_path_segment(model)
if model.hashed_storage?(:attachments)
dynamic_path_builder(model.disk_path)
model.disk_path
else
dynamic_path_builder(model.full_path)
model.full_path
end
end
# Auxiliary method to build dynamic path segment when not using a project model
#
# Prefer to use the `.dynamic_path_segment` as it includes Hashed Storage specific logic
def self.dynamic_path_builder(path)
File.join(CarrierWave.root, base_dir, path)
def self.upload_path(secret, identifier)
File.join(secret, identifier)
end
attr_accessor :model
attr_reader :secret
def self.generate_secret
SecureRandom.hex
end
def initialize(model, secret = nil)
@model = model
@secret = secret || generate_secret
@secret = secret
end
def store_dir
File.join(dynamic_path_segment, @secret)
def base_dir
self.class.base_dir(@model)
end
def relative_path
self.file.path.sub("#{dynamic_path_segment}/", '')
# we don't need to know the actual path, an uploader instance should be
# able to yield the file content on demand, so we should build the digest
def absolute_path
self.class.absolute_path(@upload)
end
def to_markdown
to_h[:markdown]
def upload_path
self.class.upload_path(dynamic_segment, identifier)
end
def to_h
filename = image_or_video? ? self.file.basename : self.file.filename
escaped_filename = filename.gsub("]", "\\]")
def model_path_segment
self.class.model_path_segment(@model)
end
markdown = "[#{escaped_filename}](#{secure_url})"
def store_dir
File.join(base_dir, dynamic_segment)
end
def markdown_link
markdown = "[#{markdown_name}](#{secure_url})"
markdown.prepend("!") if image_or_video? || dangerous?
markdown
end
def to_h
{
alt: filename,
alt: markdown_name,
url: secure_url,
markdown: markdown
markdown: markdown_link
}
end
def filename
self.file.filename
end
# the upload does not hold the secret, but holds the path
# which contains the secret: extract it
def upload=(value)
if matches = DYNAMIC_PATH_PATTERN.match(value.path)
@secret = matches[:secret]
@identifier = matches[:identifier]
end
super
end
def secret
@secret ||= self.class.generate_secret
end
private
def dynamic_path_segment
self.class.dynamic_path_segment(model)
def markdown_name
(image_or_video? ? File.basename(filename, File.extname(filename)) : filename).gsub("]", "\\]")
end
def generate_secret
SecureRandom.hex
def identifier
@identifier ||= filename
end
def dynamic_segment
secret
end
def secure_url
......
class GitlabUploader < CarrierWave::Uploader::Base
def self.absolute_path(upload_record)
File.join(CarrierWave.root, upload_record.path)
end
class_attribute :options
def self.root_dir
'uploads'
end
class << self
# DSL setter
def storage_options(options)
self.options = options
end
# When object storage is used, keep the `root_dir` as `base_dir`.
# The files aren't really in folders there, they just have a name.
# The files that contain user input in their name also contain a hash, so
# the names are still unique
#
# This method is overridden in the `FileUploader`
def self.base_dir
return root_dir unless file_storage?
def root
options.storage_path
end
File.join(root_dir, '-', 'system')
end
# represent the directory namespacing at the class level
def base_dir
options.fetch('base_dir', '')
end
def self.file_storage?
self.storage == CarrierWave::Storage::File
def file_storage?
storage == CarrierWave::Storage::File
end
def absolute_path(upload_record)
File.join(root, upload_record.path)
end
end
storage_options Gitlab.config.uploads
delegate :base_dir, :file_storage?, to: :class
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
# Reduce disk IO
def move_to_cache
true
file_storage?
end
# Reduce disk IO
def move_to_store
true
end
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def relative_path
self.file.path.sub("#{root}/", '')
file_storage?
end
def exists?
file.present?
end
# Override this if you don't want to save files by default to the Rails.root directory
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
def work_dir
# Default path set by CarrierWave:
# https://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L182
CarrierWave.tmp_path
File.join(root, base_dir, 'tmp/work')
end
def filename
@@ -67,6 +59,17 @@ class GitlabUploader < CarrierWave::Uploader::Base
private
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
# To prevent files from moving across filesystems, override the default
# implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.0.0/lib/carrierwave/uploader/cache.rb#L181-L183
@@ -74,6 +77,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
# To be safe, keep this directory outside of the cache directory
# because calling CarrierWave.clean_cache_files! will remove any files in
# the cache directory.
File.join(work_dir, @cache_id, version_name.to_s, for_file)
File.join(work_dir, cache_id, version_name.to_s, for_file)
end
end
class JobArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
def self.local_store_path
Gitlab.config.artifacts.path
end
class JobArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
end
storage_options Gitlab.config.artifacts
def size
return super if model.size.nil?
@@ -15,9 +10,13 @@ class JobArtifactUploader < ObjectStoreUploader
model.size
end
def store_dir
dynamic_segment
end
private
def default_path
def dynamic_segment
creation_date = model.created_at.utc.strftime('%Y_%m_%d')
File.join(disk_hash[0..1], disk_hash[2..3], disk_hash,
......
class LegacyArtifactUploader < ObjectStoreUploader
storage_options Gitlab.config.artifacts
class LegacyArtifactUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.local_store_path
Gitlab.config.artifacts.path
end
storage_options Gitlab.config.artifacts
def self.artifacts_upload_path
File.join(self.local_store_path, 'tmp/uploads/')
def store_dir
dynamic_segment
end
private
def default_path
def dynamic_segment
File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
end
end
class LfsObjectUploader < ObjectStoreUploader
storage_options Gitlab.config.lfs
class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
def self.local_store_path
Gitlab.config.lfs.storage_path
# LfsObjects are in `tmp/upload` instead of `tmp/uploads`
def self.workhorse_upload_path
File.join(root, 'tmp/upload')
end
storage_options Gitlab.config.lfs
def filename
model.oid[4..-1]
end
def store_dir
dynamic_segment
end
private
def default_path
"#{model.oid[0, 2]}/#{model.oid[2, 2]}"
def dynamic_segment
File.join(model.oid[0, 2], model.oid[2, 2])
end
end
class NamespaceFileUploader < FileUploader
def self.base_dir
File.join(root_dir, '-', 'system', 'namespace')
# Re-Override
def self.root
options.storage_path
end
def self.dynamic_path_segment(model)
dynamic_path_builder(model.id.to_s)
def self.base_dir(model)
File.join(options.base_dir, 'namespace', model_path_segment(model))
end
private
def self.model_path_segment(model)
File.join(model.id.to_s)
end
# Re-Override
def store_dir
store_dirs[object_store]
end
def secure_url
File.join('/uploads', @secret, file.filename)
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join('namespace', model_path_segment, dynamic_segment)
}
end
end
class PersonalFileUploader < FileUploader
def self.dynamic_path_segment(model)
File.join(CarrierWave.root, model_path(model))
# Re-Override
def self.root
options.storage_path
end
def self.base_dir
File.join(root_dir, '-', 'system')
def self.base_dir(model)
File.join(options.base_dir, model_path_segment(model))
end
private
def self.model_path_segment(model)
return 'temp/' unless model
def secure_url
File.join(self.class.model_path(model), secret, file.filename)
File.join(model.class.to_s.underscore, model.id.to_s)
end
def object_store
return Store::LOCAL unless model
super
end
# Revert-Override
def store_dir
store_dirs[object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(model_path_segment, dynamic_segment)
}
end
def self.model_path(model)
if model
File.join("/#{base_dir}", model.class.to_s.underscore, model.id.to_s)
else
File.join("/#{base_dir}", 'temp')
end
private
def secure_url
File.join('/', base_dir, secret, file.filename)
end
end
module RecordsUploads
extend ActiveSupport::Concern
module Concern
extend ActiveSupport::Concern
included do
after :store, :record_upload
before :remove, :destroy_upload
end
attr_accessor :upload
# After storing an attachment, create a corresponding Upload record
#
# NOTE: We're ignoring the argument passed to this callback because we want
# the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
# `Tempfile` object the callback gets.
#
# Called `after :store`
def record_upload(_tempfile = nil)
return unless model
return unless file_storage?
return unless file.exists?
Upload.record(self)
end
included do
after :store, :record_upload
before :remove, :destroy_upload
end
# After storing an attachment, create a corresponding Upload record
#
# NOTE: We're ignoring the argument passed to this callback because we want
# the `SanitizedFile` object from `CarrierWave::Uploader::Base#file`, not the
# `Tempfile` object the callback gets.
#
# Called `after :store`
def record_upload(_tempfile = nil)
return unless model
return unless file && file.exists?
Upload.transaction do
uploads.where(path: upload_path).delete_all
upload.destroy! if upload
self.upload = build_upload_from_uploader(self)
upload.save!
end
end
def upload_path
File.join(store_dir, filename.to_s)
end
private
def uploads
Upload.order(id: :desc).where(uploader: self.class.to_s)
end
private
def build_upload_from_uploader(uploader)
Upload.new(
size: uploader.file.size,
path: uploader.upload_path,
model: uploader.model,
uploader: uploader.class.to_s
)
end
# Before removing an attachment, destroy any Upload records at the same path
#
# Called `before :remove`
def destroy_upload(*args)
return unless file_storage?
return unless file
# Before removing an attachment, destroy any Upload records at the same path
#
# Called `before :remove`
def destroy_upload(*args)
return unless file && file.exists?
Upload.remove_path(relative_path)
self.upload = nil
uploads.where(path: upload_path).delete_all
end
end
end
@@ -32,14 +32,7 @@ module UploaderHelper
def extension_match?(extensions)
return false unless file
extension =
if file.respond_to?(:extension)
file.extension
else
# Not all CarrierWave storages respond to :extension
File.extname(file.path).delete('.')
end
extension = file.try(:extension) || File.extname(file.path).delete('.')
extensions.include?(extension.downcase)
end
end
module Workhorse
module UploadPath
def workhorse_upload_path
File.join(root, base_dir, 'tmp/uploads')
end
end
end
@@ -3,7 +3,7 @@ class UploadChecksumWorker
def perform(upload_id)
upload = Upload.find(upload_id)
upload.calculate_checksum
upload.calculate_checksum!
upload.save!
rescue ActiveRecord::RecordNotFound
Rails.logger.error("UploadChecksumWorker: couldn't find upload #{upload_id}, skipping")
......
---
title: Add object storage support for uploads.
merge_request: 3867
author:
type: added
@@ -174,6 +174,25 @@ production: &base
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## Uploads (attachments, avatars, etc...)
uploads:
# The location where uploads are stored (default: public/).
# storage_path: public/
# base_dir: uploads/-/system
object_store:
enabled: true
remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
enabled: false
@@ -780,6 +799,16 @@ test:
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
uploads:
storage_path: tmp/tests/public
enabled: true
object_store:
enabled: false
connection:
provider: AWS # Only AWS supported at the moment
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
gitlab:
host: localhost
port: 80
......
@@ -342,13 +342,15 @@ Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled']
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
Settings.artifacts['path'] = Settings.absolute(Settings.artifacts['path'] || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
# Settings.artifacts['path'] is deprecated, use `storage_path` instead
Settings.artifacts['path'] = Settings.artifacts['storage_path']
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
Settings.artifacts['object_store']['enabled'] ||= false
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] ||= true
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
@@ -391,14 +393,26 @@ Settings.gitlab['geo_status_timeout'] ||= 10
Settings['lfs'] ||= Settingslogic.new({})
Settings.lfs['enabled'] = true if Settings.lfs['enabled'].nil?
Settings.lfs['storage_path'] = Settings.absolute(Settings.lfs['storage_path'] || File.join(Settings.shared['path'], "lfs-objects"))
Settings.lfs['object_store'] ||= Settingslogic.new({})
Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
Settings.lfs['object_store']['remote_directory'] ||= nil
Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
Settings.lfs['object_store']['enabled'] ||= false
Settings.lfs['object_store']['remote_directory'] ||= nil
Settings.lfs['object_store']['background_upload'] ||= true
# Convert upload connection settings to use string keys, to make Fog happy
Settings.lfs['object_store']['connection']&.deep_stringify_keys!
#
# Uploads
#
Settings['uploads'] ||= Settingslogic.new({})
Settings.uploads['storage_path'] = Settings.absolute(Settings.uploads['storage_path'] || 'public')
Settings.uploads['base_dir'] = Settings.uploads['base_dir'] || 'uploads/-/system'
Settings.uploads['object_store'] ||= Settingslogic.new({})
Settings.uploads['object_store']['enabled'] ||= false
Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
Settings.uploads['object_store']['background_upload'] ||= true
# Convert upload connection settings to use string keys, to make Fog happy
Settings.uploads['object_store']['connection']&.deep_stringify_keys!
#
# Mattermost
#
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddStoreColumnToUploads < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :uploads, :store, :integer
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddUploaderIndexToUploads < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index :uploads, :path
add_concurrent_index :uploads, [:uploader, :path], using: :btree
end
def down
remove_concurrent_index :uploads, [:uploader, :path]
add_concurrent_index :uploads, :path, using: :btree
end
end
@@ -2252,11 +2252,12 @@ ActiveRecord::Schema.define(version: 20180201101405) do
t.string "model_type"
t.string "uploader", null: false
t.datetime "created_at", null: false
t.integer "store"
end
add_index "uploads", ["checksum"], name: "index_uploads_on_checksum", using: :btree
add_index "uploads", ["model_id", "model_type"], name: "index_uploads_on_model_id_and_model_type", using: :btree
add_index "uploads", ["path"], name: "index_uploads_on_path", using: :btree
add_index "uploads", ["uploader", "path"], name: "index_uploads_on_uploader_and_path", using: :btree
create_table "user_agent_details", force: :cascade do |t|
t.string "user_agent", null: false
......
@@ -14,8 +14,8 @@ There are many places where file uploading is used, according to contexts:
- User snippet attachments
* Project
- Project avatars
- Issues/MR Markdown attachments
- Issues/MR Legacy Markdown attachments
- Issues/MR/Notes Markdown attachments
- Issues/MR/Notes Legacy Markdown attachments
- CI Build Artifacts
- LFS Objects
@@ -25,7 +25,7 @@ There are many places where file uploading is used, according to contexts:
GitLab started saving everything on local disk. While directory locations have changed from previous versions,
they are still not 100% standardized. You can see them below:
| Description | In DB? | Relative path | Uploader class | model_type |
| Description | In DB? | Relative path (from CarrierWave.root) | Uploader class | model_type |
| ------------------------------------- | ------ | ----------------------------------------------------------- | ---------------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
@@ -33,17 +33,107 @@ they are still not 100% standardized. You can see them below:
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |
CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`
while in EE they inherit the `ObjectStoreUploader` and store files in an S3 API compatible object store.
while in EE they include the `ObjectStorage` concern and store files in an S3 API compatible object store.
In the case of Issues/MR Markdown attachments, there is a different approach using the [Hashed Storage] layout,
In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage] layout,
instead of basing the path on the mutable variable `:project_path_with_namespace`, it's possible to use a
hash of the project ID instead, if the project migrates to the new approach (introduced in 10.2).
### Path segments
Files are stored at multiple locations and use different path schemes.
All the `GitlabUploader` derived classes should comply with this path segment schema:
```
| GitlabUploader
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `<gitlab_root>/public/` | `uploads/-/system/` | `user/avatar/:id/` | `:filename` |
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | `CarrierWave::Uploader#store_dir` | |
| FileUploader
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `<gitlab_root>/shared/` | `artifacts/` | `:year_:month/:id` | `:filename` |
| `<gitlab_root>/shared/` | `snippets/` | `:secret/` | `:filename` |
| ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
| `CarrierWave.root` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | `CarrierWave::Uploader#store_dir` | |
| | | `FileUploader#upload_path` |
| ObjectStorage::Concern (store = remote)
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
| `<bucket_name>` | <ignored> | `user/avatar/:id/` | `:filename` |
| ----------------------- + ------------------------- + ----------------------------------- + -------------------------------- |
| `#fog_dir` | `GitlabUploader.base_dir` | `GitlabUploader#dynamic_segment` | `CarrierWave::Uploader#filename` |
| | | `ObjectStorage::Concern#store_dir` | |
| | | `ObjectStorage::Concern#upload_path` |
```
The `RecordsUploads::Concern` concern will create an `Upload` entry for every file stored by a `GitlabUploader`, persisting the dynamic parts of the path using
`GitlabUploader#upload_path`. You may then use the `Upload#build_uploader` method to manipulate the file.
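For example, a stored file can be re-materialized from its `Upload` row (a minimal sketch; the row, secret, and filename below are hypothetical):

```ruby
# Assuming an Upload row recorded by RecordsUploads::Concern exists:
upload = Upload.find_by(uploader: 'FileUploader',
                        path: 'aabbccddeeff00112233445566778899/file.txt')

uploader = upload.build_uploader # instantiates the uploader and calls retrieve_from_store!
uploader.exists?                 # => true when the underlying file is present
```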
## Object Storage
By including `ObjectStorage::Concern` in a `GitlabUploader` derived class, you may enable object storage for that uploader. To do so,
you need to either 1) include `RecordsUploads::Concern` and prepend `ObjectStorage::Extension::RecordsUploads`, or 2) mount the uploader and create a new field named `<mount>_store`.
The `CarrierWave::Uploader#store_dir` is overridden to
- `GitlabUploader.base_dir` + `GitlabUploader.dynamic_segment` when the store is LOCAL
- `GitlabUploader.dynamic_segment` when the store is REMOTE (the bucket name is used to namespace)
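For illustration, `#store_dirs` returns one directory per store; the values below assume an avatar-style uploader with a `base_dir` of `uploads/-/system` and a `dynamic_segment` of `user/avatar/1` (both illustrative):

```ruby
uploader.store_dirs
# => {
#   ObjectStorage::Store::LOCAL  => "uploads/-/system/user/avatar/1",
#   ObjectStorage::Store::REMOTE => "user/avatar/1"
# }
```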
### Using `ObjectStorage::Extension::RecordsUploads`
> Note: this concern will automatically include `RecordsUploads::Concern` if not already included.
The `ObjectStorage::Concern` uploader will search for the matching `Upload` to select the correct object store. The `Upload` is mapped using `#store_dirs + identifier` for each store (LOCAL/REMOTE).
```ruby
class SongUploader < GitlabUploader
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
...
end
class Thing < ActiveRecord::Base
mount_uploader :theme, SongUploader # we have a great theme song!
...
end
```
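With that wiring in place, a file can be moved between stores explicitly (a sketch; assumes object storage is enabled and the instance is licensed for it):

```ruby
thing = Thing.first
thing.theme.migrate!(ObjectStorage::Store::REMOTE) # upload to the bucket, delete the local copy
thing.theme.migrate!(ObjectStorage::Store::LOCAL)  # and back again
```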
### Using a mounted uploader
The `ObjectStorage::Concern` will query the `model.<mount>_store` attribute to select the correct object store.
This column must be present in the model schema.
```ruby
class SongUploader < GitlabUploader
include ObjectStorage::Concern
...
end
class Thing < ActiveRecord::Base
attr_reader :theme_store # this is an ActiveRecord attribute
mount_uploader :theme, SongUploader # we have a great theme song!
def theme_store
super || ObjectStorage::Store::LOCAL
end
...
end
```
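A hypothetical migration adding that column for the `Thing` example above could look like:

```ruby
class AddThemeStoreToThings < ActiveRecord::Migration
  DOWNTIME = false

  def change
    add_column :things, :theme_store, :integer
  end
end
```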
[CarrierWave]: https://github.com/carrierwaveuploader/carrierwave
[Hashed Storage]: ../administration/repository_storage_types.md
@@ -9,11 +9,11 @@ module EE
prepended do
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::LOCAL_STORE]) }
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
end
def local_store?
[nil, JobArtifactUploader::LOCAL_STORE].include?(self.file_store)
[nil, JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
private
......
@@ -11,7 +11,7 @@ module EE
end
def local_store?
[nil, LfsObjectUploader::LOCAL_STORE].include?(self.file_store)
[nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end
private
......
@@ -4,7 +4,7 @@ module Geo
class JobArtifact < ::Geo::BaseFdw
self.table_name = Gitlab::Geo.fdw_table('ci_job_artifacts')
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::LOCAL_STORE]) }
scope :with_files_stored_locally, -> { where(file_store: [nil, JobArtifactUploader::Store::LOCAL]) }
end
end
end
......
@@ -3,7 +3,7 @@ module Geo
class LfsObject < ::Geo::BaseFdw
self.table_name = Gitlab::Geo.fdw_table('lfs_objects')
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::LOCAL_STORE]) }
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
end
end
end
@@ -34,7 +34,7 @@ module Geo
#
# @return base directory where all uploads for the project are stored
def base_dir
@base_dir ||= File.join(CarrierWave.root, FileUploader.base_dir, old_full_path)
@base_dir ||= File.join(FileUploader.root, old_full_path)
end
private
......
@@ -21,8 +21,8 @@ module Geo
end
def execute
origin = File.join(CarrierWave.root, FileUploader.base_dir, old_attachments_path)
target = File.join(CarrierWave.root, FileUploader.base_dir, new_attachments_path)
origin = File.join(FileUploader.root, old_attachments_path)
target = File.join(FileUploader.root, new_attachments_path)
move_folder!(origin, target)
end
......
@@ -24,7 +24,7 @@ module Geo
end
def local_store_path
Pathname.new(JobArtifactUploader.local_store_path)
Pathname.new(JobArtifactUploader.root)
end
def relative_file_path
......
@@ -25,7 +25,7 @@ module Geo
end
def local_store_path
Pathname.new(LfsObjectUploader.local_store_path)
Pathname.new(LfsObjectUploader.root)
end
def relative_file_path
......
require 'fog/aws'
require 'carrierwave/storage/fog'
#
# This concern should add object storage support
# to the GitlabUploader class
#
module ObjectStorage
RemoteStoreError = Class.new(StandardError)
UnknownStoreError = Class.new(StandardError)
ObjectStoreUnavailable = Class.new(StandardError)
module Store
LOCAL = 1
REMOTE = 2
end
module Extension
# this extension is the glue between the ObjectStorage::Concern and RecordsUploads::Concern
module RecordsUploads
extend ActiveSupport::Concern
prepended do |base|
raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(::RecordsUploads::Concern)
end
def retrieve_from_store!(identifier)
paths = store_dirs.map { |store, path| File.join(path, identifier) }
unless current_upload_satisfies?(paths, model)
# the upload we already have isn't right, find the correct one
self.upload = uploads.find_by(model: model, path: paths)
end
super
end
def build_upload_from_uploader(uploader)
super.tap { |upload| upload.store = object_store }
end
def upload=(upload)
return unless upload
self.object_store = upload.store
super
end
private
def current_upload_satisfies?(paths, model)
return false unless upload
return false unless model
paths.include?(upload.path) &&
upload.model_id == model.id &&
upload.model_type == model.class.base_class.sti_name
end
end
end
module Concern
extend ActiveSupport::Concern
included do |base|
base.include(ObjectStorage)
before :store, :verify_license!
after :migrate, :delete_migrated_file
end
class_methods do
def object_store_options
options.object_store
end
def object_store_enabled?
object_store_options.enabled
end
def background_upload_enabled?
object_store_options.background_upload
end
def object_store_credentials
object_store_options.connection.to_hash.deep_symbolize_keys
end
def remote_store_path
object_store_options.remote_directory
end
def licensed?
License.feature_available?(:object_storage)
end
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def object_store
@object_store ||= model.try(store_serialization_column) || Store::LOCAL
end
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def object_store=(value)
@object_store = value || Store::LOCAL
@storage = storage_for(object_store)
end
# rubocop:enable Gitlab/ModuleWithInstanceVariables
# Return true if the current file is part of the model (i.e. is mounted in the model)
#
def persist_object_store?
model.respond_to?(:"#{store_serialization_column}=")
end
# Save the current @object_store to the model <mounted_as>_store column
def persist_object_store!
return unless persist_object_store?
updated = model.update_column(store_serialization_column, object_store)
raise ActiveRecordError unless updated
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
def filename
super || file&.filename
end
#
# Move the file to another store
#
# new_store: Enum (Store::LOCAL, Store::REMOTE)
#
def migrate!(new_store)
return unless object_store != new_store
return unless file
new_file = nil
file_to_delete = file
from_object_store = object_store
self.object_store = new_store # changes the storage and file
cache_stored_file! if file_storage?
with_callbacks(:migrate, file_to_delete) do
with_callbacks(:store, file_to_delete) do # for #store_versions!
new_file = storage.store!(file)
persist_object_store!
self.file = new_file
end
end
file
rescue => e
# in case of failure delete new file
new_file.delete unless new_file.nil?
# revert back to the old file
self.object_store = from_object_store
self.file = file_to_delete
raise e
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.class.licensed?
return unless self.file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
end
def fog_directory
self.class.remote_store_path
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def delete_migrated_file(migrated_file)
migrated_file.delete if exists?
end
def verify_license!(_file)
return if file_storage?
raise 'Object Storage feature is missing' unless self.class.licensed?
end
def exists?
file.present?
end
def store_dir(store = nil)
store_dirs[store || object_store]
end
def store_dirs
{
Store::LOCAL => File.join(base_dir, dynamic_segment),
Store::REMOTE => File.join(dynamic_segment)
}
end
private
# this is a hack around CarrierWave. The #migrate method needs to be
# able to force the current file to the migrated file upon success.
def file=(file)
@file = file # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
def serialization_column
model.class.uploader_options.dig(mounted_as, :mount_on) || mounted_as
end
# Returns the column where the 'store' is saved
# defaults to 'store'
def store_serialization_column
[serialization_column, 'store'].compact.join('_').to_sym
end
def storage
@storage ||= storage_for(object_store)
end
def storage_for(store)
case store
when Store::REMOTE
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
when Store::LOCAL
CarrierWave::Storage::File.new(self)
else
raise UnknownStoreError
end
end
end
end
require 'fog/aws'
require 'carrierwave/storage/fog'
class ObjectStoreUploader < CarrierWave::Uploader::Base
before :store, :set_default_local_store
before :store, :verify_license!
LOCAL_STORE = 1
REMOTE_STORE = 2
class << self
def storage_options(options) # rubocop:disable Style/TrivialAccessors
@storage_options = options
end
def object_store_options
@storage_options&.object_store
end
def object_store_enabled?
object_store_options&.enabled
end
def background_upload_enabled?
object_store_options&.background_upload
end
def object_store_credentials
@object_store_credentials ||= object_store_options&.connection&.to_hash&.deep_symbolize_keys
end
def object_store_directory
object_store_options&.remote_directory
end
def local_store_path
raise NotImplementedError
end
end
def file_storage?
storage.is_a?(CarrierWave::Storage::File)
end
def file_cache_storage?
cache_storage.is_a?(CarrierWave::Storage::File)
end
def real_object_store
model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
end
def object_store
real_object_store || LOCAL_STORE
end
def object_store=(value)
@storage = nil
model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
end
def store_dir
if file_storage?
default_local_path
else
default_path
end
end
def use_file
if file_storage?
return yield path
end
begin
cache_stored_file!
yield cache_path
ensure
cache_storage.delete_dir!(cache_path(nil))
end
end
def filename
super || file&.filename
end
def migrate!(new_store)
raise 'Undefined new store' unless new_store
return unless object_store != new_store
return unless file
old_file = file
old_store = object_store
# for moving a remote file we need to first store it locally
cache_stored_file! unless file_storage?
# change storage
self.object_store = new_store
with_callbacks(:store, file) do
storage.store!(file).tap do |new_file|
# since we change storage store the new storage
# in case of failure delete new file
begin
model.save!
rescue => e
new_file.delete
self.object_store = old_store
raise e
end
old_file.delete
end
end
end
def schedule_migration_to_object_storage(*args)
return unless self.class.object_store_enabled?
return unless self.class.background_upload_enabled?
return unless self.licensed?
return unless self.file_storage?
ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
end
def fog_directory
self.class.object_store_directory
end
def fog_credentials
self.class.object_store_credentials
end
def fog_public
false
end
def move_to_store
return true if object_store == LOCAL_STORE
file.try(:storage) == storage
end
def move_to_cache
return true if object_store == LOCAL_STORE
file.try(:storage) == cache_storage
end
# We block storing artifacts on Object Storage, not receiving
def verify_license!(new_file)
return if file_storage?
raise 'Object Storage feature is missing' unless licensed?
end
def exists?
file.present?
end
def cache_dir
File.join(self.class.local_store_path, 'tmp/cache')
end
# Override this if you don't want to save local files by default to the Rails.root directory
def work_dir
# Default path set by CarrierWave:
# https://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L182
# CarrierWave.tmp_path
File.join(self.class.local_store_path, 'tmp/work')
end
def licensed?
License.feature_available?(:object_storage)
end
private
def set_default_local_store(new_file)
self.object_store = LOCAL_STORE unless self.real_object_store
end
def default_local_path
File.join(self.class.local_store_path, default_path)
end
def default_path
raise NotImplementedError
end
def serialization_column
model.class.uploader_option(mounted_as, :mount_on) || mounted_as
end
def store_serialization_column
:"#{serialization_column}_store"
end
def storage
@storage ||=
if object_store == REMOTE_STORE
remote_storage
else
local_storage
end
end
def remote_storage
raise 'Object Storage is not enabled' unless self.class.object_store_enabled?
CarrierWave::Storage::Fog.new(self)
end
def local_storage
CarrierWave::Storage::File.new(self)
end
# To prevent files in local storage from moving across filesystems, override
# the default implementation:
# http://github.com/carrierwaveuploader/carrierwave/blob/v1.1.0/lib/carrierwave/uploader/cache.rb#L181-L183
def workfile_path(for_file = original_filename)
# To be safe, keep this directory outside of the cache directory
# because calling CarrierWave.clean_cache_files! will remove any files in
# the cache directory.
File.join(work_dir, @cache_id, version_name.to_s, for_file)
end
end
......@@ -7,16 +7,16 @@ class ObjectStorageUploadWorker
uploader_class = uploader_class_name.constantize
subject_class = subject_class_name.constantize
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find_by(id: subject_id)
return unless subject
file = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
return unless file.licensed?
file.migrate!(uploader_class::REMOTE_STORE)
subject = subject_class.find(subject_id)
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue ActiveRecord::RecordNotFound
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
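# Illustrative sketch: enqueuing the worker for a mounted uploader, matching
# the perform signature above (record and arguments are hypothetical):
ObjectStorageUploadWorker.perform_async('LfsObjectUploader', 'LfsObject', :file, lfs_object.id)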
......@@ -2,12 +2,12 @@ module Gitlab
module Geo
class FileTransfer < Transfer
def initialize(file_type, upload)
uploader = upload.uploader.constantize
@file_type = file_type
@file_id = upload.id
@filename = uploader.absolute_path(upload)
@filename = upload.absolute_path
@request_data = build_request_data(upload)
rescue ObjectStorage::RemoteStoreError
Rails.logger.warn "Cannot transfer a remote object."
end
private
......
......@@ -201,7 +201,7 @@ module Gitlab
end
def handle_lfs_object_deleted_event(event, created_at)
file_path = File.join(LfsObjectUploader.local_store_path, event.file_path)
file_path = File.join(LfsObjectUploader.root, event.file_path)
job_id = ::Geo::FileRemovalWorker.perform_async(file_path)
......@@ -220,7 +220,7 @@ module Gitlab
file_registry_job_artifacts = ::Geo::FileRegistry.job_artifacts.where(file_id: event.job_artifact_id)
return unless file_registry_job_artifacts.any? # avoid race condition
file_path = File.join(::JobArtifactUploader.local_store_path, event.file_path)
file_path = File.join(::JobArtifactUploader.root, event.file_path)
if File.file?(file_path)
deleted = delete_file(file_path) # delete synchronously to ensure consistency
......
......@@ -12,8 +12,8 @@ namespace :gitlab do
.with_artifacts_stored_locally
.find_each(batch_size: 10) do |build|
begin
build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
build.artifacts_file.migrate!(ObjectStorage::Store::REMOTE)
build.artifacts_metadata.migrate!(ObjectStorage::Store::REMOTE)
logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
rescue => e
......
......@@ -10,7 +10,7 @@ namespace :gitlab do
LfsObject.with_files_stored_locally
.find_each(batch_size: 10) do |lfs_object|
begin
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
logger.info("Transferred LFS object #{lfs_object.oid} of size #{lfs_object.size.to_i.bytes} to object storage")
rescue => e
......
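# Illustrative sketch: both migrations are driven by rake tasks named after
# their namespaces (see the task specs further below):
#
#   bundle exec rake gitlab:artifacts:migrate
#   bundle exec rake gitlab:lfs:migrate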
......@@ -215,9 +215,9 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = JobArtifactUploader.artifacts_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
artifacts = uploaded_file(:file, workhorse_upload_path)
metadata = uploaded_file(:metadata, workhorse_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
......
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', LegacyArtifactUploader.local_store_path)
super('artifacts', JobArtifactUploader.root)
end
def create_files_dir
......
......@@ -143,7 +143,7 @@ module Gitlab
end
def absolute_path
File.join(CarrierWave.root, path)
File.join(Gitlab.config.uploads.storage_path, path)
end
end
......
......@@ -11,9 +11,12 @@ module Gitlab
FIND_BATCH_SIZE = 500
RELATIVE_UPLOAD_DIR = "uploads".freeze
ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
ABSOLUTE_UPLOAD_DIR = File.join(
Gitlab.config.uploads.storage_path,
RELATIVE_UPLOAD_DIR
)
FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
......@@ -81,7 +84,7 @@ module Gitlab
paths = []
stdout.each_line("\0") do |line|
paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
if paths.size >= batch_size
yield(paths)
......
......@@ -27,7 +27,7 @@ module Gitlab
with_link_in_tmp_dir(file.file) do |open_tmp_file|
new_uploader.store!(open_tmp_file)
end
new_uploader.to_markdown
new_uploader.markdown_link
end
end
......
......@@ -17,15 +17,13 @@ module Gitlab
false
end
private
def uploads_path
FileUploader.absolute_base_dir(@project)
end
def uploads_export_path
File.join(@shared.export_path, 'uploads')
end
def uploads_path
FileUploader.dynamic_path_segment(@project)
end
end
end
end
module Gitlab
class UploadsTransfer < ProjectTransfer
def root_dir
File.join(CarrierWave.root, FileUploader.base_dir)
FileUploader.root
end
end
end
......@@ -55,14 +55,14 @@ module Gitlab
def lfs_upload_ok(oid, size)
{
StoreLFSPath: "#{Gitlab.config.lfs.storage_path}/tmp/upload",
StoreLFSPath: LfsObjectUploader.workhorse_upload_path,
LfsOid: oid,
LfsSize: size
}
end
def artifact_upload_ok
{ TempPath: JobArtifactUploader.artifacts_upload_path }
{ TempPath: JobArtifactUploader.workhorse_upload_path }
end
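# Illustrative sketch of the JSON Workhorse receives for an authorized LFS
# upload (paths and values are hypothetical):
#
#   { "StoreLFSPath" => "/.../shared/lfs-objects/tmp/upload",
#     "LfsOid"       => "91eff75a...",
#     "LfsSize"      => 1575078 }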
def send_git_blob(repository, blob)
......
......@@ -6,5 +6,7 @@ describe Groups::UploadsController do
{ group_id: model }
end
it_behaves_like 'handle uploads'
it_behaves_like 'handle uploads' do
let(:uploader_class) { NamespaceFileUploader }
end
end
......@@ -145,8 +145,8 @@ describe Projects::ArtifactsController do
context 'when using local file storage' do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::LOCAL_STORE }
let(:archive_path) { JobArtifactUploader.local_store_path }
let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.root }
end
end
......@@ -159,7 +159,7 @@ describe Projects::ArtifactsController do
it_behaves_like 'a valid file' do
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
let(:store) { ObjectStorage::Store::REMOTE }
let(:archive_path) { 'https://' }
end
end
......
......@@ -47,7 +47,7 @@ describe Projects::RawController do
end
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
......@@ -58,7 +58,7 @@ describe Projects::RawController do
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect to file' do
......
......@@ -180,6 +180,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
response
end
end
......@@ -196,6 +197,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'user', mounted_as: 'avatar', id: user.id, filename: 'image.png'
response
end
end
......@@ -220,6 +222,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
response
end
end
......@@ -239,6 +242,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
response
end
end
......@@ -291,6 +295,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'project', mounted_as: 'avatar', id: project.id, filename: 'image.png'
response
end
end
......@@ -322,6 +327,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
response
end
end
......@@ -341,6 +347,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
response
end
end
......@@ -384,6 +391,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'group', mounted_as: 'avatar', id: group.id, filename: 'image.png'
response
end
end
......@@ -420,6 +428,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
response
end
end
......@@ -439,6 +448,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
response
end
end
......@@ -491,6 +501,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'note', mounted_as: 'attachment', id: note.id, filename: 'image.png'
response
end
end
......@@ -522,6 +533,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png'
response
end
end
......@@ -541,6 +553,7 @@ describe UploadsController do
it_behaves_like 'content not cached without revalidation' do
subject do
get :show, model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png'
response
end
end
......
......@@ -16,7 +16,7 @@ describe Geo::AttachmentRegistryFinder, :geo do
let!(:upload_3) { create(:upload, :issuable_upload, model: synced_project) }
let!(:upload_4) { create(:upload, model: unsynced_project) }
let(:upload_5) { create(:upload, model: synced_project) }
let(:upload_6) { create(:upload, :personal_snippet) }
let(:upload_6) { create(:upload, :personal_snippet_upload) }
let(:upload_7) { create(:upload, model: synced_subgroup) }
let(:lfs_object) { create(:lfs_object) }
......
......@@ -11,7 +11,7 @@ describe Gitlab::Geo::FileTransfer do
it 'sets an absolute path' do
expect(subject.file_type).to eq(:file)
expect(subject.file_id).to eq(upload.id)
expect(subject.filename).to eq(AvatarUploader.absolute_path(upload))
expect(subject.filename).to eq(upload.absolute_path)
expect(Pathname.new(subject.filename).absolute?).to be_truthy
expect(subject.request_data).to eq({ id: upload.model_id,
type: 'User',
......
......@@ -291,8 +291,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
end
it 'schedules a Geo::FileRemovalWorker' do
file_path = File.join(LfsObjectUploader.local_store_path,
lfs_object_deleted_event.file_path)
file_path = File.join(LfsObjectUploader.root, lfs_object_deleted_event.file_path)
expect(::Geo::FileRemovalWorker).to receive(:perform_async)
.with(file_path)
......
......@@ -8,14 +8,14 @@ describe LfsObject do
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::LOCAL_STORE' do
subject.file_store = LfsObjectUploader::LOCAL_STORE
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::REMOTE_STORE' do
subject.file_store = LfsObjectUploader::REMOTE_STORE
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
......
......@@ -15,7 +15,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
context 'on success' do
before do
TestEnv.clean_test_path
FileUtils.mkdir_p(FileUploader.dynamic_path_builder(old_attachments_path))
FileUtils.mkdir_p(File.join(FileUploader.root, old_attachments_path))
end
it 'returns true' do
......
......@@ -98,7 +98,7 @@ describe Geo::FileDownloadService do
end
context 'with a snippet' do
let(:upload) { create(:upload, :personal_snippet) }
let(:upload) { create(:upload, :personal_snippet_upload) }
subject(:execute!) { described_class.new(:personal_file, upload.id).execute }
......
......@@ -9,7 +9,7 @@ describe Geo::FilesExpireService, :geo, :delete do
describe '#execute' do
let(:file_uploader) { build(:file_uploader, project: project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let!(:file_registry) { create(:geo_file_registry, file_id: upload.id) }
before do
......
require 'spec_helper'
def base_path(storage)
File.join(FileUploader.root, storage.disk_path)
end
describe Geo::HashedStorageAttachmentsMigrationService do
let!(:project) { create(:project) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
subject(:service) { described_class.new(project.id, old_attachments_path: legacy_storage.disk_path, new_attachments_path: hashed_storage.disk_path) }
subject(:service) do
described_class.new(project.id,
old_attachments_path: legacy_storage.disk_path,
new_attachments_path: hashed_storage.disk_path)
end
describe '#execute' do
context 'when succeeds' do
......@@ -72,8 +80,4 @@ describe Geo::HashedStorageAttachmentsMigrationService do
expect(service.async_execute).to eq('foo')
end
end
def base_path(storage)
File.join(CarrierWave.root, FileUploader.base_dir, storage.disk_path)
end
end
......@@ -52,7 +52,7 @@ describe Geo::FileDownloadDispatchWorker, :geo do
before do
stub_lfs_object_storage
lfs_object_remote_store.file.migrate!(LfsObjectUploader::REMOTE_STORE)
lfs_object_remote_store.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'filters S3-backed files' do
......@@ -118,7 +118,7 @@ describe Geo::FileDownloadDispatchWorker, :geo do
create_list(:lfs_object, 2, :with_file)
create_list(:user, 2, avatar: avatar)
create_list(:note, 2, :with_attachment)
create_list(:upload, 1, :personal_snippet)
create_list(:upload, 1, :personal_snippet_upload)
create_list(:ci_job_artifact, 1)
create(:appearance, logo: avatar, header_logo: avatar)
......
require 'spec_helper'
describe ObjectStorageUploadWorker do
let(:local) { ObjectStoreUploader::LOCAL_STORE }
let(:remote) { ObjectStoreUploader::REMOTE_STORE }
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
def perform
described_class.perform_async(uploader_class.name, subject_class, file_field, subject_id)
......
......@@ -6,7 +6,7 @@ FactoryBot.define do
file_type :archive
trait :remote_store do
file_store JobArtifactUploader::REMOTE_STORE
file_store JobArtifactUploader::Store::REMOTE
end
after :build do |artifact|
......
......@@ -100,7 +100,7 @@ FactoryBot.define do
lfs_object { create(:lfs_object, :with_file) }
after(:build, :stub) do |event, _|
local_store_path = Pathname.new(LfsObjectUploader.local_store_path)
local_store_path = Pathname.new(LfsObjectUploader.root)
relative_path = Pathname.new(event.lfs_object.file.path).relative_path_from(local_store_path)
event.oid = event.lfs_object.oid
......@@ -112,7 +112,7 @@ FactoryBot.define do
job_artifact { create(:ci_job_artifact, :archive) }
after(:build, :stub) do |event, _|
local_store_path = Pathname.new(JobArtifactUploader.local_store_path)
local_store_path = Pathname.new(JobArtifactUploader.root)
relative_path = Pathname.new(event.job_artifact.file.path).relative_path_from(local_store_path)
event.file_path = relative_path
......
......@@ -19,7 +19,7 @@ FactoryBot.define do
end
trait :with_avatar do
avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
factory :group_with_members do
......
......@@ -122,11 +122,11 @@ FactoryBot.define do
end
trait :with_attachment do
attachment { fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png") }
attachment { fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "image/png") }
end
trait :with_svg_attachment do
attachment { fixture_file_upload(Rails.root + "spec/fixtures/unsanitized.svg", "image/svg+xml") }
attachment { fixture_file_upload(Rails.root.join("spec/fixtures/unsanitized.svg"), "image/svg+xml") }
end
transient do
......
......@@ -122,7 +122,7 @@ FactoryBot.define do
end
trait :with_avatar do
avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :broken_storage do
......
FactoryBot.define do
factory :upload do
model { build(:project) }
path { "uploads/-/system/project/avatar/avatar.jpg" }
size 100.kilobytes
uploader "AvatarUploader"
store ObjectStorage::Store::LOCAL
trait :personal_snippet do
# we should build a mount-agnostic upload by default
transient do
mounted_as :avatar
secret SecureRandom.hex
end
# this needs to comply with RecordsUpload::Concern#upload_path
path { File.join("uploads/-/system", model.class.to_s.underscore, mounted_as.to_s, 'avatar.jpg') }
trait :personal_snippet_upload do
model { build(:personal_snippet) }
path { File.join(secret, 'myfile.jpg') }
uploader "PersonalFileUploader"
end
trait :issuable_upload do
path { "#{SecureRandom.hex}/myfile.jpg" }
path { File.join(secret, 'myfile.jpg') }
uploader "FileUploader"
end
trait :namespace_upload do
path { "#{SecureRandom.hex}/myfile.jpg" }
model { build(:group) }
path { File.join(secret, 'myfile.jpg') }
uploader "NamespaceFileUploader"
end
trait :attachment_upload do
transient do
mounted_as :attachment
end
model { build(:note) }
uploader "AttachmentUploader"
end
end
end
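# Illustrative sketch: how specs might combine the traits above (hypothetical):
upload = create(:upload)                                    # mount-agnostic avatar upload
snippet_upload = create(:upload, :personal_snippet_upload)  # secret-based PersonalFileUploader path
group_upload = create(:upload, :namespace_upload)           # secret-based path on a group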
......@@ -42,7 +42,7 @@ FactoryBot.define do
end
trait :with_avatar do
avatar { File.open(Rails.root.join('spec/fixtures/dk.png')) }
avatar { fixture_file_upload('spec/fixtures/dk.png') }
end
trait :two_factor_via_otp do
......
......@@ -23,6 +23,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
shared_examples 'does not add files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.mkdir(File.dirname(tmp_file))
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
end
it 'ensures the untracked_files_for_uploads table exists' do
expect do
described_class.new.perform
......@@ -109,24 +130,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
context 'when there are files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
......@@ -197,24 +202,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
context 'when there are files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
......
......@@ -17,7 +17,7 @@ describe Gitlab::Gfm::UploadsRewriter do
end
let(:text) do
"Text and #{image_uploader.to_markdown} and #{zip_uploader.to_markdown}"
"Text and #{image_uploader.markdown_link} and #{zip_uploader.markdown_link}"
end
describe '#rewrite' do
......
......@@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
let(:uploads_path) { FileUploader.dynamic_path_segment(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
......@@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
restorer.restore
subject.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
......@@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
restorer.restore
subject.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
......
......@@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
......@@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
......
......@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
after do
Upload.reset_column_information
end
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
......
......@@ -204,7 +204,7 @@ describe Namespace do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
let(:uploads_dir) { FileUploader.root }
let(:pages_dir) { File.join(TestEnv.pages_path) }
before do
......
......@@ -45,63 +45,6 @@ describe Upload do
end
end
describe '.remove_path' do
it 'removes all records at the given path' do
described_class.create!(
size: File.size(__FILE__),
path: __FILE__,
model: build_stubbed(:user),
uploader: 'AvatarUploader'
)
expect { described_class.remove_path(__FILE__) }
.to change { described_class.count }.from(1).to(0)
end
end
describe '.record' do
let(:fake_uploader) do
double(
file: double(size: 12_345),
relative_path: 'foo/bar.jpg',
model: build_stubbed(:user),
class: 'AvatarUploader'
)
end
it 'removes existing paths before creation' do
expect(described_class).to receive(:remove_path)
.with(fake_uploader.relative_path)
described_class.record(fake_uploader)
end
it 'creates a new record and assigns size, path, model, and uploader' do
upload = described_class.record(fake_uploader)
aggregate_failures do
expect(upload).to be_persisted
expect(upload.size).to eq fake_uploader.file.size
expect(upload.path).to eq fake_uploader.relative_path
expect(upload.model_id).to eq fake_uploader.model.id
expect(upload.model_type).to eq fake_uploader.model.class.to_s
expect(upload.uploader).to eq fake_uploader.class
end
end
end
describe '.hexdigest' do
it 'calculates the SHA256 sum' do
expected = Digest::SHA256.file(__FILE__).hexdigest
expect(described_class.hexdigest(__FILE__)).to eq expected
end
it 'returns nil for a non-existent file' do
expect(described_class.hexdigest("#{__FILE__}-nope")).to be_nil
end
end
describe '#absolute_path' do
it 'returns the path directly when already absolute' do
path = '/path/to/namespace/project/secret/file.jpg'
......@@ -123,27 +66,27 @@ describe Upload do
end
end
describe '#calculate_checksum' do
it 'calculates the SHA256 sum' do
upload = described_class.new(
path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
)
describe '#calculate_checksum!' do
let(:upload) do
described_class.new(path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD - 1.megabyte)
end
it 'sets `checksum` to SHA256 sum of the file' do
expected = Digest::SHA256.file(__FILE__).hexdigest
expect { upload.calculate_checksum }
expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(nil).to(expected)
end
it 'returns nil for a non-existent file' do
upload = described_class.new(
path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD - 1.megabyte
)
it 'sets `checksum` to nil for a non-existent file' do
expect(upload).to receive(:exist?).and_return(false)
expect(upload.calculate_checksum).to be_nil
checksum = Digest::SHA256.file(__FILE__).hexdigest
upload.checksum = checksum
expect { upload.calculate_checksum! }
.to change { upload.checksum }.from(checksum).to(nil)
end
end
......
......@@ -948,7 +948,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow temp files to be passed from any path
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -1125,7 +1125,7 @@ describe API::Runner do
# by configuring this path we only allow files to be passed from @tmpdir,
# while all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
end
after do
......@@ -1155,7 +1155,7 @@ describe API::Runner do
context 'when job has artifacts' do
let(:job) { create(:ci_build) }
let(:store) { JobArtifactUploader::LOCAL_STORE }
let(:store) { JobArtifactUploader::Store::LOCAL }
before do
create(:ci_job_artifact, :archive, file_store: store, job: job)
......@@ -1177,7 +1177,7 @@ describe API::Runner do
end
context 'when artifacts are stored remotely' do
let(:store) { JobArtifactUploader::REMOTE_STORE }
let(:store) { JobArtifactUploader::Store::REMOTE }
let!(:job) { create(:ci_build) }
it 'download artifacts' do
......
......@@ -245,7 +245,7 @@ describe 'Git LFS API and storage' do
context 'when LFS uses object storage' do
let(:before_get) do
stub_lfs_object_storage
lfs_object.file.migrate!(LfsObjectUploader::REMOTE_STORE)
lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
end
it 'responds with redirect' do
......@@ -1003,7 +1003,7 @@ describe 'Git LFS API and storage' do
end
it 'responds with status 200, location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
......@@ -1160,7 +1160,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
......@@ -1274,7 +1274,7 @@ describe 'Git LFS API and storage' do
end
def setup_tempfile(lfs_tmp)
upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
......
......@@ -250,7 +250,7 @@ describe Issues::MoveService do
context 'issue description with uploads' do
let(:uploader) { build(:file_uploader, project: old_project) }
let(:description) { "Text and #{uploader.to_markdown}" }
let(:description) { "Text and #{uploader.markdown_link}" }
include_context 'issue move executed'
......
......@@ -6,7 +6,7 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
let!(:upload) { Upload.find_by(path: file_uploader.relative_path) }
let!(:upload) { Upload.find_by(path: file_uploader.upload_path) }
let(:file_uploader) { build(:file_uploader, project: project) }
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
......@@ -58,6 +58,6 @@ describe Projects::HashedStorage::MigrateAttachmentsService do
end
def base_path(storage)
FileUploader.dynamic_path_builder(storage.disk_path)
File.join(FileUploader.root, storage.disk_path)
end
end
......@@ -2,6 +2,8 @@ shared_examples 'handle uploads' do
let(:user) { create(:user) }
let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
let(:secret) { FileUploader.generate_secret }
let(:uploader_class) { FileUploader }
describe "POST #create" do
context 'when a user is not authorized to upload a file' do
......@@ -65,7 +67,12 @@ shared_examples 'handle uploads' do
describe "GET #show" do
let(:show_upload) do
get :show, params.merge(secret: "123456", filename: "image.jpg")
get :show, params.merge(secret: secret, filename: "rails_sample.jpg")
end
before do
expect(FileUploader).to receive(:generate_secret).and_return(secret)
UploadService.new(model, jpg, uploader_class).execute
end
context "when the model is public" do
......@@ -75,11 +82,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
before do
allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
allow(jpg).to receive(:exists?).and_return(true)
end
it "responds with status 200" do
show_upload
......@@ -88,6 +90,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
before do
allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
end
it "responds with status 404" do
show_upload
......@@ -102,11 +108,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
before do
allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
allow(jpg).to receive(:exists?).and_return(true)
end
it "responds with status 200" do
show_upload
......@@ -115,6 +116,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
before do
allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
end
it "responds with status 404" do
show_upload
......@@ -131,11 +136,6 @@ shared_examples 'handle uploads' do
context "when not signed in" do
context "when the file exists" do
before do
allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
allow(jpg).to receive(:exists?).and_return(true)
end
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
......@@ -149,6 +149,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
end
it "redirects to the sign in page" do
show_upload
......@@ -158,6 +162,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
before do
allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
end
it "redirects to the sign in page" do
show_upload
......@@ -177,11 +185,6 @@ shared_examples 'handle uploads' do
end
context "when the file exists" do
before do
allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
allow(jpg).to receive(:exists?).and_return(true)
end
it "responds with status 200" do
show_upload
......@@ -190,6 +193,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
before do
allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
end
it "responds with status 404" do
show_upload
......@@ -200,11 +207,6 @@ shared_examples 'handle uploads' do
context "when the user doesn't have access to the model" do
context "when the file exists" do
before do
allow_any_instance_of(FileUploader).to receive(:file).and_return(jpg)
allow(jpg).to receive(:exists?).and_return(true)
end
context "when the file is an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
......@@ -218,6 +220,10 @@ shared_examples 'handle uploads' do
end
context "when the file is not an image" do
before do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(false)
end
it "responds with status 404" do
show_upload
......@@ -227,6 +233,10 @@ shared_examples 'handle uploads' do
end
context "when the file doesn't exist" do
before do
allow_any_instance_of(FileUploader).to receive(:exists?).and_return(false)
end
it "responds with status 404" do
show_upload
......
shared_context 'with storage' do |store, **stub_params|
before do
subject.object_store = store
end
end
shared_examples "migrates" do |to_store:, from_store: nil|
let(:to) { to_store }
let(:from) { from_store || subject.object_store }
def migrate(to)
subject.migrate!(to)
end
def checksum
Digest::SHA256.hexdigest(subject.read)
end
before do
migrate(from)
end
it 'does nothing when migrating to the current store' do
expect { migrate(from) }.not_to change { subject.object_store }.from(from)
end
it 'migrates to the specified store' do
from_checksum = checksum
expect { migrate(to) }.to change { subject.object_store }.from(from).to(to)
expect(checksum).to eq(from_checksum)
end
it 'removes the original file after the migration' do
original_file = subject.file.path
migrate(to)
expect(File.exist?(original_file)).to be_falsey
end
context 'migration is unsuccessful' do
shared_examples "handles gracefully" do |error:|
it 'does not update the object_store' do
expect { migrate(to) }.to raise_error(error)
expect(subject.object_store).to eq(from)
end
it 'does not delete the original file' do
expect { migrate(to) }.to raise_error(error)
expect(subject.exists?).to be_truthy
end
end
context 'when the store is not supported' do
let(:to) { -1 } # not a valid store
include_examples "handles gracefully", error: ObjectStorage::UnknownStoreError
end
context 'upon a fog failure' do
before do
storage_class = subject.send(:storage_for, to).class
expect_any_instance_of(storage_class).to receive(:store!).and_raise("Store failure.")
end
include_examples "handles gracefully", error: "Store failure."
end
context 'upon a database failure' do
before do
expect(uploader).to receive(:persist_object_store!).and_raise("ActiveRecord failure.")
end
include_examples "handles gracefully", error: "ActiveRecord failure."
end
end
end
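# Illustrative sketch: an uploader spec can exercise both directions with the
# shared examples above, assuming `subject` is a stored uploader and object
# storage is stubbed:
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE,
                            to_store: described_class::Store::LOCAL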
shared_examples "matches the method pattern" do |method|
let(:target) { subject }
let(:args) { nil }
let(:pattern) { patterns[method] }
it do
return skip "No pattern provided, skipping." unless pattern
expect(target.method(method).call(*args)).to match(pattern)
end
end
shared_examples "builds correct paths" do |**patterns|
let(:patterns) { patterns }
before do
allow(subject).to receive(:filename).and_return('<filename>')
end
describe "#store_dir" do
it_behaves_like "matches the method pattern", :store_dir
end
describe "#cache_dir" do
it_behaves_like "matches the method pattern", :cache_dir
end
describe "#work_dir" do
it_behaves_like "matches the method pattern", :work_dir
end
describe "#upload_path" do
it_behaves_like "matches the method pattern", :upload_path
end
describe ".absolute_path" do
it_behaves_like "matches the method pattern", :absolute_path do
let(:target) { subject.class }
let(:args) { [upload] }
end
end
describe ".base_dir" do
it_behaves_like "matches the method pattern", :base_dir do
let(:target) { subject.class }
end
end
end
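# Illustrative sketch: an uploader spec declares the patterns it expects
# (the patterns shown here are examples, not requirements):
it_behaves_like 'builds correct paths',
                store_dir: %r[uploads/-/system/user/avatar/],
                upload_path: %r[uploads/-/system/user/avatar/]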
......@@ -30,4 +30,11 @@ module StubConfiguration
remote_directory: 'lfs-objects',
**params)
end
def stub_uploads_object_storage(uploader = described_class, **params)
stub_object_storage_uploader(config: Gitlab.config.uploads.object_store,
uploader: uploader,
remote_directory: 'uploads',
**params)
end
end
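# Illustrative sketch: stubbing uploads object storage inside an uploader spec
# (the uploader argument defaults to described_class):
before do
  stub_uploads_object_storage(AvatarUploader)
end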
......@@ -238,7 +238,7 @@ module TestEnv
end
def artifacts_path
Gitlab.config.artifacts.path
Gitlab.config.artifacts.storage_path
end
# When no cached assets exist, manually hit the root path to create them
......
module TrackUntrackedUploadsHelpers
def uploaded_file
fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
fixture_file_upload(fixture_path)
end
......
......@@ -18,7 +18,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
......@@ -27,8 +27,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
......@@ -38,8 +38,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
......@@ -47,8 +47,8 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::LOCAL)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
......@@ -56,13 +56,13 @@ describe 'gitlab:artifacts namespace rake task' do
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(build.reload.artifacts_file_store).to eq(ObjectStorage::Store::REMOTE)
expect(build.reload.artifacts_metadata_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
......@@ -72,7 +72,7 @@ describe 'gitlab:artifacts namespace rake task' do
let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
context 'when local storage is used' do
let(:store) { ObjectStoreUploader::LOCAL_STORE }
let(:store) { ObjectStorage::Store::LOCAL }
context 'and job does not have file store defined' do
let(:object_storage_enabled) { true }
......@@ -81,7 +81,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
......@@ -91,7 +91,7 @@ describe 'gitlab:artifacts namespace rake task' do
it "migrates file to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
......@@ -99,19 +99,19 @@ describe 'gitlab:artifacts namespace rake task' do
it "fails to migrate to remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
end
end
end
context 'when remote storage is used' do
let(:object_storage_enabled) { true }
let(:store) { ObjectStoreUploader::REMOTE_STORE }
let(:store) { ObjectStorage::Store::REMOTE }
it "file stays on remote storage" do
subject
expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
expect(artifact.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
end
end
end
......
......@@ -6,8 +6,8 @@ describe 'gitlab:lfs namespace rake task' do
end
describe 'migrate' do
let(:local) { ObjectStoreUploader::LOCAL_STORE }
let(:remote) { ObjectStoreUploader::REMOTE_STORE }
let(:local) { ObjectStorage::Store::LOCAL }
let(:remote) { ObjectStorage::Store::REMOTE }
let!(:lfs_object) { create(:lfs_object, :with_file, file_store: local) }
def lfs_migrate
......
require 'spec_helper'
describe AttachmentUploader do
let(:uploader) { described_class.new(build_stubbed(:user)) }
let(:note) { create(:note, :with_attachment) }
let(:uploader) { note.attachment }
let(:upload) { create(:upload, :attachment_upload, model: uploader.model) }
describe "#store_dir" do
it "stores in the system dir" do
expect(uploader.store_dir).to start_with("uploads/-/system/user")
end
subject { uploader }
it "uses the old path when using object storage" do
expect(described_class).to receive(:file_storage?).and_return(false)
expect(uploader.store_dir).to start_with("uploads/user")
end
end
it_behaves_like 'builds correct paths',
store_dir: %r[uploads/-/system/note/attachment/],
upload_path: %r[uploads/-/system/note/attachment/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/note/attachment/]
describe '#move_to_cache' do
it 'is true' do
expect(uploader.move_to_cache).to eq(true)
# EE-specific
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[note/attachment/],
upload_path: %r[note/attachment/]
end
describe '#move_to_store' do
it 'is true' do
expect(uploader.move_to_store).to eq(true)
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/doc_sample.txt')))
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end
require 'spec_helper'
describe AvatarUploader do
let(:uploader) { described_class.new(build_stubbed(:user)) }
let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
describe "#store_dir" do
it "stores in the system dir" do
expect(uploader.store_dir).to start_with("uploads/-/system/user")
end
subject { uploader }
it "uses the old path when using object storage" do
expect(described_class).to receive(:file_storage?).and_return(false)
expect(uploader.store_dir).to start_with("uploads/user")
end
end
it_behaves_like 'builds correct paths',
store_dir: %r[uploads/-/system/user/avatar/],
upload_path: %r[uploads/-/system/user/avatar/],
absolute_path: %r[#{CarrierWave.root}/uploads/-/system/user/avatar/]
describe '#move_to_cache' do
it 'is false' do
expect(uploader.move_to_cache).to eq(false)
# EE-specific
context "object_store is REMOTE" do
before do
stub_uploads_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like 'builds correct paths',
store_dir: %r[user/avatar/],
upload_path: %r[user/avatar/]
end
describe '#move_to_store' do
it 'is false' do
expect(uploader.move_to_store).to eq(false)
context "with a file" do
let(:project) { create(:project, :with_avatar) }
let(:uploader) { project.avatar }
let(:upload) { uploader.upload }
before do
stub_uploads_object_storage
end
it_behaves_like "migrates", to_store: described_class::Store::REMOTE
it_behaves_like "migrates", from_store: described_class::Store::REMOTE, to_store: described_class::Store::LOCAL
end
end