Commit 1fb66181 authored by Shinya Maeda's avatar Shinya Maeda

Merge branch 'live-trace-v2' into live-trace-v2-efficient-destroy-all

parents abde73c0 f819bb72
...@@ -289,7 +289,6 @@ stages: ...@@ -289,7 +289,6 @@ stages:
# Trigger a package build in omnibus-gitlab repository # Trigger a package build in omnibus-gitlab repository
# #
package-and-qa: package-and-qa:
<<: *dedicated-runner
image: ruby:2.4-alpine image: ruby:2.4-alpine
before_script: [] before_script: []
stage: build stage: build
......
...@@ -40,10 +40,6 @@ ...@@ -40,10 +40,6 @@
.project-home-panel { .project-home-panel {
padding-left: 0 !important; padding-left: 0 !important;
.project-avatar {
display: block;
}
.project-repo-buttons, .project-repo-buttons,
.git-clone-holder { .git-clone-holder {
display: none; display: none;
......
...@@ -241,8 +241,6 @@ ...@@ -241,8 +241,6 @@
} }
.scrolling-tabs-container { .scrolling-tabs-container {
position: relative;
.merge-request-tabs-container & { .merge-request-tabs-container & {
overflow: hidden; overflow: hidden;
} }
...@@ -272,8 +270,6 @@ ...@@ -272,8 +270,6 @@
} }
.inner-page-scroll-tabs { .inner-page-scroll-tabs {
position: relative;
.fade-right { .fade-right {
@include fade(left, $white-light); @include fade(left, $white-light);
right: 0; right: 0;
......
...@@ -314,6 +314,10 @@ ...@@ -314,6 +314,10 @@
display: inline-flex; display: inline-flex;
vertical-align: top; vertical-align: top;
&:hover .color-label {
text-decoration: underline;
}
.label { .label {
vertical-align: inherit; vertical-align: inherit;
font-size: $label-font-size; font-size: $label-font-size;
......
...@@ -400,7 +400,8 @@ module ProjectsHelper ...@@ -400,7 +400,8 @@ module ProjectsHelper
exports_path = File.join(Settings.shared['path'], 'tmp/project_exports') exports_path = File.join(Settings.shared['path'], 'tmp/project_exports')
filtered_message = message.strip.gsub(exports_path, "[REPO EXPORT PATH]") filtered_message = message.strip.gsub(exports_path, "[REPO EXPORT PATH]")
filtered_message.gsub(project.repository_storage_path.chomp('/'), "[REPOS PATH]") disk_path = Gitlab.config.repositories.storages[project.repository_storage].legacy_disk_path
filtered_message.gsub(disk_path.chomp('/'), "[REPOS PATH]")
end end
def project_child_container_class(view_path) def project_child_container_class(view_path)
......
...@@ -19,14 +19,18 @@ module Ci ...@@ -19,14 +19,18 @@ module Ci
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment' has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection' has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
<<<<<<< HEAD
has_many :chunks, class_name: 'Ci::JobTraceChunk', foreign_key: :job_id has_many :chunks, class_name: 'Ci::JobTraceChunk', foreign_key: :job_id
=======
>>>>>>> live-trace-v2
has_one :metadata, class_name: 'Ci::BuildMetadata' has_one :metadata, class_name: 'Ci::BuildMetadata'
delegate :timeout, to: :metadata, prefix: true, allow_nil: true delegate :timeout, to: :metadata, prefix: true, allow_nil: true
delegate :gitlab_deploy_token, to: :project delegate :gitlab_deploy_token, to: :project
......
module Ci module Ci
class JobTraceChunk < ActiveRecord::Base class BuildTraceChunk < ActiveRecord::Base
extend Gitlab::Ci::Model extend Gitlab::Ci::Model
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
default_value_for :data_store, :redis default_value_for :data_store, :redis
...@@ -10,15 +10,16 @@ module Ci ...@@ -10,15 +10,16 @@ module Ci
CHUNK_SIZE = 128.kilobytes CHUNK_SIZE = 128.kilobytes
CHUNK_REDIS_TTL = 1.week CHUNK_REDIS_TTL = 1.week
LOCK_RETRY = 100 WRITE_LOCK_RETRY = 100
LOCK_SLEEP = 1 WRITE_LOCK_SLEEP = 1
LOCK_TTL = 5.minutes WRITE_LOCK_TTL = 5.minutes
enum data_store: { enum data_store: {
redis: 1, redis: 1,
db: 2 db: 2
} }
<<<<<<< HEAD:app/models/ci/job_trace_chunk.rb
def self.delayed_cleanup_blk def self.delayed_cleanup_blk
ids = all.redis.pluck(:job_id, :chunk_index).map do |data| ids = all.redis.pluck(:job_id, :chunk_index).map do |data|
"gitlab:ci:trace:#{data.first}:chunks:#{data.second}:data" "gitlab:ci:trace:#{data.first}:chunks:#{data.second}:data"
...@@ -43,36 +44,16 @@ module Ci ...@@ -43,36 +44,16 @@ module Ci
end end
end end
=======
##
# Data is memoized for optimizing #size and #end_offset
>>>>>>> live-trace-v2:app/models/ci/build_trace_chunk.rb
def data def data
if redis? @data ||= get_data
redis_data
elsif db?
raw_data
else
raise 'Unsupported data store'
end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
end
def set_data(value)
raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
in_lock do
if redis?
redis_set_data(value)
elsif db?
self.raw_data = value
else
raise 'Unsupported data store'
end
save! if changed?
end
schedule_to_db if fullfilled?
end end
def truncate(offset = 0) def truncate(offset = 0)
self.append("", offset) self.append("", offset) if offset < size
end end
def append(new_data, offset) def append(new_data, offset)
...@@ -80,7 +61,7 @@ module Ci ...@@ -80,7 +61,7 @@ module Ci
raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0 raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0
raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize
self.set_data(current_data.byteslice(0, offset) + new_data) set_data(current_data.byteslice(0, offset) + new_data)
end end
def size def size
...@@ -111,6 +92,36 @@ module Ci ...@@ -111,6 +92,36 @@ module Ci
private private
def get_data
if redis?
redis_data
elsif db?
raw_data
else
raise 'Unsupported data store'
end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
end
def set_data(value)
raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
in_lock do
if redis?
redis_set_data(value)
elsif db?
self.raw_data = value
else
raise 'Unsupported data store'
end
@data = value
save! if changed?
end
schedule_to_db if fullfilled?
end
def schedule_to_db def schedule_to_db
return if db? return if db?
...@@ -140,22 +151,22 @@ module Ci ...@@ -140,22 +151,22 @@ module Ci
end end
def redis_data_key def redis_data_key
"gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:data" "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data"
end end
def redis_lock_key def redis_lock_key
"gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:lock" "trace_write:#{build_id}:chunks:#{chunk_index}"
end end
def in_lock def in_lock
lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: LOCK_TTL) lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: WRITE_LOCK_TTL)
retry_count = 0 retry_count = 0
until uuid = lease.try_obtain until uuid = lease.try_obtain
# Keep trying until we obtain the lease. To prevent hammering Redis too # Keep trying until we obtain the lease. To prevent hammering Redis too
# much we'll wait for a bit between retries. # much we'll wait for a bit between retries.
sleep(LOCK_SLEEP) sleep(WRITE_LOCK_SLEEP)
break if LOCK_RETRY < (retry_count += 1) break if WRITE_LOCK_RETRY < (retry_count += 1)
end end
raise WriteError, 'Failed to obtain write lock' unless uuid raise WriteError, 'Failed to obtain write lock' unless uuid
......
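For context on the hunks above: chunk writes are serialized through Gitlab::ExclusiveLease using the renamed WRITE_LOCK_* constants. Below is a minimal sketch of that lease-with-retry pattern, not the MR's code verbatim; build_id and chunk_index stand in for the chunk's attributes, and the ensure/cancel release step is an assumption rather than something shown in these hunks.

# Sketch of the write lock used by Ci::BuildTraceChunk#in_lock (values illustrative).
lock_key = "trace_write:#{build_id}:chunks:#{chunk_index}"
lease    = Gitlab::ExclusiveLease.new(lock_key, timeout: 5.minutes) # WRITE_LOCK_TTL

retry_count = 0
until (uuid = lease.try_obtain)
  sleep(1)                          # WRITE_LOCK_SLEEP: back off so Redis is not hammered
  break if 100 < (retry_count += 1) # WRITE_LOCK_RETRY: give up after too many attempts
end

raise 'Failed to obtain write lock' unless uuid

begin
  # ... perform the protected write, e.g. redis_set_data(value) or self.raw_data = value ...
ensure
  Gitlab::ExclusiveLease.cancel(lock_key, uuid) # assumed release instead of waiting for the TTL
end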
...@@ -45,25 +45,25 @@ module Storage ...@@ -45,25 +45,25 @@ module Storage
# Hooks # Hooks
# Save the storage paths before the projects are destroyed to use them on after destroy # Save the storages before the projects are destroyed to use them on after destroy
def prepare_for_destroy def prepare_for_destroy
old_repository_storage_paths old_repository_storages
end end
private private
def move_repositories def move_repositories
# Move the namespace directory in all storage paths used by member projects # Move the namespace directory in all storages used by member projects
repository_storage_paths.each do |repository_storage_path| repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it # Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, full_path_was) gitlab_shell.add_namespace(repository_storage, full_path_was)
# Ensure new directory exists before moving it (if there's a parent) # Ensure new directory exists before moving it (if there's a parent)
gitlab_shell.add_namespace(repository_storage_path, parent.full_path) if parent gitlab_shell.add_namespace(repository_storage, parent.full_path) if parent
unless gitlab_shell.mv_namespace(repository_storage_path, full_path_was, full_path) unless gitlab_shell.mv_namespace(repository_storage, full_path_was, full_path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{full_path_was} to #{full_path}" Rails.logger.error "Exception moving path #{repository_storage} from #{full_path_was} to #{full_path}"
# if we cannot move namespace directory we should rollback # if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs # db changes in order to prevent out of sync between db and fs
...@@ -72,33 +72,33 @@ module Storage ...@@ -72,33 +72,33 @@ module Storage
end end
end end
def old_repository_storage_paths def old_repository_storages
@old_repository_storage_paths ||= repository_storage_paths @old_repository_storage_paths ||= repository_storages
end end
def repository_storage_paths def repository_storages
# We need to get the storage paths for all the projects, even the ones that are # We need to get the storage paths for all the projects, even the ones that are
# pending delete. Unscoping also get rids of the default order, which causes # pending delete. Unscoping also get rids of the default order, which causes
# problems with SELECT DISTINCT. # problems with SELECT DISTINCT.
Project.unscoped do Project.unscoped do
all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage_path) all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage)
end end
end end
def rm_dir def rm_dir
# Remove the namespace directory in all storages paths used by member projects # Remove the namespace directory in all storages paths used by member projects
old_repository_storage_paths.each do |repository_storage_path| old_repository_storages.each do |repository_storage|
# Move namespace directory into trash. # Move namespace directory into trash.
# We will remove it later async # We will remove it later async
new_path = "#{full_path}+#{id}+deleted" new_path = "#{full_path}+#{id}+deleted"
if gitlab_shell.mv_namespace(repository_storage_path, full_path, new_path) if gitlab_shell.mv_namespace(repository_storage, full_path, new_path)
Gitlab::AppLogger.info %Q(Namespace directory "#{full_path}" moved to "#{new_path}") Gitlab::AppLogger.info %Q(Namespace directory "#{full_path}" moved to "#{new_path}")
# Remove namespace directroy async with delay so # Remove namespace directroy async with delay so
# GitLab has time to remove all projects first # GitLab has time to remove all projects first
run_after_commit do run_after_commit do
GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage_path, new_path) GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage, new_path)
end end
end end
end end
......
...@@ -197,10 +197,6 @@ class MergeRequestDiff < ActiveRecord::Base ...@@ -197,10 +197,6 @@ class MergeRequestDiff < ActiveRecord::Base
CompareService.new(project, head_commit_sha).execute(project, sha, straight: true) CompareService.new(project, head_commit_sha).execute(project, sha, straight: true)
end end
def commits_count
super || merge_request_diff_commits.size
end
private private
def create_merge_request_diff_files(diffs) def create_merge_request_diff_files(diffs)
......
...@@ -527,10 +527,6 @@ class Project < ActiveRecord::Base ...@@ -527,10 +527,6 @@ class Project < ActiveRecord::Base
repository.empty? repository.empty?
end end
def repository_storage_path
Gitlab.config.repositories.storages[repository_storage]&.legacy_disk_path
end
def team def team
@team ||= ProjectTeam.new(self) @team ||= ProjectTeam.new(self)
end end
...@@ -1114,7 +1110,7 @@ class Project < ActiveRecord::Base ...@@ -1114,7 +1110,7 @@ class Project < ActiveRecord::Base
# Check if repository already exists on disk # Check if repository already exists on disk
def check_repository_path_availability def check_repository_path_availability
return true if skip_disk_validation return true if skip_disk_validation
return false unless repository_storage_path return false unless repository_storage
expires_full_path_cache # we need to clear cache to validate renames correctly expires_full_path_cache # we need to clear cache to validate renames correctly
...@@ -1919,14 +1915,14 @@ class Project < ActiveRecord::Base ...@@ -1919,14 +1915,14 @@ class Project < ActiveRecord::Base
def check_repository_absence! def check_repository_absence!
return if skip_disk_validation return if skip_disk_validation
if repository_storage_path.blank? || repository_with_same_path_already_exists? if repository_storage.blank? || repository_with_same_path_already_exists?
errors.add(:base, 'There is already a repository with that name on disk') errors.add(:base, 'There is already a repository with that name on disk')
throw :abort throw :abort
end end
end end
def repository_with_same_path_already_exists? def repository_with_same_path_already_exists?
gitlab_shell.exists?(repository_storage_path, "#{disk_path}.git") gitlab_shell.exists?(repository_storage, "#{disk_path}.git")
end end
# set last_activity_at to the same as created_at # set last_activity_at to the same as created_at
......
...@@ -21,7 +21,7 @@ class ProjectWiki ...@@ -21,7 +21,7 @@ class ProjectWiki
end end
delegate :empty?, to: :pages delegate :empty?, to: :pages
delegate :repository_storage_path, :hashed_storage?, to: :project delegate :repository_storage, :hashed_storage?, to: :project
def path def path
@project.path + '.wiki' @project.path + '.wiki'
......
...@@ -84,9 +84,14 @@ class Repository ...@@ -84,9 +84,14 @@ class Repository
# Return absolute path to repository # Return absolute path to repository
def path_to_repo def path_to_repo
@path_to_repo ||= File.expand_path( @path_to_repo ||=
File.join(repository_storage_path, disk_path + '.git') begin
) storage = Gitlab.config.repositories.storages[@project.repository_storage]
File.expand_path(
File.join(storage.legacy_disk_path, disk_path + '.git')
)
end
end end
def inspect def inspect
...@@ -915,10 +920,6 @@ class Repository ...@@ -915,10 +920,6 @@ class Repository
raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref) raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref)
end end
def repository_storage_path
@project.repository_storage_path
end
def rebase(user, merge_request) def rebase(user, merge_request)
raw.rebase(user, merge_request.id, branch: merge_request.source_branch, raw.rebase(user, merge_request.id, branch: merge_request.source_branch,
branch_sha: merge_request.source_branch_sha, branch_sha: merge_request.source_branch_sha,
......
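The common thread in these model changes is that callers now pass the shard name (project.repository_storage, e.g. "default") instead of an absolute path, and resolve the legacy disk path only where it is still needed. A hedged sketch of that lookup, assembled from the hunks above; project, old_disk_path and new_disk_path stand in for values supplied by the caller, and the example paths are placeholders.

# Resolve a shard name to its legacy on-disk location, as Repository#path_to_repo now does.
storage = Gitlab.config.repositories.storages[project.repository_storage]

legacy_root = storage.legacy_disk_path # e.g. /var/opt/gitlab/git-data/repositories
repo_path   = File.expand_path(File.join(legacy_root, project.disk_path + '.git'))

# Shell operations, by contrast, now receive only the shard name:
gitlab_shell.mv_repository(project.repository_storage, old_disk_path, new_disk_path)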
module Storage module Storage
class HashedProject class HashedProject
attr_accessor :project attr_accessor :project
delegate :gitlab_shell, :repository_storage_path, to: :project delegate :gitlab_shell, :repository_storage, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze ROOT_PATH_PREFIX = '@hashed'.freeze
...@@ -24,7 +24,7 @@ module Storage ...@@ -24,7 +24,7 @@ module Storage
end end
def ensure_storage_path_exists def ensure_storage_path_exists
gitlab_shell.add_namespace(repository_storage_path, base_dir) gitlab_shell.add_namespace(repository_storage, base_dir)
end end
def rename_repo def rename_repo
......
module Storage module Storage
class LegacyProject class LegacyProject
attr_accessor :project attr_accessor :project
delegate :namespace, :gitlab_shell, :repository_storage_path, to: :project delegate :namespace, :gitlab_shell, :repository_storage, to: :project
def initialize(project) def initialize(project)
@project = project @project = project
...@@ -24,18 +24,18 @@ module Storage ...@@ -24,18 +24,18 @@ module Storage
def ensure_storage_path_exists def ensure_storage_path_exists
return unless namespace return unless namespace
gitlab_shell.add_namespace(repository_storage_path, base_dir) gitlab_shell.add_namespace(repository_storage, base_dir)
end end
def rename_repo def rename_repo
new_full_path = project.build_full_path new_full_path = project.build_full_path
if gitlab_shell.mv_repository(repository_storage_path, project.full_path_was, new_full_path) if gitlab_shell.mv_repository(repository_storage, project.full_path_was, new_full_path)
# If repository moved successfully we need to send update instructions to users. # If repository moved successfully we need to send update instructions to users.
# However we cannot allow rollback since we moved repository # However we cannot allow rollback since we moved repository
# So we basically we mute exceptions in next actions # So we basically we mute exceptions in next actions
begin begin
gitlab_shell.mv_repository(repository_storage_path, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki") gitlab_shell.mv_repository(repository_storage, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
return true return true
rescue => e rescue => e
Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}" Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}"
......
...@@ -91,7 +91,7 @@ module Projects ...@@ -91,7 +91,7 @@ module Projects
project.run_after_commit do project.run_after_commit do
# self is now project # self is now project
GitlabShellWorker.perform_in(5.minutes, :remove_repository, self.repository_storage_path, new_path) GitlabShellWorker.perform_in(5.minutes, :remove_repository, self.repository_storage, new_path)
end end
else else
false false
...@@ -100,9 +100,9 @@ module Projects ...@@ -100,9 +100,9 @@ module Projects
def mv_repository(from_path, to_path) def mv_repository(from_path, to_path)
# There is a possibility project does not have repository or wiki # There is a possibility project does not have repository or wiki
return true unless gitlab_shell.exists?(project.repository_storage_path, from_path + '.git') return true unless gitlab_shell.exists?(project.repository_storage, from_path + '.git')
gitlab_shell.mv_repository(project.repository_storage_path, from_path, to_path) gitlab_shell.mv_repository(project.repository_storage, from_path, to_path)
end end
def attempt_rollback(project, message) def attempt_rollback(project, message)
......
...@@ -47,8 +47,8 @@ module Projects ...@@ -47,8 +47,8 @@ module Projects
private private
def move_repository(from_name, to_name) def move_repository(from_name, to_name)
from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git") from_exists = gitlab_shell.exists?(project.repository_storage, "#{from_name}.git")
to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git") to_exists = gitlab_shell.exists?(project.repository_storage, "#{to_name}.git")
# If we don't find the repository on either original or target we should log that as it could be an issue if the # If we don't find the repository on either original or target we should log that as it could be an issue if the
# project was not originally empty. # project was not originally empty.
...@@ -60,7 +60,7 @@ module Projects ...@@ -60,7 +60,7 @@ module Projects
return true return true
end end
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end end
def rollback_folder_move def rollback_folder_move
......
...@@ -127,7 +127,7 @@ module Projects ...@@ -127,7 +127,7 @@ module Projects
end end
def move_repo_folder(from_name, to_name) def move_repo_folder(from_name, to_name)
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end end
def execute_system_hooks def execute_system_hooks
......
- breadcrumb_title "General Settings" - breadcrumb_title "General Settings"
- @content_class = "limit-container-width" unless fluid_layout
.panel.panel-default.prepend-top-default .panel.panel-default.prepend-top-default
.panel-heading .panel-heading
Group settings Group settings
......
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
- if @namespaces.present? - if @namespaces.present?
.fork-thumbnail-container.js-fork-content .fork-thumbnail-container.js-fork-content
%h5.prepend-top-0.append-bottom-0.prepend-left-default.append-right-default %h5.prepend-top-0.append-bottom-0.prepend-left-default.append-right-default
Click to fork the project = _("Select a namespace to fork the project")
- @namespaces.each do |namespace| - @namespaces.each do |namespace|
= render 'fork_button', namespace: namespace = render 'fork_button', namespace: namespace
- else - else
......
...@@ -32,6 +32,13 @@ ...@@ -32,6 +32,13 @@
required: true, required: true,
title: 'You can choose a descriptive name different from the path.' title: 'You can choose a descriptive name different from the path.'
- if @group.persisted?
.form-group.group-name-holder
= f.label :id, class: 'control-label' do
= _("Group ID")
.col-sm-10
= f.text_field :id, class: 'form-control', readonly: true
.form-group.group-description-holder .form-group.group-description-holder
= f.label :description, class: 'control-label' = f.label :description, class: 'control-label'
.col-sm-10 .col-sm-10
......
...@@ -4,9 +4,9 @@ class BuildTraceSwapChunkWorker ...@@ -4,9 +4,9 @@ class BuildTraceSwapChunkWorker
queue_namespace :pipeline_processing queue_namespace :pipeline_processing
def perform(job_trace_chunk_id) def perform(build_trace_chunk_id)
Ci::JobTraceChunk.find_by(id: job_trace_chunk_id).try do |job_trace_chunk| Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
job_trace_chunk.use_database! build_trace_chunk.use_database!
end end
end end
end end
...@@ -13,7 +13,9 @@ class RepositoryForkWorker ...@@ -13,7 +13,9 @@ class RepositoryForkWorker
# See https://gitlab.com/gitlab-org/gitaly/issues/1110 # See https://gitlab.com/gitlab-org/gitaly/issues/1110
if args.empty? if args.empty?
source_project = target_project.forked_from_project source_project = target_project.forked_from_project
return target_project.mark_import_as_failed('Source project cannot be found.') unless source_project unless source_project
return target_project.mark_import_as_failed('Source project cannot be found.')
end
fork_repository(target_project, source_project.repository_storage, source_project.disk_path) fork_repository(target_project, source_project.repository_storage, source_project.disk_path)
else else
......
---
title: Fix tabs container styles to make RSS button clickable
merge_request: 18559
author:
type: fixed
---
title: Replace "Click" with "Select" to be more inclusive of people with accessibility
requirements
merge_request: 18386
author: Mark Lapierre
type: other
---
title: Align project avatar on small viewports
merge_request: 18513
author: George Tsiolis
type: changed
---
title: Restore label underline color
merge_request: 18407
author: George Tsiolis
type: fixed
---
title: Show group id in group settings
merge_request: 18482
author: George Tsiolis
type: added
require_dependency File.expand_path('../../lib/gitlab', __dir__) # Load Gitlab as soon as possible require_relative '../settings'
# Default settings # Default settings
Settings['ldap'] ||= Settingslogic.new({}) Settings['ldap'] ||= Settingslogic.new({})
......
require_relative '../../lib/gitlab'
deprecator = ActiveSupport::Deprecation.new('11.0', 'GitLab') deprecator = ActiveSupport::Deprecation.new('11.0', 'GitLab')
if Gitlab.com? || Rails.env.development? if Gitlab.dev_env_or_com?
ActiveSupport::Deprecation.deprecate_methods(Gitlab::GitalyClient::StorageSettings, :legacy_disk_path, deprecator: deprecator) ActiveSupport::Deprecation.deprecate_methods(Gitlab::GitalyClient::StorageSettings, :legacy_disk_path, deprecator: deprecator)
end end
...@@ -59,17 +59,17 @@ class RemoveDotGitFromGroupNames < ActiveRecord::Migration ...@@ -59,17 +59,17 @@ class RemoveDotGitFromGroupNames < ActiveRecord::Migration
end end
def move_namespace(group_id, path_was, path) def move_namespace(group_id, path_was, path)
repository_storage_paths = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{group_id}").map do |row| repository_storages = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{group_id}").map do |row|
Gitlab.config.repositories.storages[row['repository_storage']].legacy_disk_path row['repository_storage']
end.compact end.compact
# Move the namespace directory in all storages paths used by member projects # Move the namespace directory in all storages paths used by member projects
repository_storage_paths.each do |repository_storage_path| repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it # Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, path_was) gitlab_shell.add_namespace(repository_storage, path_was)
unless gitlab_shell.mv_namespace(repository_storage_path, path_was, path) unless gitlab_shell.mv_namespace(repository_storage, path_was, path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{path_was} to #{path}" Rails.logger.error "Exception moving on shard #{repository_storage} from #{path_was} to #{path}"
# if we cannot move namespace directory we should rollback # if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs # db changes in order to prevent out of sync between db and fs
......
...@@ -53,8 +53,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration ...@@ -53,8 +53,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
select_all("SELECT id, path FROM routes WHERE path = '#{quote_string(path)}'").present? select_all("SELECT id, path FROM routes WHERE path = '#{quote_string(path)}'").present?
end end
def path_exists?(path, repository_storage_path) def path_exists?(shard, repository_storage_path)
repository_storage_path && gitlab_shell.exists?(repository_storage_path, path) repository_storage_path && gitlab_shell.exists?(shard, repository_storage_path)
end end
# Accepts invalid path like test.git and returns test_git or # Accepts invalid path like test.git and returns test_git or
...@@ -70,8 +70,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration ...@@ -70,8 +70,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
def check_routes(base, counter, path) def check_routes(base, counter, path)
route_exists = route_exists?(path) route_exists = route_exists?(path)
Gitlab.config.repositories.storages.each_value do |storage| Gitlab.config.repositories.storages.each do |shard, storage|
if route_exists || path_exists?(path, storage.legacy_disk_path) if route_exists || path_exists?(shard, storage.legacy_disk_path)
counter += 1 counter += 1
path = "#{base}#{counter}" path = "#{base}#{counter}"
...@@ -83,17 +83,17 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration ...@@ -83,17 +83,17 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
end end
def move_namespace(namespace_id, path_was, path) def move_namespace(namespace_id, path_was, path)
repository_storage_paths = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{namespace_id}").map do |row| repository_storages = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{namespace_id}").map do |row|
Gitlab.config.repositories.storages[row['repository_storage']].legacy_disk_path row['repository_storage']
end.compact end.compact
# Move the namespace directory in all storages paths used by member projects # Move the namespace directory in all storages used by member projects
repository_storage_paths.each do |repository_storage_path| repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it # Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, path_was) gitlab_shell.add_namespace(repository_storage, path_was)
unless gitlab_shell.mv_namespace(repository_storage_path, path_was, path) unless gitlab_shell.mv_namespace(repository_storage, path_was, path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{path_was} to #{path}" Rails.logger.error "Exception moving on shard #{repository_storage} from #{path_was} to #{path}"
# if we cannot move namespace directory we should rollback # if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs # db changes in order to prevent out of sync between db and fs
......
class CreateCiJobTraceChunks < ActiveRecord::Migration class CreateCiBuildTraceChunks < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
DOWNTIME = false DOWNTIME = false
def change def change
create_table :ci_job_trace_chunks, id: :bigserial do |t| create_table :ci_build_trace_chunks, id: :bigserial do |t|
t.integer :job_id, null: false t.integer :build_id, null: false
t.integer :chunk_index, null: false t.integer :chunk_index, null: false
t.integer :data_store, null: false t.integer :data_store, null: false
t.binary :raw_data t.binary :raw_data
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade
t.index [:job_id, :chunk_index], unique: true t.index [:build_id, :chunk_index], unique: true
end end
end end
end end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html # See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab. # for more information on how to write migrations for GitLab.
require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
class AddLimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration class AddLimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers include Gitlab::Database::MigrationHelpers
DOWNTIME = false DOWNTIME = false
def up def up
LimitsCiJobTraceChunksRawDataForMysql.new.up LimitsCiBuildTraceChunksRawDataForMysql.new.up
end end
end end
class AssureCommitsCountForMergeRequestDiff < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
include ::EachBatch
end
def up
Gitlab::BackgroundMigration.steal('AddMergeRequestDiffCommitsCount')
MergeRequestDiff.where(commits_count: nil).each_batch(of: 50) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount.new.perform(*range)
end
end
def down
# noop
end
end
class LimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration class LimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration
def up def up
return unless Gitlab::Database.mysql? return unless Gitlab::Database.mysql?
# Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB) # Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB)
# Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB # Because 'raw_data' is always capped by Ci::BuildTraceChunk::CHUNK_SIZE, which is 128KB
change_column :ci_job_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT change_column :ci_build_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT
end end
end end
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180418053107) do ActiveRecord::Schema.define(version: 20180425205249) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "plpgsql" enable_extension "plpgsql"
...@@ -246,6 +246,15 @@ ActiveRecord::Schema.define(version: 20180418053107) do ...@@ -246,6 +246,15 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree
create_table "ci_build_trace_chunks", id: :bigserial, force: :cascade do |t|
t.integer "build_id", null: false
t.integer "chunk_index", null: false
t.integer "data_store", null: false
t.binary "raw_data"
end
add_index "ci_build_trace_chunks", ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree
create_table "ci_build_trace_section_names", force: :cascade do |t| create_table "ci_build_trace_section_names", force: :cascade do |t|
t.integer "project_id", null: false t.integer "project_id", null: false
t.string "name", null: false t.string "name", null: false
...@@ -371,15 +380,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do ...@@ -371,15 +380,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_job_trace_chunks", id: :bigserial, force: :cascade do |t|
t.integer "job_id", null: false
t.integer "chunk_index", null: false
t.integer "data_store", null: false
t.binary "raw_data"
end
add_index "ci_job_trace_chunks", ["job_id", "chunk_index"], name: "index_ci_job_trace_chunks_on_job_id_and_chunk_index", unique: true, using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t| create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false t.string "key", null: false
t.text "value" t.text "value"
...@@ -2075,6 +2075,7 @@ ActiveRecord::Schema.define(version: 20180418053107) do ...@@ -2075,6 +2075,7 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
add_foreign_key "ci_build_trace_chunks", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_builds", column: "build_id", name: "fk_4ebe41f502", on_delete: :cascade add_foreign_key "ci_build_trace_sections", "ci_builds", column: "build_id", name: "fk_4ebe41f502", on_delete: :cascade
...@@ -2087,7 +2088,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do ...@@ -2087,7 +2088,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_job_trace_chunks", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
...@@ -13,6 +13,7 @@ following locations: ...@@ -13,6 +13,7 @@ following locations:
- [Broadcast Messages](broadcast_messages.md) - [Broadcast Messages](broadcast_messages.md)
- [Project-level Variables](project_level_variables.md) - [Project-level Variables](project_level_variables.md)
- [Group-level Variables](group_level_variables.md) - [Group-level Variables](group_level_variables.md)
- [Code Snippets](snippets.md)
- [Commits](commits.md) - [Commits](commits.md)
- [Custom Attributes](custom_attributes.md) - [Custom Attributes](custom_attributes.md)
- [Deployments](deployments.md) - [Deployments](deployments.md)
......
...@@ -298,6 +298,28 @@ Mentioned briefly earlier, but the following things of Runners can be exploited. ...@@ -298,6 +298,28 @@ Mentioned briefly earlier, but the following things of Runners can be exploited.
We're always looking for contributions that can mitigate these We're always looking for contributions that can mitigate these
[Security Considerations](https://docs.gitlab.com/runner/security/). [Security Considerations](https://docs.gitlab.com/runner/security/).
### Resetting the registration token for a Project
If you think that the registration token for a Project was revealed, you should
reset it. Such a token can be used to register another Runner to the Project,
which may then be used to obtain the values of secret variables or to clone the
project code that would otherwise be unavailable to the attacker.
To reset the token:
1. Go to **Settings > CI/CD** for a specified Project
1. Expand the **General pipelines settings** section
1. Find the **Runner token** form field and click the **Reveal value** button
1. Delete the value and save the form
1. After the page is refreshed, expand the **Runners settings** section
and check the registration token - it should be changed
From now on, the old token is no longer valid and cannot be used to register
a new Runner to the project. If you use any tools to provision and register
new Runners, update them to use the new token value.
## Determining the IP address of a Runner ## Determining the IP address of a Runner
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/17286) in GitLab 10.6. > [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/17286) in GitLab 10.6.
......
...@@ -61,7 +61,7 @@ future GitLab releases.** ...@@ -61,7 +61,7 @@ future GitLab releases.**
| **CI_RUNNER_EXECUTABLE_ARCH** | all | 10.6 | The OS/architecture of the GitLab Runner executable (note that this is not necessarily the same as the environment of the executor) | | **CI_RUNNER_EXECUTABLE_ARCH** | all | 10.6 | The OS/architecture of the GitLab Runner executable (note that this is not necessarily the same as the environment of the executor) |
| **CI_PIPELINE_ID** | 8.10 | 0.5 | The unique id of the current pipeline that GitLab CI uses internally | | **CI_PIPELINE_ID** | 8.10 | 0.5 | The unique id of the current pipeline that GitLab CI uses internally |
| **CI_PIPELINE_TRIGGERED** | all | all | The flag to indicate that job was [triggered] | | **CI_PIPELINE_TRIGGERED** | all | all | The flag to indicate that job was [triggered] |
| **CI_PIPELINE_SOURCE** | 10.0 | all | The source for this pipeline, one of: push, web, trigger, schedule, api, external. Pipelines created before 9.5 will have unknown as source | | **CI_PIPELINE_SOURCE** | 10.0 | all | Indicates how the pipeline was triggered. Possible options are: `push`, `web`, `trigger`, `schedule`, `api`, and `pipeline`. For pipelines created before GitLab 9.5, this will show as `unknown` |
| **CI_PROJECT_DIR** | all | all | The full path where the repository is cloned and where the job is run | | **CI_PROJECT_DIR** | all | all | The full path where the repository is cloned and where the job is run |
| **CI_PROJECT_ID** | all | all | The unique id of the current project that GitLab CI uses internally | | **CI_PROJECT_ID** | all | all | The unique id of the current project that GitLab CI uses internally |
| **CI_PROJECT_NAME** | 8.10 | 0.5 | The project name that is currently being built (actually it is project folder name) | | **CI_PROJECT_NAME** | 8.10 | 0.5 | The project name that is currently being built (actually it is project folder name) |
......
...@@ -10,10 +10,9 @@ should be deployed, upgraded, and configured. ...@@ -10,10 +10,9 @@ should be deployed, upgraded, and configured.
## Chart Overview ## Chart Overview
* **[GitLab-Omnibus](gitlab_omnibus.md)**: The best way to run GitLab on Kubernetes today, suited for small deployments. The chart is in beta and will be deprecated by the [cloud native GitLab chart](#cloud-native-gitlab-chart). * **[GitLab-Omnibus](gitlab_omnibus.md)**: The best way to run GitLab on Kubernetes today, suited for small deployments. The chart is in beta and will be deprecated by the [cloud native GitLab chart](#cloud-native-gitlab-chart).
* **[Cloud Native GitLab Chart](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md)**: The next generation GitLab chart, currently in alpha. Will support large deployments with horizontal scaling of individual GitLab components. * **[Cloud Native GitLab Chart](https://gitlab.com/charts/gitlab/blob/master/README.md)**: The next generation GitLab chart, currently in alpha. Will support large deployments with horizontal scaling of individual GitLab components.
* Other Charts * Other Charts
* [GitLab Runner Chart](gitlab_runner_chart.md): For deploying just the GitLab Runner. * [GitLab Runner Chart](gitlab_runner_chart.md): For deploying just the GitLab Runner.
* [Advanced GitLab Installation](gitlab_chart.md): Deprecated, being replaced by the [cloud native GitLab chart](#cloud-native-gitlab-chart). Provides additional deployment options, but provides less functionality out-of-the-box.
* [Community Contributed Charts](#community-contributed-charts): Community contributed charts, deprecated by the official GitLab chart. * [Community Contributed Charts](#community-contributed-charts): Community contributed charts, deprecated by the official GitLab chart.
## GitLab-Omnibus Chart (Recommended) ## GitLab-Omnibus Chart (Recommended)
...@@ -27,7 +26,7 @@ Learn more about the [gitlab-omnibus chart](gitlab_omnibus.md). ...@@ -27,7 +26,7 @@ Learn more about the [gitlab-omnibus chart](gitlab_omnibus.md).
## Cloud Native GitLab Chart ## Cloud Native GitLab Chart
GitLab is working towards building a [cloud native GitLab chart](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md). A key part of this effort is to isolate each service into its [own Docker container and Helm chart](https://gitlab.com/gitlab-org/omnibus-gitlab/issues/2420), rather than utilizing the all-in-one container image of the [current chart](#gitlab-omnibus-chart-recommended). GitLab is working towards building a [cloud native GitLab chart](https://gitlab.com/charts/gitlab/blob/master/README.md). A key part of this effort is to isolate each service into its [own Docker container and Helm chart](https://gitlab.com/gitlab-org/omnibus-gitlab/issues/2420), rather than utilizing the all-in-one container image of the [current chart](#gitlab-omnibus-chart-recommended).
By offering individual containers and charts, we will be able to provide a number of benefits: By offering individual containers and charts, we will be able to provide a number of benefits:
* Easier horizontal scaling of each service, * Easier horizontal scaling of each service,
...@@ -37,7 +36,7 @@ By offering individual containers and charts, we will be able to provide a numbe ...@@ -37,7 +36,7 @@ By offering individual containers and charts, we will be able to provide a numbe
Presently this chart is available in alpha for testing, and not recommended for production use. Presently this chart is available in alpha for testing, and not recommended for production use.
Learn more about the [cloud native GitLab chart here ](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md) and [here [Video]](https://youtu.be/Z6jWR8Z8dv8). Learn more about the [cloud native GitLab chart here ](https://gitlab.com/charts/gitlab/blob/master/README.md) and [here [Video]](https://youtu.be/Z6jWR8Z8dv8).
## Other Charts ## Other Charts
......
...@@ -69,7 +69,7 @@ GitHub will generate an application ID and secret key for you to use. ...@@ -69,7 +69,7 @@ GitHub will generate an application ID and secret key for you to use.
"name" => "github", "name" => "github",
"app_id" => "YOUR_APP_ID", "app_id" => "YOUR_APP_ID",
"app_secret" => "YOUR_APP_SECRET", "app_secret" => "YOUR_APP_SECRET",
"url" => "https://github.com/", "url" => "https://github.example.com/",
"args" => { "scope" => "user:email" } "args" => { "scope" => "user:email" }
} }
] ]
...@@ -125,7 +125,7 @@ For omnibus package: ...@@ -125,7 +125,7 @@ For omnibus package:
"name" => "github", "name" => "github",
"app_id" => "YOUR_APP_ID", "app_id" => "YOUR_APP_ID",
"app_secret" => "YOUR_APP_SECRET", "app_secret" => "YOUR_APP_SECRET",
"url" => "https://github.com/", "url" => "https://github.example.com/",
"verify_ssl" => false, "verify_ssl" => false,
"args" => { "scope" => "user:email" } "args" => { "scope" => "user:email" }
} }
......
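The two hunks above only change the example URL from GitHub.com to a GitHub Enterprise host. For readability, here is a hedged sketch of the full provider entry those fragments come from, assuming the omnibus setting name gitlab_rails['omniauth_providers']; the app ID and secret are placeholders.

gitlab_rails['omniauth_providers'] = [
  {
    "name" => "github",
    "app_id" => "YOUR_APP_ID",
    "app_secret" => "YOUR_APP_SECRET",
    "url" => "https://github.example.com/", # GitHub Enterprise host, not github.com
    "verify_ssl" => false,                  # only when the instance uses a self-signed certificate
    "args" => { "scope" => "user:email" }
  }
]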
...@@ -10,8 +10,30 @@ applications. ...@@ -10,8 +10,30 @@ applications.
## Overview ## Overview
With Auto DevOps, the software development process becomes easier to set up With Auto DevOps, the software development process becomes easier to set up
as every project can have a complete workflow from build to deploy and monitoring, as every project can have a complete workflow from verification to monitoring
with minimal to zero configuration. without needing to configure anything. Just push your code and GitLab takes
care of everything else. This makes it easier to start new projects and brings
consistency to how applications are set up throughout a company.
## Comparison to application platforms and PaaS
Auto DevOps provides functionality described by others as an application
platform or as a Platform as a Service (PaaS). It takes inspiration from the
innovative work done by [Heroku](https://www.heroku.com/) and goes beyond it
in a couple of ways:
1. Auto DevOps works with any Kubernetes cluster; you're not limited to running
on GitLab's infrastructure (note that many features also work without Kubernetes).
1. There is no additional cost (no markup on the infrastructure costs), and you
can use a self-hosted Kubernetes cluster or Containers as a Service on any
public cloud (for example [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine/)).
1. Auto DevOps has more features including security testing, performance testing,
and code quality testing.
1. It offers an incremental graduation path. If you need advanced customizations,
you can start modifying the templates without having to start over on a
completely different platform.
## Features
Comprised of a set of stages, Auto DevOps brings these best practices to your Comprised of a set of stages, Auto DevOps brings these best practices to your
project in an easy and automatic way: project in an easy and automatic way:
......
doc/user/group/img/groups.png: binary image updated (198 KB → 244 KB)
...@@ -31,7 +31,8 @@ with all their related data and be moved into a new GitLab instance. ...@@ -31,7 +31,8 @@ with all their related data and be moved into a new GitLab instance.
| GitLab version | Import/Export version | | GitLab version | Import/Export version |
| ---------------- | --------------------- | | ---------------- | --------------------- |
| 10.4 to current | 0.2.2 | | 10.8 to current | 0.2.3 |
| 10.4 | 0.2.2 |
| 10.3 | 0.2.1 | | 10.3 | 0.2.1 |
| 10.0 | 0.2.0 | | 10.0 | 0.2.0 |
| 9.4.0 | 0.1.8 | | 9.4.0 | 0.1.8 |
......
require_dependency 'settings'
require_dependency 'gitlab/popen' require_dependency 'gitlab/popen'
module Gitlab module Gitlab
...@@ -30,6 +29,6 @@ module Gitlab ...@@ -30,6 +29,6 @@ module Gitlab
end end
def self.dev_env_or_com? def self.dev_env_or_com?
Rails.env.test? || Rails.env.development? || org? || com? Rails.env.development? || org? || com?
end end
end end
...@@ -75,10 +75,11 @@ module Gitlab ...@@ -75,10 +75,11 @@ module Gitlab
end end
def mv_repo(project) def mv_repo(project)
FileUtils.mv(repo_path, File.join(project.repository_storage_path, project.disk_path + '.git')) storage_path = storage_path_for_shard(project.repository_storage)
FileUtils.mv(repo_path, project.repository.path_to_repo)
if bare_repo.wiki_exists? if bare_repo.wiki_exists?
FileUtils.mv(wiki_path, File.join(project.repository_storage_path, project.disk_path + '.wiki.git')) FileUtils.mv(wiki_path, File.join(storage_path, project.disk_path + '.wiki.git'))
end end
true true
...@@ -88,6 +89,10 @@ module Gitlab ...@@ -88,6 +89,10 @@ module Gitlab
false false
end end
def storage_path_for_shard(shard)
Gitlab.config.repositories.storages[shard].legacy_disk_path
end
def find_or_create_groups def find_or_create_groups
return nil unless group_path.present? return nil unless group_path.present?
......
...@@ -54,14 +54,14 @@ module Gitlab ...@@ -54,14 +54,14 @@ module Gitlab
end end
def exist? def exist?
trace_artifact&.exists? || job.chunks.any? || current_path.present? || old_trace.present? trace_artifact&.exists? || job.trace_chunks.any? || current_path.present? || old_trace.present?
end end
def read def read
stream = Gitlab::Ci::Trace::Stream.new do stream = Gitlab::Ci::Trace::Stream.new do
if trace_artifact if trace_artifact
trace_artifact.open trace_artifact.open
elsif job.chunks.any? elsif job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job) Gitlab::Ci::Trace::ChunkedIO.new(job)
elsif current_path elsif current_path
File.open(current_path, "rb") File.open(current_path, "rb")
...@@ -100,7 +100,7 @@ module Gitlab ...@@ -100,7 +100,7 @@ module Gitlab
FileUtils.rm(trace_path, force: true) FileUtils.rm(trace_path, force: true)
end end
job.chunks.fast_destroy_all job.trace_chunks.fast_destroy_all
job.erase_old_trace! job.erase_old_trace!
end end
...@@ -108,7 +108,7 @@ module Gitlab ...@@ -108,7 +108,7 @@ module Gitlab
raise ArchiveError, 'Already archived' if trace_artifact raise ArchiveError, 'Already archived' if trace_artifact
raise ArchiveError, 'Job is not finished yet' unless job.complete? raise ArchiveError, 'Job is not finished yet' unless job.complete?
if job.chunks.any? if job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream| Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
archive_stream!(stream) archive_stream!(stream)
stream.delete! stream.delete!
...@@ -130,7 +130,7 @@ module Gitlab ...@@ -130,7 +130,7 @@ module Gitlab
def archive_stream!(stream) def archive_stream!(stream)
clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path| clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path|
create_job_trace!(job, clone_path) create_build_trace!(job, clone_path)
end end
end end
...@@ -146,7 +146,7 @@ module Gitlab ...@@ -146,7 +146,7 @@ module Gitlab
end end
end end
def create_job_trace!(job, path) def create_build_trace!(job, path)
File.open(path) do |stream| File.open(path) do |stream|
job.create_job_artifacts_trace!( job.create_job_artifacts_trace!(
project: job.project, project: job.project,
......
...@@ -5,18 +5,18 @@ module Gitlab ...@@ -5,18 +5,18 @@ module Gitlab
module Ci module Ci
class Trace class Trace
class ChunkedIO class ChunkedIO
CHUNK_SIZE = ::Ci::JobTraceChunk::CHUNK_SIZE CHUNK_SIZE = ::Ci::BuildTraceChunk::CHUNK_SIZE
FailedToGetChunkError = Class.new(StandardError) FailedToGetChunkError = Class.new(StandardError)
attr_reader :job attr_reader :build
attr_reader :tell, :size attr_reader :tell, :size
attr_reader :chunk, :chunk_range attr_reader :chunk, :chunk_range
alias_method :pos, :tell alias_method :pos, :tell
def initialize(job, &block) def initialize(build, &block)
@job = job @build = build
@chunks_cache = [] @chunks_cache = []
@tell = 0 @tell = 0
@size = calculate_size @size = calculate_size
...@@ -140,10 +140,10 @@ module Gitlab ...@@ -140,10 +140,10 @@ module Gitlab
@size = offset @size = offset
# remove all next chunks # remove all next chunks
job_chunks.where('chunk_index > ?', chunk_index).fast_destroy_all trace_chunks.where('chunk_index > ?', chunk_index).fast_destroy_all
# truncate current chunk # truncate current chunk
current_chunk.truncate(chunk_offset) if chunk_offset != 0 current_chunk.truncate(chunk_offset)
ensure ensure
invalidate_chunk_cache invalidate_chunk_cache
end end
...@@ -157,7 +157,7 @@ module Gitlab ...@@ -157,7 +157,7 @@ module Gitlab
end end
def destroy! def destroy!
job_chunks.fast_destroy_all trace_chunks.fast_destroy_all
@tell = @size = 0 @tell = @size = 0
ensure ensure
invalidate_chunk_cache invalidate_chunk_cache
...@@ -206,23 +206,23 @@ module Gitlab ...@@ -206,23 +206,23 @@ module Gitlab
end end
def current_chunk def current_chunk
@chunks_cache[chunk_index] ||= job_chunks.find_by(chunk_index: chunk_index) @chunks_cache[chunk_index] ||= trace_chunks.find_by(chunk_index: chunk_index)
end end
def build_chunk def build_chunk
@chunks_cache[chunk_index] = ::Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index) @chunks_cache[chunk_index] = ::Ci::BuildTraceChunk.new(build: build, chunk_index: chunk_index)
end end
def ensure_chunk def ensure_chunk
current_chunk || build_chunk current_chunk || build_chunk
end end
def job_chunks def trace_chunks
::Ci::JobTraceChunk.where(job: job) ::Ci::BuildTraceChunk.where(build: build)
end end
def calculate_size def calculate_size
job_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i trace_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i
end end
end end
end end
......
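The ChunkedIO hunks rekey the stream on a build and back it with Ci::BuildTraceChunk instead of Ci::JobTraceChunk. A short usage sketch under those assumptions, using only methods exercised elsewhere in this diff (`build` is a running Ci::Build):

io = Gitlab::Ci::Trace::ChunkedIO.new(build)  # size comes from the last chunk's end_offset
io.write("line 1\nline 2\n")                  # appends through Ci::BuildTraceChunk records
io.seek(0, IO::SEEK_SET)
io.each_line { |line| puts line }             # reads CHUNK_SIZE (= BuildTraceChunk::CHUNK_SIZE) at a time
io.truncate(0)                                # fast-destroys later chunks, truncates the current one
io.destroy!                                   # trace_chunks.fast_destroy_all, size reset to 0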
...@@ -52,7 +52,6 @@ module Gitlab ...@@ -52,7 +52,6 @@ module Gitlab
stream.seek(0, IO::SEEK_SET) stream.seek(0, IO::SEEK_SET)
stream.write(data) stream.write(data)
stream.truncate(data.bytesize)
stream.flush() stream.flush()
end end
......
...@@ -62,21 +62,20 @@ module Gitlab ...@@ -62,21 +62,20 @@ module Gitlab
end end
def move_repositories(namespace, old_full_path, new_full_path) def move_repositories(namespace, old_full_path, new_full_path)
repo_paths_for_namespace(namespace).each do |repository_storage_path| repo_shards_for_namespace(namespace).each do |repository_storage|
# Ensure old directory exists before moving it # Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, old_full_path) gitlab_shell.add_namespace(repository_storage, old_full_path)
unless gitlab_shell.mv_namespace(repository_storage_path, old_full_path, new_full_path) unless gitlab_shell.mv_namespace(repository_storage, old_full_path, new_full_path)
message = "Exception moving path #{repository_storage_path} \ message = "Exception moving on shard #{repository_storage} from #{old_full_path} to #{new_full_path}"
from #{old_full_path} to #{new_full_path}"
Rails.logger.error message Rails.logger.error message
end end
end end
end end
def repo_paths_for_namespace(namespace) def repo_shards_for_namespace(namespace)
projects_for_namespace(namespace).distinct.select(:repository_storage) projects_for_namespace(namespace).distinct.select(:repository_storage)
.map(&:repository_storage_path) .map(&:repository_storage)
end end
def projects_for_namespace(namespace) def projects_for_namespace(namespace)
......
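This hunk switches namespace moves from a filesystem path to the shard name stored on the project (`repository_storage`). A hedged sketch of the new calling convention, with "default" assumed as the shard key and gitlab_shell being the Gitlab::Shell instance used above:

storage = project.repository_storage   # shard name, e.g. "default"
gitlab_shell.add_namespace(storage, old_full_path)   # ensure the old directory exists
unless gitlab_shell.mv_namespace(storage, old_full_path, new_full_path)
  Rails.logger.error "Exception moving on shard #{storage} from #{old_full_path} to #{new_full_path}"
end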
...@@ -51,7 +51,7 @@ module Gitlab ...@@ -51,7 +51,7 @@ module Gitlab
end end
def move_repository(project, old_path, new_path) def move_repository(project, old_path, new_path)
unless gitlab_shell.mv_repository(project.repository_storage_path, unless gitlab_shell.mv_repository(project.repository_storage,
old_path, old_path,
new_path) new_path)
Rails.logger.error "Error moving #{old_path} to #{new_path}" Rails.logger.error "Error moving #{old_path} to #{new_path}"
......
...@@ -3,7 +3,7 @@ module Gitlab ...@@ -3,7 +3,7 @@ module Gitlab
extend self extend self
# For every version update, the version history in import_export.md has to be kept up to date. # For every version update, the version history in import_export.md has to be kept up to date.
VERSION = '0.2.2'.freeze VERSION = '0.2.3'.freeze
FILENAME_LIMIT = 50 FILENAME_LIMIT = 50
def export_path(relative_path:) def export_path(relative_path:)
......
...@@ -65,11 +65,11 @@ module Gitlab ...@@ -65,11 +65,11 @@ module Gitlab
# Init new repository # Init new repository
# #
# storage - project's storage name # storage - the shard key
# name - project disk path # name - project disk path
# #
# Ex. # Ex.
# create_repository("/path/to/storage", "gitlab/gitlab-ci") # create_repository("default", "gitlab/gitlab-ci")
# #
def create_repository(storage, name) def create_repository(storage, name)
relative_path = name.dup relative_path = name.dup
...@@ -291,13 +291,13 @@ module Gitlab ...@@ -291,13 +291,13 @@ module Gitlab
# Add empty directory for storing repositories # Add empty directory for storing repositories
# #
# Ex. # Ex.
# add_namespace("/path/to/storage", "gitlab") # add_namespace("default", "gitlab")
# #
def add_namespace(storage, name) def add_namespace(storage, name)
Gitlab::GitalyClient.migrate(:add_namespace, Gitlab::GitalyClient.migrate(:add_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled| status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled if enabled
gitaly_namespace_client(storage).add(name) Gitlab::GitalyClient::NamespaceService.new(storage).add(name)
else else
path = full_path(storage, name) path = full_path(storage, name)
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name) FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
...@@ -313,13 +313,13 @@ module Gitlab ...@@ -313,13 +313,13 @@ module Gitlab
# Every repository inside this directory will be removed too # Every repository inside this directory will be removed too
# #
# Ex. # Ex.
# rm_namespace("/path/to/storage", "gitlab") # rm_namespace("default", "gitlab")
# #
def rm_namespace(storage, name) def rm_namespace(storage, name)
Gitlab::GitalyClient.migrate(:remove_namespace, Gitlab::GitalyClient.migrate(:remove_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled| status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled if enabled
gitaly_namespace_client(storage).remove(name) Gitlab::GitalyClient::NamespaceService.new(storage).remove(name)
else else
FileUtils.rm_r(full_path(storage, name), force: true) FileUtils.rm_r(full_path(storage, name), force: true)
end end
...@@ -338,7 +338,8 @@ module Gitlab ...@@ -338,7 +338,8 @@ module Gitlab
Gitlab::GitalyClient.migrate(:rename_namespace, Gitlab::GitalyClient.migrate(:rename_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled| status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled if enabled
gitaly_namespace_client(storage).rename(old_name, new_name) Gitlab::GitalyClient::NamespaceService.new(storage)
.rename(old_name, new_name)
else else
break false if exists?(storage, new_name) || !exists?(storage, old_name) break false if exists?(storage, new_name) || !exists?(storage, old_name)
...@@ -374,7 +375,8 @@ module Gitlab ...@@ -374,7 +375,8 @@ module Gitlab
Gitlab::GitalyClient.migrate(:namespace_exists, Gitlab::GitalyClient.migrate(:namespace_exists,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled| status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled if enabled
gitaly_namespace_client(storage).exists?(dir_name) Gitlab::GitalyClient::NamespaceService.new(storage)
.exists?(dir_name)
else else
File.exist?(full_path(storage, dir_name)) File.exist?(full_path(storage, dir_name))
end end
...@@ -398,7 +400,7 @@ module Gitlab ...@@ -398,7 +400,7 @@ module Gitlab
def full_path(storage, dir_name) def full_path(storage, dir_name)
raise ArgumentError.new("Directory name can't be blank") if dir_name.blank? raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
File.join(storage, dir_name) File.join(Gitlab.config.repositories.storages[storage].legacy_disk_path, dir_name)
end end
def gitlab_shell_projects_path def gitlab_shell_projects_path
...@@ -475,14 +477,6 @@ module Gitlab ...@@ -475,14 +477,6 @@ module Gitlab
Bundler.with_original_env { Popen.popen(cmd, nil, vars) } Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end end
def gitaly_namespace_client(storage_path)
storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
value.legacy_disk_path == storage_path
end
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def git_timeout def git_timeout
Gitlab.config.gitlab_shell.git_timeout Gitlab.config.gitlab_shell.git_timeout
end end
......
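Inside Gitlab::Shell itself, the namespace helpers now take the shard name and resolve the on-disk location from the storage configuration; the Gitaly NamespaceService is constructed directly from that shard instead of reverse-mapping a path. A sketch of what a caller sees, assuming "default" is a configured shard key:

shell = Gitlab::Shell.new
shell.add_namespace('default', 'gitlab')   # Gitaly path: NamespaceService.new('default').add('gitlab')
                                           # disk fallback: mkdir -p under the shard's legacy_disk_path
shell.exists?('default', 'gitlab')         # => true
shell.rm_namespace('default', 'gitlab')    # Gitaly path: NamespaceService.new('default').remove('gitlab')

# The disk fallback builds paths as in the full_path hunk above:
# File.join(Gitlab.config.repositories.storages['default'].legacy_disk_path, 'gitlab')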
...@@ -427,10 +427,7 @@ namespace :gitlab do ...@@ -427,10 +427,7 @@ namespace :gitlab do
user = User.find_by(username: username) user = User.find_by(username: username)
if user if user
repo_dirs = user.authorized_projects.map do |p| repo_dirs = user.authorized_projects.map do |p|
File.join( p.repository.path_to_repo
p.repository_storage_path,
"#{p.disk_path}.git"
)
end end
repo_dirs.each { |repo_dir| check_repo_integrity(repo_dir) } repo_dirs.each { |repo_dir| check_repo_integrity(repo_dir) }
......
...@@ -10,9 +10,8 @@ namespace :gitlab do ...@@ -10,9 +10,8 @@ namespace :gitlab do
end end
scope.find_each do |project| scope.find_each do |project|
base = File.join(project.repository_storage_path, project.disk_path) puts project.repository.path_to_repo
puts base + '.git' puts project.wiki.repository.path_to_repo
puts base + '.wiki.git'
end end
end end
end end
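Both rake-task hunks drop the manual File.join over repository_storage_path in favour of Repository#path_to_repo, which already knows its shard's disk location. A minimal sketch of the replacement, assuming `project` is any Project with a repository and a wiki:

# Before: File.join(project.repository_storage_path, "#{project.disk_path}.git")
# After:  let the repository compute its own absolute path.
puts project.repository.path_to_repo        # the project repository on disk
puts project.wiki.repository.path_to_repo   # the matching .wiki.git repository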
require Rails.root.join('db/migrate/limits_to_mysql') require Rails.root.join('db/migrate/limits_to_mysql')
require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql') require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql')
require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql') require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql')
require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql') require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
desc "GitLab | Add limits to strings in mysql database" desc "GitLab | Add limits to strings in mysql database"
task add_limits_mysql: :environment do task add_limits_mysql: :environment do
...@@ -9,5 +9,5 @@ task add_limits_mysql: :environment do ...@@ -9,5 +9,5 @@ task add_limits_mysql: :environment do
LimitsToMysql.new.up LimitsToMysql.new.up
MarkdownCacheLimitsToMysql.new.up MarkdownCacheLimitsToMysql.new.up
MergeRequestDiffFileLimitsToMysql.new.up MergeRequestDiffFileLimitsToMysql.new.up
LimitsCiJobTraceChunksRawDataForMysql.new.up LimitsCiBuildTraceChunksRawDataForMysql.new.up
end end
...@@ -8,8 +8,8 @@ msgid "" ...@@ -8,8 +8,8 @@ msgid ""
msgstr "" msgstr ""
"Project-Id-Version: gitlab 1.0.0\n" "Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n" "Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-04-17 11:44+0200\n" "POT-Creation-Date: 2018-04-24 13:19+0000\n"
"PO-Revision-Date: 2018-04-17 11:44+0200\n" "PO-Revision-Date: 2018-04-24 13:19+0000\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n" "Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n" "Language: \n"
...@@ -3136,6 +3136,9 @@ msgstr "" ...@@ -3136,6 +3136,9 @@ msgstr ""
msgid "Select Archive Format" msgid "Select Archive Format"
msgstr "" msgstr ""
msgid "Select a namespace to fork the project"
msgstr ""
msgid "Select a timezone" msgid "Select a timezone"
msgstr "" msgstr ""
......
...@@ -125,7 +125,7 @@ describe ProfilesController, :request_store do ...@@ -125,7 +125,7 @@ describe ProfilesController, :request_store do
user.reload user.reload
expect(response.status).to eq(302) expect(response.status).to eq(302)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{new_username}/#{project.path}.git")).to be_truthy
end end
end end
...@@ -143,7 +143,7 @@ describe ProfilesController, :request_store do ...@@ -143,7 +143,7 @@ describe ProfilesController, :request_store do
user.reload user.reload
expect(response.status).to eq(302) expect(response.status).to eq(302)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_truthy
expect(before_disk_path).to eq(project.disk_path) expect(before_disk_path).to eq(project.disk_path)
end end
end end
......
FactoryBot.define do FactoryBot.define do
factory :ci_job_trace_chunk, class: Ci::JobTraceChunk do factory :ci_build_trace_chunk, class: Ci::BuildTraceChunk do
job factory: :ci_build job factory: :ci_build
chunk_index 0 chunk_index 0
data_store :redis data_store :redis
......
...@@ -147,7 +147,8 @@ FactoryBot.define do ...@@ -147,7 +147,8 @@ FactoryBot.define do
# We delete hooks so that gitlab-shell will not try to authenticate with # We delete hooks so that gitlab-shell will not try to authenticate with
# an API that isn't running # an API that isn't running
FileUtils.rm_r(File.join(project.repository_storage_path, "#{project.disk_path}.git", 'hooks')) project.gitlab_shell.rm_directory(project.repository_storage,
File.join("#{project.disk_path}.git", 'hooks'))
end end
end end
...@@ -172,7 +173,8 @@ FactoryBot.define do ...@@ -172,7 +173,8 @@ FactoryBot.define do
after(:create) do |project| after(:create) do |project|
raise "Failed to create repository!" unless project.create_repository raise "Failed to create repository!" unless project.create_repository
FileUtils.rm_r(File.join(project.repository_storage_path, "#{project.disk_path}.git", 'refs')) project.gitlab_shell.rm_directory(project.repository_storage,
File.join("#{project.disk_path}.git", 'refs'))
end end
end end
......
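The factory hunks follow the same pattern: test-only filesystem cleanup goes through gitlab_shell with a shard name plus a path relative to that shard, instead of an absolute path. A sketch with the names used in the hunk (`project` is a factory-built project with a repository):

project.gitlab_shell.rm_directory(
  project.repository_storage,                      # shard name, e.g. "default"
  File.join("#{project.disk_path}.git", 'hooks')   # path relative to the shard's root
)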
...@@ -9,7 +9,8 @@ unless Object.respond_to?(:require_dependency) ...@@ -9,7 +9,8 @@ unless Object.respond_to?(:require_dependency)
end end
end end
# Defines Gitlab and Gitlab.config which are at the center of the app # Defines Settings and Gitlab.config which are at the center of the app
require_relative '../config/settings'
require_relative '../lib/gitlab' unless defined?(Gitlab.config) require_relative '../lib/gitlab' unless defined?(Gitlab.config)
require_relative 'support/rspec' require_relative 'support/rspec'
...@@ -274,16 +274,16 @@ describe ProjectsHelper do ...@@ -274,16 +274,16 @@ describe ProjectsHelper do
end end
end end
describe '#sanitized_import_error' do describe '#sanitize_repo_path' do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path }
before do before do
allow(project).to receive(:repository_storage_path).and_return('/base/repo/path')
allow(Settings.shared).to receive(:[]).with('path').and_return('/base/repo/export/path') allow(Settings.shared).to receive(:[]).with('path').and_return('/base/repo/export/path')
end end
it 'removes the repo path' do it 'removes the repo path' do
repo = '/base/repo/path/namespace/test.git' repo = "#{storage_path}/namespace/test.git"
import_error = "Could not clone #{repo}\n" import_error = "Could not clone #{repo}\n"
expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git') expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git')
......
...@@ -4,6 +4,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do ...@@ -4,6 +4,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
let!(:admin) { create(:admin) } let!(:admin) { create(:admin) }
let!(:base_dir) { Dir.mktmpdir + '/' } let!(:base_dir) { Dir.mktmpdir + '/' }
let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) } let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
let(:gitlab_shell) { Gitlab::Shell.new }
subject(:importer) { described_class.new(admin, bare_repository) } subject(:importer) { described_class.new(admin, bare_repository) }
...@@ -84,12 +85,14 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do ...@@ -84,12 +85,14 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
importer.create_project_if_needed importer.create_project_if_needed
project = Project.find_by_full_path(project_path) project = Project.find_by_full_path(project_path)
repo_path = File.join(project.repository_storage_path, project.disk_path + '.git') repo_path = "#{project.disk_path}.git"
hook_path = File.join(repo_path, 'hooks') hook_path = File.join(repo_path, 'hooks')
expect(File).to exist(repo_path) expect(gitlab_shell.exists?(project.repository_storage, repo_path)).to be(true)
expect(File.symlink?(hook_path)).to be true expect(gitlab_shell.exists?(project.repository_storage, hook_path)).to be(true)
expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
full_hook_path = File.join(project.repository.path_to_repo, 'hooks')
expect(File.readlink(full_hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
end end
context 'hashed storage enabled' do context 'hashed storage enabled' do
...@@ -144,8 +147,8 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do ...@@ -144,8 +147,8 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = Project.find_by_full_path("#{admin.full_path}/#{project_path}") project = Project.find_by_full_path("#{admin.full_path}/#{project_path}")
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git')) expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.git')).to be(true)
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git')) expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
end end
it 'moves an existing project to the correct path' do it 'moves an existing project to the correct path' do
...@@ -155,7 +158,9 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do ...@@ -155,7 +158,9 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = build(:project, :legacy_storage, :repository) project = build(:project, :legacy_storage, :repository)
original_commit_count = project.repository.commit_count original_commit_count = project.repository.commit_count
bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path) legacy_path = Gitlab.config.repositories.storages[project.repository_storage].legacy_disk_path
bare_repo = Gitlab::BareRepositoryImport::Repository.new(legacy_path, project.repository.path)
gitlab_importer = described_class.new(admin, bare_repo) gitlab_importer = described_class.new(admin, bare_repo)
expect(gitlab_importer).to receive(:create_project).and_call_original expect(gitlab_importer).to receive(:create_project).and_call_original
...@@ -183,7 +188,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do ...@@ -183,7 +188,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = Project.find_by_full_path(project_path) project = Project.find_by_full_path(project_path)
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git')) expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
end end
end end
......
...@@ -67,7 +67,7 @@ describe ::Gitlab::BareRepositoryImport::Repository do ...@@ -67,7 +67,7 @@ describe ::Gitlab::BareRepositoryImport::Repository do
end end
after do after do
gitlab_shell.remove_repository(root_path, hashed_path) gitlab_shell.remove_repository(repository_storage, hashed_path)
end end
subject { described_class.new(root_path, repo_path) } subject { described_class.new(root_path, repo_path) }
......
...@@ -3,8 +3,8 @@ require 'spec_helper' ...@@ -3,8 +3,8 @@ require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers include ChunkedIOHelpers
set(:job) { create(:ci_build, :running) } set(:build) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(job) } let(:chunked_io) { described_class.new(build) }
before do before do
stub_feature_flags(ci_enable_live_trace: true) stub_feature_flags(ci_enable_live_trace: true)
...@@ -13,7 +13,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -13,7 +13,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context "#initialize" do context "#initialize" do
context 'when a chunk exists' do context 'when a chunk exists' do
before do before do
job.trace.set('ABC') build.trace.set('ABC')
end end
it { expect(chunked_io.size).to eq(3) } it { expect(chunked_io.size).to eq(3) }
...@@ -22,7 +22,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -22,7 +22,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when two chunks exist' do context 'when two chunks exist' do
before do before do
stub_buffer_size(4) stub_buffer_size(4)
job.trace.set('ABCDEF') build.trace.set('ABCDEF')
end end
it { expect(chunked_io.size).to eq(6) } it { expect(chunked_io.size).to eq(6) }
...@@ -37,7 +37,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -37,7 +37,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.seek(pos, where) } subject { chunked_io.seek(pos, where) }
before do before do
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
context 'when moves pos to end of the file' do context 'when moves pos to end of the file' do
...@@ -68,7 +68,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -68,7 +68,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.eof? } subject { chunked_io.eof? }
before do before do
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
context 'when current pos is at end of the file' do context 'when current pos is at end of the file' do
...@@ -94,7 +94,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -94,7 +94,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'yields lines' do it 'yields lines' do
...@@ -106,7 +106,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -106,7 +106,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'calls get_chunk only once' do it 'calls get_chunk only once' do
...@@ -127,7 +127,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -127,7 +127,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it { is_expected.to eq(sample_trace_raw) } it { is_expected.to eq(sample_trace_raw) }
...@@ -136,7 +136,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -136,7 +136,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it { is_expected.to eq(sample_trace_raw) } it { is_expected.to eq(sample_trace_raw) }
...@@ -149,7 +149,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -149,7 +149,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -160,7 +160,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -160,7 +160,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -186,7 +186,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -186,7 +186,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -201,7 +201,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -201,7 +201,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -212,7 +212,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -212,7 +212,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'reads a trace' do it 'reads a trace' do
...@@ -238,7 +238,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -238,7 +238,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it_behaves_like 'all line matching' it_behaves_like 'all line matching'
...@@ -247,7 +247,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -247,7 +247,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it_behaves_like 'all line matching' it_behaves_like 'all line matching'
...@@ -256,7 +256,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -256,7 +256,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when pos is at middle of the file' do context 'when pos is at middle of the file' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
chunked_io.seek(chunked_io.size / 2) chunked_io.seek(chunked_io.size / 2)
string_io.seek(string_io.size / 2) string_io.seek(string_io.size / 2)
...@@ -316,7 +316,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -316,7 +316,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(exist_data) build.trace.set(exist_data)
end end
it_behaves_like 'appends a trace' it_behaves_like 'appends a trace'
...@@ -325,7 +325,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -325,7 +325,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(exist_data) build.trace.set(exist_data)
end end
it_behaves_like 'appends a trace' it_behaves_like 'appends a trace'
...@@ -349,7 +349,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -349,7 +349,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do context 'when buffer size is smaller than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize / 2) stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it_behaves_like 'truncates a trace' it_behaves_like 'truncates a trace'
...@@ -358,7 +358,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -358,7 +358,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do context 'when buffer size is larger than file size' do
before do before do
stub_buffer_size(sample_trace_raw.bytesize * 2) stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it_behaves_like 'truncates a trace' it_behaves_like 'truncates a trace'
...@@ -370,14 +370,14 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do ...@@ -370,14 +370,14 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.destroy! } subject { chunked_io.destroy! }
before do before do
job.trace.set(sample_trace_raw) build.trace.set(sample_trace_raw)
end end
it 'deletes' do it 'deletes' do
expect { subject }.to change { chunked_io.size } expect { subject }.to change { chunked_io.size }
.from(sample_trace_raw.bytesize).to(0) .from(sample_trace_raw.bytesize).to(0)
expect(Ci::JobTraceChunk.where(job: job).count).to eq(0) expect(Ci::BuildTraceChunk.where(build: build).count).to eq(0)
end end
end end
end end
require 'spec_helper' require 'spec_helper'
describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
set(:job) { create(:ci_build, :running) } set(:build) { create(:ci_build, :running) }
before do before do
stub_feature_flags(ci_enable_live_trace: true) stub_feature_flags(ci_enable_live_trace: true)
...@@ -83,7 +83,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -83,7 +83,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write((1..8).to_a.join("\n")) chunked_io.write((1..8).to_a.join("\n"))
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -137,7 +137,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -137,7 +137,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write('12345678') chunked_io.write('12345678')
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write('12345678') chunked_io.write('12345678')
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -234,7 +234,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -234,7 +234,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write(File.binread(path)) chunked_io.write(File.binread(path))
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -283,7 +283,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -283,7 +283,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write("1234") chunked_io.write("1234")
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -318,7 +318,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -318,7 +318,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write("12\n34\n56") chunked_io.write("12\n34\n56")
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
...@@ -473,7 +473,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do ...@@ -473,7 +473,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do context 'when stream is ChunkedIO' do
let(:stream) do let(:stream) do
described_class.new do described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io| Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write(data) chunked_io.write(data)
chunked_io.seek(0, IO::SEEK_SET) chunked_io.seek(0, IO::SEEK_SET)
end end
......
...@@ -436,7 +436,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do ...@@ -436,7 +436,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
it "can be erased" do it "can be erased" do
trace.erase! trace.erase!
expect(trace.exist?).to be(false) expect(trace.exist?).to be(false)
expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end end
it "returns live trace data" do it "returns live trace data" do
...@@ -512,7 +512,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do ...@@ -512,7 +512,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
expect(build.trace.exist?).to be_truthy expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log') expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum) expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest) .to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum) expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
......
...@@ -689,7 +689,7 @@ describe Gitlab::Git::Repository, seed_helper: true do ...@@ -689,7 +689,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end end
after do after do
Gitlab::Shell.new.remove_repository(storage_path, 'my_project') Gitlab::Shell.new.remove_repository('default', 'my_project')
end end
shared_examples 'repository mirror fetching' do shared_examples 'repository mirror fetching' do
......
...@@ -24,8 +24,8 @@ describe Gitlab::ImportExport::WikiRestorer do ...@@ -24,8 +24,8 @@ describe Gitlab::ImportExport::WikiRestorer do
after do after do
FileUtils.rm_rf(export_path) FileUtils.rm_rf(export_path)
Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage_path, project_with_wiki.wiki.disk_path) Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage, project_with_wiki.wiki.disk_path)
Gitlab::Shell.new.remove_repository(project.wiki.repository_storage_path, project.wiki.disk_path) Gitlab::Shell.new.remove_repository(project.wiki.repository_storage, project.wiki.disk_path)
end end
it 'restores the wiki repo successfully' do it 'restores the wiki repo successfully' do
......
...@@ -447,18 +447,18 @@ describe Gitlab::Shell do ...@@ -447,18 +447,18 @@ describe Gitlab::Shell do
let(:disk_path) { "#{project.disk_path}.git" } let(:disk_path) { "#{project.disk_path}.git" }
it 'returns true when the command succeeds' do it 'returns true when the command succeeds' do
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(true) expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(true)
expect(gitlab_shell.remove_repository(project.repository_storage_path, project.disk_path)).to be(true) expect(gitlab_shell.remove_repository(project.repository_storage, project.disk_path)).to be(true)
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(false) expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
end end
it 'keeps the namespace directory' do it 'keeps the namespace directory' do
gitlab_shell.remove_repository(project.repository_storage_path, project.disk_path) gitlab_shell.remove_repository(project.repository_storage, project.disk_path)
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(false) expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
expect(gitlab_shell.exists?(project.repository_storage_path, project.disk_path.gsub(project.name, ''))).to be(true) expect(gitlab_shell.exists?(project.repository_storage, project.disk_path.gsub(project.name, ''))).to be(true)
end end
end end
...@@ -469,18 +469,18 @@ describe Gitlab::Shell do ...@@ -469,18 +469,18 @@ describe Gitlab::Shell do
old_path = project2.disk_path old_path = project2.disk_path
new_path = "project/new_path" new_path = "project/new_path"
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{old_path}.git")).to be(true) expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(true)
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{new_path}.git")).to be(false) expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(false)
expect(gitlab_shell.mv_repository(project2.repository_storage_path, old_path, new_path)).to be_truthy expect(gitlab_shell.mv_repository(project2.repository_storage, old_path, new_path)).to be_truthy
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{old_path}.git")).to be(false) expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(false)
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{new_path}.git")).to be(true) expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(true)
end end
it 'returns false when the command fails' do it 'returns false when the command fails' do
expect(gitlab_shell.mv_repository(project2.repository_storage_path, project2.disk_path, '')).to be_falsy expect(gitlab_shell.mv_repository(project2.repository_storage, project2.disk_path, '')).to be_falsy
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{project2.disk_path}.git")).to be(true) expect(gitlab_shell.exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
end end
end end
...@@ -679,48 +679,48 @@ describe Gitlab::Shell do ...@@ -679,48 +679,48 @@ describe Gitlab::Shell do
describe 'namespace actions' do describe 'namespace actions' do
subject { described_class.new } subject { described_class.new }
let(:storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path } let(:storage) { Gitlab.config.repositories.storages.keys.first }
describe '#add_namespace' do describe '#add_namespace' do
it 'creates a namespace' do it 'creates a namespace' do
subject.add_namespace(storage_path, "mepmep") subject.add_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true) expect(subject.exists?(storage, "mepmep")).to be(true)
end end
end end
describe '#exists?' do describe '#exists?' do
context 'when the namespace does not exist' do context 'when the namespace does not exist' do
it 'returns false' do it 'returns false' do
expect(subject.exists?(storage_path, "non-existing")).to be(false) expect(subject.exists?(storage, "non-existing")).to be(false)
end end
end end
context 'when the namespace exists' do context 'when the namespace exists' do
it 'returns true' do it 'returns true' do
subject.add_namespace(storage_path, "mepmep") subject.add_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true) expect(subject.exists?(storage, "mepmep")).to be(true)
end end
end end
end end
describe '#remove' do describe '#remove' do
it 'removes the namespace' do it 'removes the namespace' do
subject.add_namespace(storage_path, "mepmep") subject.add_namespace(storage, "mepmep")
subject.rm_namespace(storage_path, "mepmep") subject.rm_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(false) expect(subject.exists?(storage, "mepmep")).to be(false)
end end
end end
describe '#mv_namespace' do describe '#mv_namespace' do
it 'renames the namespace' do it 'renames the namespace' do
subject.add_namespace(storage_path, "mepmep") subject.add_namespace(storage, "mepmep")
subject.mv_namespace(storage_path, "mepmep", "2mep") subject.mv_namespace(storage, "mepmep", "2mep")
expect(subject.exists?(storage_path, "mepmep")).to be(false) expect(subject.exists?(storage, "mepmep")).to be(false)
expect(subject.exists?(storage_path, "2mep")).to be(true) expect(subject.exists?(storage, "2mep")).to be(true)
end end
end end
end end
......
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
describe AssureCommitsCountForMergeRequestDiff, :migration, :sidekiq, :redis do
let(:migration) { spy('migration') }
before do
allow(Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount)
.to receive(:new).and_return(migration)
end
context 'when there are still unmigrated commit_counts afterwards' do
let(:namespaces) { table('namespaces') }
let(:projects) { table('projects') }
let(:merge_requests) { table('merge_requests') }
let(:diffs) { table('merge_request_diffs') }
before do
namespace = namespaces.create(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
merge_request = merge_requests.create!(source_branch: 'x', target_branch: 'y', target_project_id: project.id)
diffs.create!(commits_count: nil, merge_request_id: merge_request.id)
diffs.create!(commits_count: nil, merge_request_id: merge_request.id)
end
it 'migrates commit_counts sequentially in batches' do
migrate!
expect(migration).to have_received(:perform).once
end
end
end
require 'spec_helper' require 'spec_helper'
describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
set(:job) { create(:ci_build, :running) } set(:build) { create(:ci_build, :running) }
let(:chunk_index) { 0 } let(:chunk_index) { 0 }
let(:data_store) { :redis } let(:data_store) { :redis }
let(:raw_data) { nil } let(:raw_data) { nil }
let(:job_trace_chunk) do let(:build_trace_chunk) do
described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data) described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
end end
describe 'CHUNK_SIZE' do describe 'CHUNK_SIZE' do
...@@ -17,13 +17,13 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -17,13 +17,13 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#data' do describe '#data' do
subject { job_trace_chunk.data } subject { build_trace_chunk.data }
context 'when data_store is redis' do context 'when data_store is redis' do
let(:data_store) { :redis } let(:data_store) { :redis }
before do before do
job_trace_chunk.send(:redis_set_data, 'Sample data in redis') build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
end end
it { is_expected.to eq('Sample data in redis') } it { is_expected.to eq('Sample data in redis') }
...@@ -38,7 +38,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -38,7 +38,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is others' do context 'when data_store is others' do
before do before do
job_trace_chunk.send(:write_attribute, :data_store, -1) build_trace_chunk.send(:write_attribute, :data_store, -1)
end end
it { expect { subject }.to raise_error('Unsupported data store') } it { expect { subject }.to raise_error('Unsupported data store') }
...@@ -46,7 +46,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -46,7 +46,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#set_data' do describe '#set_data' do
subject { job_trace_chunk.set_data(value) } subject { build_trace_chunk.set_data(value) }
let(:value) { 'Sample data' } let(:value) { 'Sample data' }
...@@ -60,11 +60,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -60,11 +60,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis } let(:data_store) { :redis }
it do it do
expect(job_trace_chunk.send(:redis_data)).to be_nil expect(build_trace_chunk.send(:redis_data)).to be_nil
subject subject
expect(job_trace_chunk.send(:redis_data)).to eq(value) expect(build_trace_chunk.send(:redis_data)).to eq(value)
end end
context 'when fulfilled chunk size' do context 'when fulfilled chunk size' do
...@@ -82,26 +82,26 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -82,26 +82,26 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :db } let(:data_store) { :db }
it 'sets data' do it 'sets data' do
expect(job_trace_chunk.raw_data).to be_nil expect(build_trace_chunk.raw_data).to be_nil
subject subject
expect(job_trace_chunk.raw_data).to eq(value) expect(build_trace_chunk.raw_data).to eq(value)
expect(job_trace_chunk.persisted?).to be_truthy expect(build_trace_chunk.persisted?).to be_truthy
end end
context 'when raw_data is not changed' do context 'when raw_data is not changed' do
it 'does not execute UPDATE' do it 'does not execute UPDATE' do
expect(job_trace_chunk.raw_data).to be_nil expect(build_trace_chunk.raw_data).to be_nil
job_trace_chunk.save! build_trace_chunk.save!
# First set # First set
expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0 expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
expect(job_trace_chunk.raw_data).to eq(value) expect(build_trace_chunk.raw_data).to eq(value)
expect(job_trace_chunk.persisted?).to be_truthy expect(build_trace_chunk.persisted?).to be_truthy
# Second set # Second set
job_trace_chunk.reload build_trace_chunk.reload
expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0) expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
end end
end end
...@@ -117,7 +117,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -117,7 +117,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is others' do context 'when data_store is others' do
before do before do
job_trace_chunk.send(:write_attribute, :data_store, -1) build_trace_chunk.send(:write_attribute, :data_store, -1)
end end
it { expect { subject }.to raise_error('Unsupported data store') } it { expect { subject }.to raise_error('Unsupported data store') }
...@@ -125,7 +125,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -125,7 +125,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#truncate' do describe '#truncate' do
subject { job_trace_chunk.truncate(offset) } subject { build_trace_chunk.truncate(offset) }
shared_examples_for 'truncates' do shared_examples_for 'truncates' do
context 'when offset is negative' do context 'when offset is negative' do
...@@ -146,7 +146,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -146,7 +146,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'truncates' do it 'truncates' do
subject subject
expect(job_trace_chunk.data).to eq(data.byteslice(0, offset)) expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
end end
end end
end end
...@@ -156,7 +156,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -156,7 +156,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' } let(:data) { 'Sample data in redis' }
before do before do
job_trace_chunk.send(:redis_set_data, data) build_trace_chunk.send(:redis_set_data, data)
end end
it_behaves_like 'truncates' it_behaves_like 'truncates'
...@@ -172,7 +172,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -172,7 +172,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#append' do describe '#append' do
subject { job_trace_chunk.append(new_data, offset) } subject { build_trace_chunk.append(new_data, offset) }
let(:new_data) { 'Sample new data' } let(:new_data) { 'Sample new data' }
let(:offset) { 0 } let(:offset) { 0 }
...@@ -203,7 +203,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -203,7 +203,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'appends' do it 'appends' do
subject subject
expect(job_trace_chunk.data).to eq(total_data) expect(build_trace_chunk.data).to eq(total_data)
end end
end end
...@@ -213,7 +213,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -213,7 +213,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'appends' do it 'appends' do
subject subject
expect(job_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data) expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
end end
end end
end end
...@@ -223,7 +223,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -223,7 +223,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' } let(:data) { 'Sample data in redis' }
before do before do
job_trace_chunk.send(:redis_set_data, data) build_trace_chunk.send(:redis_set_data, data)
end end
it_behaves_like 'appends' it_behaves_like 'appends'
...@@ -239,7 +239,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -239,7 +239,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#size' do describe '#size' do
subject { job_trace_chunk.size } subject { build_trace_chunk.size }
context 'when data_store is redis' do context 'when data_store is redis' do
let(:data_store) { :redis } let(:data_store) { :redis }
...@@ -248,7 +248,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -248,7 +248,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' } let(:data) { 'Sample data in redis' }
before do before do
job_trace_chunk.send(:redis_set_data, data) build_trace_chunk.send(:redis_set_data, data)
end end
it { is_expected.to eq(data.bytesize) } it { is_expected.to eq(data.bytesize) }
...@@ -276,7 +276,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -276,7 +276,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
describe '#use_database!' do describe '#use_database!' do
subject { job_trace_chunk.use_database! } subject { build_trace_chunk.use_database! }
context 'when data_store is redis' do context 'when data_store is redis' do
let(:data_store) { :redis } let(:data_store) { :redis }
...@@ -285,19 +285,19 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -285,19 +285,19 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' } let(:data) { 'Sample data in redis' }
before do before do
job_trace_chunk.send(:redis_set_data, data) build_trace_chunk.send(:redis_set_data, data)
end end
it 'stashes the data' do it 'stashes the data' do
expect(job_trace_chunk.data_store).to eq('redis') expect(build_trace_chunk.data_store).to eq('redis')
expect(job_trace_chunk.send(:redis_data)).to eq(data) expect(build_trace_chunk.send(:redis_data)).to eq(data)
expect(job_trace_chunk.raw_data).to be_nil expect(build_trace_chunk.raw_data).to be_nil
subject subject
expect(job_trace_chunk.data_store).to eq('db') expect(build_trace_chunk.data_store).to eq('db')
expect(job_trace_chunk.send(:redis_data)).to be_nil expect(build_trace_chunk.send(:redis_data)).to be_nil
expect(job_trace_chunk.raw_data).to eq(data) expect(build_trace_chunk.raw_data).to eq(data)
end end
end end
...@@ -320,11 +320,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -320,11 +320,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
describe 'ExclusiveLock' do describe 'ExclusiveLock' do
before do before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil } allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil }
stub_const('Ci::JobTraceChunk::LOCK_RETRY', 1) stub_const('Ci::BuildTraceChunk::LOCK_RETRY', 1)
end end
it 'raise an error' do it 'raise an error' do
expect { job_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock') expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
end end
end end
...@@ -338,7 +338,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -338,7 +338,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project) create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
end end
shared_examples_for 'deletes all job_trace_chunk and data in redis' do shared_examples_for 'deletes all build_trace_chunk and data in redis' do
it do it do
project.builds.each do |build| project.builds.each do |build|
Gitlab::Redis::SharedState.with do |redis| Gitlab::Redis::SharedState.with do |redis|
...@@ -364,20 +364,20 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -364,20 +364,20 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end end
end end
context 'when job_trace_chunk is destroyed' do context 'when build_trace_chunk is destroyed' do
let(:subject) do let(:subject) do
project.builds.each { |build| build.chunks.destroy_all } project.builds.each { |build| build.chunks.destroy_all }
end end
it_behaves_like 'deletes all job_trace_chunk and data in redis' it_behaves_like 'deletes all build_trace_chunk and data in redis'
end end
context 'when job is destroyed' do context 'when build is destroyed' do
let(:subject) do let(:subject) do
project.builds.destroy_all project.builds.destroy_all
end end
it_behaves_like 'deletes all job_trace_chunk and data in redis' it_behaves_like 'deletes all build_trace_chunk and data in redis'
end end
context 'when project is destroyed' do context 'when project is destroyed' do
...@@ -385,7 +385,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -385,7 +385,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
project.destroy! project.destroy!
end end
it_behaves_like 'deletes all job_trace_chunk and data in redis' it_behaves_like 'deletes all build_trace_chunk and data in redis'
end end
end end
end end
...@@ -5,6 +5,7 @@ describe Namespace do ...@@ -5,6 +5,7 @@ describe Namespace do
let!(:namespace) { create(:namespace) } let!(:namespace) { create(:namespace) }
let(:gitlab_shell) { Gitlab::Shell.new } let(:gitlab_shell) { Gitlab::Shell.new }
let(:repository_storage) { 'default' }
describe 'associations' do describe 'associations' do
it { is_expected.to have_many :projects } it { is_expected.to have_many :projects }
...@@ -201,7 +202,7 @@ describe Namespace do ...@@ -201,7 +202,7 @@ describe Namespace do
it "moves dir if path changed" do it "moves dir if path changed" do
namespace.update_attributes(path: namespace.full_path + '_new') namespace.update_attributes(path: namespace.full_path + '_new')
expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{namespace.path}/#{project.path}.git")).to be_truthy
end end
context 'with subgroups', :nested_groups do context 'with subgroups', :nested_groups do
...@@ -281,7 +282,7 @@ describe Namespace do ...@@ -281,7 +282,7 @@ describe Namespace do
namespace.update_attributes(path: namespace.full_path + '_new') namespace.update_attributes(path: namespace.full_path + '_new')
expect(before_disk_path).to eq(project.disk_path) expect(before_disk_path).to eq(project.disk_path)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_truthy
end end
end end
...@@ -322,7 +323,7 @@ describe Namespace do ...@@ -322,7 +323,7 @@ describe Namespace do
end end
it 'schedules the namespace for deletion' do it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path) expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
namespace.destroy namespace.destroy
end end
...@@ -344,7 +345,7 @@ describe Namespace do ...@@ -344,7 +345,7 @@ describe Namespace do
end end
it 'schedules the namespace for deletion' do it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path) expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
child.destroy child.destroy
end end
......
...@@ -449,14 +449,6 @@ describe Project do ...@@ -449,14 +449,6 @@ describe Project do
end end
end end
describe '#repository_storage_path' do
let(:project) { create(:project) }
it 'returns the repository storage path' do
expect(Dir.exist?(project.repository_storage_path)).to be(true)
end
end
it 'returns valid url to repo' do it 'returns valid url to repo' do
project = described_class.new(path: 'somewhere') project = described_class.new(path: 'somewhere')
expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git') expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
...@@ -1108,7 +1100,7 @@ describe Project do ...@@ -1108,7 +1100,7 @@ describe Project do
end end
context 'repository storage by default' do context 'repository storage by default' do
let(:project) { create(:project) } let(:project) { build(:project) }
before do before do
storages = { storages = {
...@@ -1461,7 +1453,7 @@ describe Project do ...@@ -1461,7 +1453,7 @@ describe Project do
.and_return(false) .and_return(false)
allow(shell).to receive(:create_repository) allow(shell).to receive(:create_repository)
.with(project.repository_storage_path, project.disk_path) .with(project.repository_storage, project.disk_path)
.and_return(true) .and_return(true)
expect(project).to receive(:create_repository).with(force: true) expect(project).to receive(:create_repository).with(force: true)
...@@ -2636,7 +2628,7 @@ describe Project do ...@@ -2636,7 +2628,7 @@ describe Project do
describe '#ensure_storage_path_exists' do describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, project.base_dir) expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, project.base_dir)
project.ensure_storage_path_exists project.ensure_storage_path_exists
end end
...@@ -2675,12 +2667,12 @@ describe Project do ...@@ -2675,12 +2667,12 @@ describe Project do
expect(gitlab_shell).to receive(:mv_repository) expect(gitlab_shell).to receive(:mv_repository)
.ordered .ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}") .with(project.repository_storage, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true) .and_return(true)
expect(gitlab_shell).to receive(:mv_repository) expect(gitlab_shell).to receive(:mv_repository)
.ordered .ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki") .with(project.repository_storage, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true) .and_return(true)
expect_any_instance_of(SystemHooksService) expect_any_instance_of(SystemHooksService)
...@@ -2829,7 +2821,7 @@ describe Project do ...@@ -2829,7 +2821,7 @@ describe Project do
it 'delegates to gitlab_shell to ensure namespace is created' do it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell) allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix) expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, hashed_prefix)
project.ensure_storage_path_exists project.ensure_storage_path_exists
end end
......
...@@ -11,7 +11,7 @@ describe ProjectWiki do ...@@ -11,7 +11,7 @@ describe ProjectWiki do
subject { project_wiki } subject { project_wiki }
it { is_expected.to delegate_method(:empty?).to :pages } it { is_expected.to delegate_method(:empty?).to :pages }
it { is_expected.to delegate_method(:repository_storage_path).to :project } it { is_expected.to delegate_method(:repository_storage).to :project }
it { is_expected.to delegate_method(:hashed_storage?).to :project } it { is_expected.to delegate_method(:hashed_storage?).to :project }
describe "#full_path" do describe "#full_path" do
......
...@@ -53,8 +53,8 @@ describe Groups::DestroyService do ...@@ -53,8 +53,8 @@ describe Groups::DestroyService do
end end
it 'verifies that paths have been deleted' do it 'verifies that paths have been deleted' do
expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
end end
end end
end end
...@@ -71,13 +71,13 @@ describe Groups::DestroyService do ...@@ -71,13 +71,13 @@ describe Groups::DestroyService do
after do after do
# Clean up stale directories # Clean up stale directories
gitlab_shell.rm_namespace(project.repository_storage_path, group.path) gitlab_shell.rm_namespace(project.repository_storage, group.path)
gitlab_shell.rm_namespace(project.repository_storage_path, remove_path) gitlab_shell.rm_namespace(project.repository_storage, remove_path)
end end
it 'verifies original paths and projects still exist' do it 'verifies original paths and projects still exist' do
expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
expect(Project.unscoped.count).to eq(1) expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2) expect(Group.unscoped.count).to eq(2)
end end
...@@ -144,7 +144,7 @@ describe Groups::DestroyService do ...@@ -144,7 +144,7 @@ describe Groups::DestroyService do
let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) } let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) }
it 'removes repository' do it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end end
end end
...@@ -152,7 +152,7 @@ describe Groups::DestroyService do ...@@ -152,7 +152,7 @@ describe Groups::DestroyService do
let!(:project) { create(:project, :empty_repo, namespace: group) } let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end end
end end
end end
......
...@@ -171,7 +171,6 @@ describe Projects::CreateService, '#execute' do ...@@ -171,7 +171,6 @@ describe Projects::CreateService, '#execute' do
context 'when another repository already exists on disk' do context 'when another repository already exists on disk' do
let(:repository_storage) { 'default' } let(:repository_storage) { 'default' }
let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
let(:opts) do let(:opts) do
{ {
...@@ -186,7 +185,7 @@ describe Projects::CreateService, '#execute' do ...@@ -186,7 +185,7 @@ describe Projects::CreateService, '#execute' do
end end
after do after do
gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing") gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
end end
it 'does not allow to create a project when path matches existing repository on disk' do it 'does not allow to create a project when path matches existing repository on disk' do
...@@ -222,7 +221,7 @@ describe Projects::CreateService, '#execute' do ...@@ -222,7 +221,7 @@ describe Projects::CreateService, '#execute' do
end end
after do after do
gitlab_shell.remove_repository(repository_storage_path, hashed_path) gitlab_shell.remove_repository(repository_storage, hashed_path)
end end
it 'does not allow to create a project when path matches existing repository on disk' do it 'does not allow to create a project when path matches existing repository on disk' do
......
...@@ -18,8 +18,8 @@ describe Projects::DestroyService do ...@@ -18,8 +18,8 @@ describe Projects::DestroyService do
it 'deletes the project' do it 'deletes the project' do
expect(Project.unscoped.all).not_to include(project) expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.exists?(project.repository_storage_path, path + '.git')).to be_falsey expect(project.gitlab_shell.exists?(project.repository_storage, path + '.git')).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path + '.git')).to be_falsey expect(project.gitlab_shell.exists?(project.repository_storage, remove_path + '.git')).to be_falsey
end end
end end
...@@ -252,21 +252,21 @@ describe Projects::DestroyService do ...@@ -252,21 +252,21 @@ describe Projects::DestroyService do
let(:path) { project.disk_path + '.git' } let(:path) { project.disk_path + '.git' }
before do before do
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
# Don't run sidekiq to check if renamed repository exists # Don't run sidekiq to check if renamed repository exists
Sidekiq::Testing.fake! { destroy_project(project, user, {}) } Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_falsey expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_truthy
end end
it 'restores the repositories' do it 'restores the repositories' do
Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback } Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback }
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
end end
end end
......
...@@ -112,7 +112,7 @@ describe Projects::ForkService do ...@@ -112,7 +112,7 @@ describe Projects::ForkService do
end end
after do after do
gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}") gitlab_shell.remove_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
end end
it 'does not allow creation' do it 'does not allow creation' do
......
...@@ -16,8 +16,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do ...@@ -16,8 +16,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do
it 'renames project and wiki repositories' do it 'renames project and wiki repositories' do
service.execute service.execute
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
end end
it 'updates project to be hashed and not read-only' do it 'updates project to be hashed and not read-only' do
...@@ -52,8 +52,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do ...@@ -52,8 +52,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do
service.execute service.execute
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
expect(project.repository_read_only?).to be_falsey expect(project.repository_read_only?).to be_falsey
end end
...@@ -63,11 +63,11 @@ describe Projects::HashedStorage::MigrateRepositoryService do ...@@ -63,11 +63,11 @@ describe Projects::HashedStorage::MigrateRepositoryService do
before do before do
hashed_storage.ensure_storage_path_exists hashed_storage.ensure_storage_path_exists
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name) gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end end
it 'does not try to move nil repository over hashed' do it 'does not try to move nil repository over hashed' do
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name) expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, from_name, to_name)
expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki") expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
service.execute service.execute
...@@ -76,7 +76,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do ...@@ -76,7 +76,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
end end
def expect_move_repository(from_name, to_name) def expect_move_repository(from_name, to_name)
expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
end end
end end
end end
...@@ -84,7 +84,7 @@ describe Projects::TransferService do ...@@ -84,7 +84,7 @@ describe Projects::TransferService do
end end
def project_path(project) def project_path(project)
File.join(project.repository_storage_path, "#{project.disk_path}.git") project.repository.path_to_repo
end end
def current_path def current_path
...@@ -94,7 +94,7 @@ describe Projects::TransferService do ...@@ -94,7 +94,7 @@ describe Projects::TransferService do
it 'rolls back repo location' do it 'rolls back repo location' do
attempt_project_transfer attempt_project_transfer
expect(Dir.exist?(original_path)).to be_truthy expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be(true)
expect(original_path).to eq current_path expect(original_path).to eq current_path
end end
...@@ -165,7 +165,7 @@ describe Projects::TransferService do ...@@ -165,7 +165,7 @@ describe Projects::TransferService do
end end
after do after do
gitlab_shell.remove_repository(repository_storage_path, "#{group.full_path}/#{project.path}") gitlab_shell.remove_repository(repository_storage, "#{group.full_path}/#{project.path}")
end end
it { expect(@result).to eq false } it { expect(@result).to eq false }
......
...@@ -200,7 +200,7 @@ describe Projects::UpdateService do ...@@ -200,7 +200,7 @@ describe Projects::UpdateService do
end end
after do after do
gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing") gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
end end
it 'does not allow renaming when new path matches existing repository on disk' do it 'does not allow renaming when new path matches existing repository on disk' do
......
...@@ -176,7 +176,7 @@ describe Users::DestroyService do ...@@ -176,7 +176,7 @@ describe Users::DestroyService do
let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) } let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) }
it 'removes repository' do it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end end
end end
...@@ -184,7 +184,7 @@ describe Users::DestroyService do ...@@ -184,7 +184,7 @@ describe Users::DestroyService do
let!(:project) { create(:project, :empty_repo, namespace: user.namespace) } let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
it 'removes repository' do it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end end
end end
end end
......
...@@ -113,10 +113,10 @@ RSpec.configure do |config| ...@@ -113,10 +113,10 @@ RSpec.configure do |config|
m.call(*args) m.call(*args)
shard_name, repository_relative_path = args shard_name, repository_relative_path = args
shard_path = Gitlab.config.repositories.storages.fetch(shard_name).legacy_disk_path
# We can't leave the hooks in place after a fork, as those would fail in tests # We can't leave the hooks in place after a fork, as those would fail in tests
# The "internal" API is not available # The "internal" API is not available
FileUtils.rm_rf(File.join(shard_path, repository_relative_path, 'hooks')) Gitlab::Shell.new.rm_directory(shard_name,
File.join(repository_relative_path, 'hooks'))
end end
# Enable all features by default for testing # Enable all features by default for testing
......
...@@ -5,7 +5,7 @@ module ChunkedIOHelpers ...@@ -5,7 +5,7 @@ module ChunkedIOHelpers
end end
def stub_buffer_size(size) def stub_buffer_size(size)
stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size) stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size)
stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size) stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size)
end end
......
...@@ -31,7 +31,7 @@ module JavaScriptFixturesHelpers ...@@ -31,7 +31,7 @@ module JavaScriptFixturesHelpers
end end
def remove_repository(project) def remove_repository(project)
Gitlab::Shell.new.remove_repository(project.repository_storage_path, project.disk_path) Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
end end
private private
......
...@@ -218,7 +218,8 @@ module TestEnv ...@@ -218,7 +218,8 @@ module TestEnv
end end
def copy_repo(project, bare_repo:, refs:) def copy_repo(project, bare_repo:, refs:)
target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.disk_path}.git") target_repo_path = File.expand_path(repos_path + "/#{project.disk_path}.git")
FileUtils.mkdir_p(target_repo_path) FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path) FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path FileUtils.chmod_R 0755, target_repo_path
...@@ -226,7 +227,7 @@ module TestEnv ...@@ -226,7 +227,7 @@ module TestEnv
end end
def repos_path def repos_path
Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path @repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end end
def backup_path def backup_path
......
...@@ -195,15 +195,12 @@ describe 'gitlab:app namespace rake task' do ...@@ -195,15 +195,12 @@ describe 'gitlab:app namespace rake task' do
end end
context 'multiple repository storages' do context 'multiple repository storages' do
let(:storage_default) do
Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage'))
end
let(:test_second_storage) do let(:test_second_storage) do
Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/custom_storage')) Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/custom_storage'))
end end
let(:storages) do let(:storages) do
{ {
'default' => storage_default, 'default' => Gitlab.config.repositories.storages.default,
'test_second_storage' => test_second_storage 'test_second_storage' => test_second_storage
} }
end end
...@@ -215,8 +212,7 @@ describe 'gitlab:app namespace rake task' do ...@@ -215,8 +212,7 @@ describe 'gitlab:app namespace rake task' do
before do before do
# We only need a backup of the repositories for this test # We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry') stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage'))
FileUtils.mkdir(Settings.absolute('tmp/tests/custom_storage'))
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages) allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
# Avoid asking gitaly about the root ref (which will fail because of the # Avoid asking gitaly about the root ref (which will fail because of the
...@@ -225,14 +221,23 @@ describe 'gitlab:app namespace rake task' do ...@@ -225,14 +221,23 @@ describe 'gitlab:app namespace rake task' do
end end
after do after do
FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage'))
FileUtils.rm_rf(Settings.absolute('tmp/tests/custom_storage')) FileUtils.rm_rf(Settings.absolute('tmp/tests/custom_storage'))
end end
it 'includes repositories in all repository storages' do it 'includes repositories in all repository storages' do
project_a = create(:project, :repository, repository_storage: 'default') project_a = create(:project, :repository)
project_b = create(:project, :repository, repository_storage: 'test_second_storage') project_b = create(:project, :repository, repository_storage: 'test_second_storage')
b_storage_dir = File.join(Settings.absolute('tmp/tests/custom_storage'), File.dirname(project_b.disk_path))
FileUtils.mkdir_p(b_storage_dir)
# Even when overriding the storage, we have to move it there, so it exists
FileUtils.mv(
File.join(Settings.absolute(storages['default'].legacy_disk_path), project_b.repository.disk_path + '.git'),
Rails.root.join(storages['test_second_storage'].legacy_disk_path, project_b.repository.disk_path + '.git')
)
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
tar_contents, exit_status = Gitlab::Popen.popen( tar_contents, exit_status = Gitlab::Popen.popen(
......