Commit 1fb66181 authored by Shinya Maeda's avatar Shinya Maeda

Merge branch 'live-trace-v2' into live-trace-v2-efficient-destroy-all

parents abde73c0 f819bb72
......@@ -289,7 +289,6 @@ stages:
# Trigger a package build in omnibus-gitlab repository
#
package-and-qa:
<<: *dedicated-runner
image: ruby:2.4-alpine
before_script: []
stage: build
......
......@@ -40,10 +40,6 @@
.project-home-panel {
padding-left: 0 !important;
.project-avatar {
display: block;
}
.project-repo-buttons,
.git-clone-holder {
display: none;
......
......@@ -241,8 +241,6 @@
}
.scrolling-tabs-container {
position: relative;
.merge-request-tabs-container & {
overflow: hidden;
}
......@@ -272,8 +270,6 @@
}
.inner-page-scroll-tabs {
position: relative;
.fade-right {
@include fade(left, $white-light);
right: 0;
......
......@@ -314,6 +314,10 @@
display: inline-flex;
vertical-align: top;
&:hover .color-label {
text-decoration: underline;
}
.label {
vertical-align: inherit;
font-size: $label-font-size;
......
......@@ -400,7 +400,8 @@ module ProjectsHelper
exports_path = File.join(Settings.shared['path'], 'tmp/project_exports')
filtered_message = message.strip.gsub(exports_path, "[REPO EXPORT PATH]")
filtered_message.gsub(project.repository_storage_path.chomp('/'), "[REPOS PATH]")
disk_path = Gitlab.config.repositories.storages[project.repository_storage].legacy_disk_path
filtered_message.gsub(disk_path.chomp('/'), "[REPOS PATH]")
end
def project_child_container_class(view_path)
......
......@@ -19,14 +19,18 @@ module Ci
has_one :last_deployment, -> { order('deployments.id DESC') }, as: :deployable, class_name: 'Deployment'
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
has_many :trace_chunks, class_name: 'Ci::BuildTraceChunk', foreign_key: :build_id, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_one :job_artifacts_archive, -> { where(file_type: Ci::JobArtifact.file_types[:archive]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_metadata, -> { where(file_type: Ci::JobArtifact.file_types[:metadata]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
has_one :job_artifacts_trace, -> { where(file_type: Ci::JobArtifact.file_types[:trace]) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
<<<<<<< HEAD
has_many :chunks, class_name: 'Ci::JobTraceChunk', foreign_key: :job_id
=======
>>>>>>> live-trace-v2
has_one :metadata, class_name: 'Ci::BuildMetadata'
delegate :timeout, to: :metadata, prefix: true, allow_nil: true
delegate :gitlab_deploy_token, to: :project
......
module Ci
class JobTraceChunk < ActiveRecord::Base
class BuildTraceChunk < ActiveRecord::Base
extend Gitlab::Ci::Model
belongs_to :job, class_name: "Ci::Build", foreign_key: :job_id
belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id
default_value_for :data_store, :redis
......@@ -10,15 +10,16 @@ module Ci
CHUNK_SIZE = 128.kilobytes
CHUNK_REDIS_TTL = 1.week
LOCK_RETRY = 100
LOCK_SLEEP = 1
LOCK_TTL = 5.minutes
WRITE_LOCK_RETRY = 100
WRITE_LOCK_SLEEP = 1
WRITE_LOCK_TTL = 5.minutes
enum data_store: {
redis: 1,
db: 2
}
<<<<<<< HEAD:app/models/ci/job_trace_chunk.rb
def self.delayed_cleanup_blk
ids = all.redis.pluck(:job_id, :chunk_index).map do |data|
"gitlab:ci:trace:#{data.first}:chunks:#{data.second}:data"
......@@ -43,36 +44,16 @@ module Ci
end
end
=======
##
# Data is memoized for optimizing #size and #end_offset
>>>>>>> live-trace-v2:app/models/ci/build_trace_chunk.rb
def data
if redis?
redis_data
elsif db?
raw_data
else
raise 'Unsupported data store'
end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
end
def set_data(value)
raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
in_lock do
if redis?
redis_set_data(value)
elsif db?
self.raw_data = value
else
raise 'Unsupported data store'
end
save! if changed?
end
schedule_to_db if fullfilled?
@data ||= get_data
end
def truncate(offset = 0)
self.append("", offset)
self.append("", offset) if offset < size
end
def append(new_data, offset)
......@@ -80,7 +61,7 @@ module Ci
raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0
raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize
self.set_data(current_data.byteslice(0, offset) + new_data)
set_data(current_data.byteslice(0, offset) + new_data)
end
def size
......@@ -111,6 +92,36 @@ module Ci
private
def get_data
if redis?
redis_data
elsif db?
raw_data
else
raise 'Unsupported data store'
end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
end
def set_data(value)
raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE
in_lock do
if redis?
redis_set_data(value)
elsif db?
self.raw_data = value
else
raise 'Unsupported data store'
end
@data = value
save! if changed?
end
schedule_to_db if fullfilled?
end
def schedule_to_db
return if db?
......@@ -140,22 +151,22 @@ module Ci
end
def redis_data_key
"gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:data"
"gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data"
end
def redis_lock_key
"gitlab:ci:trace:#{job_id}:chunks:#{chunk_index}:lock"
"trace_write:#{build_id}:chunks:#{chunk_index}"
end
def in_lock
lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: LOCK_TTL)
lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: WRITE_LOCK_TTL)
retry_count = 0
until uuid = lease.try_obtain
# Keep trying until we obtain the lease. To prevent hammering Redis too
# much we'll wait for a bit between retries.
sleep(LOCK_SLEEP)
break if LOCK_RETRY < (retry_count += 1)
sleep(WRITE_LOCK_SLEEP)
break if WRITE_LOCK_RETRY < (retry_count += 1)
end
raise WriteError, 'Failed to obtain write lock' unless uuid
......
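
For context on the chunking above, here is a minimal sketch (not part of the change; the `chunk_position` helper is hypothetical) showing how an absolute trace offset maps to a chunk index and an in-chunk offset, assuming the 128 KB `CHUNK_SIZE` defined above:

```ruby
# Hypothetical helper, not part of the diff: maps an absolute trace offset to the
# chunk that holds it, assuming CHUNK_SIZE = 128.kilobytes as defined above.
CHUNK_SIZE = 128 * 1024

def chunk_position(offset)
  [offset / CHUNK_SIZE, offset % CHUNK_SIZE] # [chunk_index, offset within chunk]
end

chunk_position(300_000) # => [2, 37_856] : byte 300,000 lands in the third chunk
```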
......@@ -45,25 +45,25 @@ module Storage
# Hooks
# Save the storage paths before the projects are destroyed to use them on after destroy
# Save the storages before the projects are destroyed to use them on after destroy
def prepare_for_destroy
old_repository_storage_paths
old_repository_storages
end
private
def move_repositories
# Move the namespace directory in all storage paths used by member projects
repository_storage_paths.each do |repository_storage_path|
# Move the namespace directory in all storages used by member projects
repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, full_path_was)
gitlab_shell.add_namespace(repository_storage, full_path_was)
# Ensure new directory exists before moving it (if there's a parent)
gitlab_shell.add_namespace(repository_storage_path, parent.full_path) if parent
gitlab_shell.add_namespace(repository_storage, parent.full_path) if parent
unless gitlab_shell.mv_namespace(repository_storage_path, full_path_was, full_path)
unless gitlab_shell.mv_namespace(repository_storage, full_path_was, full_path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{full_path_was} to #{full_path}"
Rails.logger.error "Exception moving path #{repository_storage} from #{full_path_was} to #{full_path}"
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
......@@ -72,33 +72,33 @@ module Storage
end
end
def old_repository_storage_paths
@old_repository_storage_paths ||= repository_storage_paths
def old_repository_storages
@old_repository_storage_paths ||= repository_storages
end
def repository_storage_paths
def repository_storages
# We need to get the storage paths for all the projects, even the ones that are
# pending delete. Unscoping also gets rid of the default order, which causes
# problems with SELECT DISTINCT.
Project.unscoped do
all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage_path)
all_projects.select('distinct(repository_storage)').to_a.map(&:repository_storage)
end
end
def rm_dir
# Remove the namespace directory in all storages used by member projects
old_repository_storage_paths.each do |repository_storage_path|
old_repository_storages.each do |repository_storage|
# Move namespace directory into trash.
# We will remove it later async
new_path = "#{full_path}+#{id}+deleted"
if gitlab_shell.mv_namespace(repository_storage_path, full_path, new_path)
if gitlab_shell.mv_namespace(repository_storage, full_path, new_path)
Gitlab::AppLogger.info %Q(Namespace directory "#{full_path}" moved to "#{new_path}")
# Remove namespace directory async with delay so
# GitLab has time to remove all projects first
run_after_commit do
GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage_path, new_path)
GitlabShellWorker.perform_in(5.minutes, :rm_namespace, repository_storage, new_path)
end
end
end
......
......@@ -197,10 +197,6 @@ class MergeRequestDiff < ActiveRecord::Base
CompareService.new(project, head_commit_sha).execute(project, sha, straight: true)
end
def commits_count
super || merge_request_diff_commits.size
end
private
def create_merge_request_diff_files(diffs)
......
......@@ -527,10 +527,6 @@ class Project < ActiveRecord::Base
repository.empty?
end
def repository_storage_path
Gitlab.config.repositories.storages[repository_storage]&.legacy_disk_path
end
def team
@team ||= ProjectTeam.new(self)
end
......@@ -1114,7 +1110,7 @@ class Project < ActiveRecord::Base
# Check if repository already exists on disk
def check_repository_path_availability
return true if skip_disk_validation
return false unless repository_storage_path
return false unless repository_storage
expires_full_path_cache # we need to clear cache to validate renames correctly
......@@ -1919,14 +1915,14 @@ class Project < ActiveRecord::Base
def check_repository_absence!
return if skip_disk_validation
if repository_storage_path.blank? || repository_with_same_path_already_exists?
if repository_storage.blank? || repository_with_same_path_already_exists?
errors.add(:base, 'There is already a repository with that name on disk')
throw :abort
end
end
def repository_with_same_path_already_exists?
gitlab_shell.exists?(repository_storage_path, "#{disk_path}.git")
gitlab_shell.exists?(repository_storage, "#{disk_path}.git")
end
# set last_activity_at to the same as created_at
......
......@@ -21,7 +21,7 @@ class ProjectWiki
end
delegate :empty?, to: :pages
delegate :repository_storage_path, :hashed_storage?, to: :project
delegate :repository_storage, :hashed_storage?, to: :project
def path
@project.path + '.wiki'
......
......@@ -84,9 +84,14 @@ class Repository
# Return absolute path to repository
def path_to_repo
@path_to_repo ||= File.expand_path(
File.join(repository_storage_path, disk_path + '.git')
)
@path_to_repo ||=
begin
storage = Gitlab.config.repositories.storages[@project.repository_storage]
File.expand_path(
File.join(storage.legacy_disk_path, disk_path + '.git')
)
end
end
def inspect
......@@ -915,10 +920,6 @@ class Repository
raw_repository.fetch_ref(source_repository.raw_repository, source_ref: source_ref, target_ref: target_ref)
end
def repository_storage_path
@project.repository_storage_path
end
def rebase(user, merge_request)
raw.rebase(user, merge_request.id, branch: merge_request.source_branch,
branch_sha: merge_request.source_branch_sha,
......
module Storage
class HashedProject
attr_accessor :project
delegate :gitlab_shell, :repository_storage_path, to: :project
delegate :gitlab_shell, :repository_storage, to: :project
ROOT_PATH_PREFIX = '@hashed'.freeze
......@@ -24,7 +24,7 @@ module Storage
end
def ensure_storage_path_exists
gitlab_shell.add_namespace(repository_storage_path, base_dir)
gitlab_shell.add_namespace(repository_storage, base_dir)
end
def rename_repo
......
module Storage
class LegacyProject
attr_accessor :project
delegate :namespace, :gitlab_shell, :repository_storage_path, to: :project
delegate :namespace, :gitlab_shell, :repository_storage, to: :project
def initialize(project)
@project = project
......@@ -24,18 +24,18 @@ module Storage
def ensure_storage_path_exists
return unless namespace
gitlab_shell.add_namespace(repository_storage_path, base_dir)
gitlab_shell.add_namespace(repository_storage, base_dir)
end
def rename_repo
new_full_path = project.build_full_path
if gitlab_shell.mv_repository(repository_storage_path, project.full_path_was, new_full_path)
if gitlab_shell.mv_repository(repository_storage, project.full_path_was, new_full_path)
# If repository moved successfully we need to send update instructions to users.
# However we cannot allow rollback since we moved repository
# So we basically mute exceptions in next actions
begin
gitlab_shell.mv_repository(repository_storage_path, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
gitlab_shell.mv_repository(repository_storage, "#{project.full_path_was}.wiki", "#{new_full_path}.wiki")
return true
rescue => e
Rails.logger.error "Exception renaming #{project.full_path_was} -> #{new_full_path}: #{e}"
......
......@@ -91,7 +91,7 @@ module Projects
project.run_after_commit do
# self is now project
GitlabShellWorker.perform_in(5.minutes, :remove_repository, self.repository_storage_path, new_path)
GitlabShellWorker.perform_in(5.minutes, :remove_repository, self.repository_storage, new_path)
end
else
false
......@@ -100,9 +100,9 @@ module Projects
def mv_repository(from_path, to_path)
# There is a possibility project does not have repository or wiki
return true unless gitlab_shell.exists?(project.repository_storage_path, from_path + '.git')
return true unless gitlab_shell.exists?(project.repository_storage, from_path + '.git')
gitlab_shell.mv_repository(project.repository_storage_path, from_path, to_path)
gitlab_shell.mv_repository(project.repository_storage, from_path, to_path)
end
def attempt_rollback(project, message)
......
......@@ -47,8 +47,8 @@ module Projects
private
def move_repository(from_name, to_name)
from_exists = gitlab_shell.exists?(project.repository_storage_path, "#{from_name}.git")
to_exists = gitlab_shell.exists?(project.repository_storage_path, "#{to_name}.git")
from_exists = gitlab_shell.exists?(project.repository_storage, "#{from_name}.git")
to_exists = gitlab_shell.exists?(project.repository_storage, "#{to_name}.git")
# If we don't find the repository on either original or target we should log that as it could be an issue if the
# project was not originally empty.
......@@ -60,7 +60,7 @@ module Projects
return true
end
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end
def rollback_folder_move
......
......@@ -127,7 +127,7 @@ module Projects
end
def move_repo_folder(from_name, to_name)
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end
def execute_system_hooks
......
- breadcrumb_title "General Settings"
- @content_class = "limit-container-width" unless fluid_layout
.panel.panel-default.prepend-top-default
.panel-heading
Group settings
......
......@@ -12,7 +12,7 @@
- if @namespaces.present?
.fork-thumbnail-container.js-fork-content
%h5.prepend-top-0.append-bottom-0.prepend-left-default.append-right-default
Click to fork the project
= _("Select a namespace to fork the project")
- @namespaces.each do |namespace|
= render 'fork_button', namespace: namespace
- else
......
......@@ -32,6 +32,13 @@
required: true,
title: 'You can choose a descriptive name different from the path.'
- if @group.persisted?
.form-group.group-name-holder
= f.label :id, class: 'control-label' do
= _("Group ID")
.col-sm-10
= f.text_field :id, class: 'form-control', readonly: true
.form-group.group-description-holder
= f.label :description, class: 'control-label'
.col-sm-10
......
......@@ -4,9 +4,9 @@ class BuildTraceSwapChunkWorker
queue_namespace :pipeline_processing
def perform(job_trace_chunk_id)
Ci::JobTraceChunk.find_by(id: job_trace_chunk_id).try do |job_trace_chunk|
job_trace_chunk.use_database!
def perform(build_trace_chunk_id)
Ci::BuildTraceChunk.find_by(id: build_trace_chunk_id).try do |build_trace_chunk|
build_trace_chunk.use_database!
end
end
end
......@@ -13,7 +13,9 @@ class RepositoryForkWorker
# See https://gitlab.com/gitlab-org/gitaly/issues/1110
if args.empty?
source_project = target_project.forked_from_project
return target_project.mark_import_as_failed('Source project cannot be found.') unless source_project
unless source_project
return target_project.mark_import_as_failed('Source project cannot be found.')
end
fork_repository(target_project, source_project.repository_storage, source_project.disk_path)
else
......
---
title: Fix tabs container styles to make RSS button clickable
merge_request: 18559
author:
type: fixed
---
title: Replace "Click" with "Select" to be more inclusive of people with accessibility
requirements
merge_request: 18386
author: Mark Lapierre
type: other
---
title: Align project avatar on small viewports
merge_request: 18513
author: George Tsiolis
type: changed
---
title: Restore label underline color
merge_request: 18407
author: George Tsiolis
type: fixed
---
title: Show group id in group settings
merge_request: 18482
author: George Tsiolis
type: added
require_dependency File.expand_path('../../lib/gitlab', __dir__) # Load Gitlab as soon as possible
require_relative '../settings'
# Default settings
Settings['ldap'] ||= Settingslogic.new({})
......
require_relative '../../lib/gitlab'
deprecator = ActiveSupport::Deprecation.new('11.0', 'GitLab')
if Gitlab.com? || Rails.env.development?
if Gitlab.dev_env_or_com?
ActiveSupport::Deprecation.deprecate_methods(Gitlab::GitalyClient::StorageSettings, :legacy_disk_path, deprecator: deprecator)
end
......@@ -59,17 +59,17 @@ class RemoveDotGitFromGroupNames < ActiveRecord::Migration
end
def move_namespace(group_id, path_was, path)
repository_storage_paths = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{group_id}").map do |row|
Gitlab.config.repositories.storages[row['repository_storage']].legacy_disk_path
repository_storages = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{group_id}").map do |row|
row['repository_storage']
end.compact
# Move the namespace directory in all storages used by member projects
repository_storage_paths.each do |repository_storage_path|
repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, path_was)
gitlab_shell.add_namespace(repository_storage, path_was)
unless gitlab_shell.mv_namespace(repository_storage_path, path_was, path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{path_was} to #{path}"
unless gitlab_shell.mv_namespace(repository_storage, path_was, path)
Rails.logger.error "Exception moving on shard #{repository_storage} from #{path_was} to #{path}"
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
......
......@@ -53,8 +53,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
select_all("SELECT id, path FROM routes WHERE path = '#{quote_string(path)}'").present?
end
def path_exists?(path, repository_storage_path)
repository_storage_path && gitlab_shell.exists?(repository_storage_path, path)
def path_exists?(shard, repository_storage_path)
repository_storage_path && gitlab_shell.exists?(shard, repository_storage_path)
end
# Accepts invalid path like test.git and returns test_git or
......@@ -70,8 +70,8 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
def check_routes(base, counter, path)
route_exists = route_exists?(path)
Gitlab.config.repositories.storages.each_value do |storage|
if route_exists || path_exists?(path, storage.legacy_disk_path)
Gitlab.config.repositories.storages.each do |shard, storage|
if route_exists || path_exists?(shard, storage.legacy_disk_path)
counter += 1
path = "#{base}#{counter}"
......@@ -83,17 +83,17 @@ class RemoveDotGitFromUsernames < ActiveRecord::Migration
end
def move_namespace(namespace_id, path_was, path)
repository_storage_paths = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{namespace_id}").map do |row|
Gitlab.config.repositories.storages[row['repository_storage']].legacy_disk_path
repository_storages = select_all("SELECT distinct(repository_storage) FROM projects WHERE namespace_id = #{namespace_id}").map do |row|
row['repository_storage']
end.compact
# Move the namespace directory in all storages paths used by member projects
repository_storage_paths.each do |repository_storage_path|
# Move the namespace directory in all storages used by member projects
repository_storages.each do |repository_storage|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, path_was)
gitlab_shell.add_namespace(repository_storage, path_was)
unless gitlab_shell.mv_namespace(repository_storage_path, path_was, path)
Rails.logger.error "Exception moving path #{repository_storage_path} from #{path_was} to #{path}"
unless gitlab_shell.mv_namespace(repository_storage, path_was, path)
Rails.logger.error "Exception moving on shard #{repository_storage} from #{path_was} to #{path}"
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
......
class CreateCiJobTraceChunks < ActiveRecord::Migration
class CreateCiBuildTraceChunks < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
create_table :ci_job_trace_chunks, id: :bigserial do |t|
t.integer :job_id, null: false
create_table :ci_build_trace_chunks, id: :bigserial do |t|
t.integer :build_id, null: false
t.integer :chunk_index, null: false
t.integer :data_store, null: false
t.binary :raw_data
t.foreign_key :ci_builds, column: :job_id, on_delete: :cascade
t.index [:job_id, :chunk_index], unique: true
t.foreign_key :ci_builds, column: :build_id, on_delete: :cascade
t.index [:build_id, :chunk_index], unique: true
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql')
require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
class AddLimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration
class AddLimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
LimitsCiJobTraceChunksRawDataForMysql.new.up
LimitsCiBuildTraceChunksRawDataForMysql.new.up
end
end
class AssureCommitsCountForMergeRequestDiff < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
class MergeRequestDiff < ActiveRecord::Base
self.table_name = 'merge_request_diffs'
include ::EachBatch
end
def up
Gitlab::BackgroundMigration.steal('AddMergeRequestDiffCommitsCount')
MergeRequestDiff.where(commits_count: nil).each_batch(of: 50) do |batch|
range = batch.pluck('MIN(id)', 'MAX(id)').first
Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount.new.perform(*range)
end
end
def down
# noop
end
end
class LimitsCiJobTraceChunksRawDataForMysql < ActiveRecord::Migration
class LimitsCiBuildTraceChunksRawDataForMysql < ActiveRecord::Migration
def up
return unless Gitlab::Database.mysql?
# Mysql needs MEDIUMTEXT type (up to 16MB) rather than TEXT (up to 64KB)
# Because 'raw_data' is always capped by Ci::JobTraceChunk::CHUNK_SIZE, which is 128KB
change_column :ci_job_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT
# Because 'raw_data' is always capped by Ci::BuildTraceChunk::CHUNK_SIZE, which is 128KB
change_column :ci_build_trace_chunks, :raw_data, :binary, limit: 16.megabytes - 1 #MEDIUMTEXT
end
end
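
As a quick sanity check of the sizing comment above, this is plain Ruby arithmetic (not part of the migration) confirming that a 128 KB chunk exceeds MySQL's TEXT limit but fits inside MEDIUMTEXT:

```ruby
# Not part of the migration: a 128 KB chunk exceeds MySQL's TEXT limit (64 KB)
# but fits comfortably inside MEDIUMTEXT (16 MB - 1 byte), the limit used above.
text_limit       = 64 * 1024
mediumtext_limit = 16 * 1024 * 1024 - 1
chunk_size       = 128 * 1024

chunk_size > text_limit        # => true  (TEXT would truncate raw_data)
chunk_size <= mediumtext_limit # => true  (MEDIUMTEXT is large enough)
```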
......@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180418053107) do
ActiveRecord::Schema.define(version: 20180425205249) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
......@@ -246,6 +246,15 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_index "chat_teams", ["namespace_id"], name: "index_chat_teams_on_namespace_id", unique: true, using: :btree
create_table "ci_build_trace_chunks", id: :bigserial, force: :cascade do |t|
t.integer "build_id", null: false
t.integer "chunk_index", null: false
t.integer "data_store", null: false
t.binary "raw_data"
end
add_index "ci_build_trace_chunks", ["build_id", "chunk_index"], name: "index_ci_build_trace_chunks_on_build_id_and_chunk_index", unique: true, using: :btree
create_table "ci_build_trace_section_names", force: :cascade do |t|
t.integer "project_id", null: false
t.string "name", null: false
......@@ -371,15 +380,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_index "ci_job_artifacts", ["job_id", "file_type"], name: "index_ci_job_artifacts_on_job_id_and_file_type", unique: true, using: :btree
add_index "ci_job_artifacts", ["project_id"], name: "index_ci_job_artifacts_on_project_id", using: :btree
create_table "ci_job_trace_chunks", id: :bigserial, force: :cascade do |t|
t.integer "job_id", null: false
t.integer "chunk_index", null: false
t.integer "data_store", null: false
t.binary "raw_data"
end
add_index "ci_job_trace_chunks", ["job_id", "chunk_index"], name: "index_ci_job_trace_chunks_on_job_id_and_chunk_index", unique: true, using: :btree
create_table "ci_pipeline_schedule_variables", force: :cascade do |t|
t.string "key", null: false
t.text "value"
......@@ -2075,6 +2075,7 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_foreign_key "boards", "namespaces", column: "group_id", on_delete: :cascade
add_foreign_key "boards", "projects", name: "fk_f15266b5f9", on_delete: :cascade
add_foreign_key "chat_teams", "namespaces", on_delete: :cascade
add_foreign_key "ci_build_trace_chunks", "ci_builds", column: "build_id", on_delete: :cascade
add_foreign_key "ci_build_trace_section_names", "projects", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_build_trace_section_names", column: "section_name_id", name: "fk_264e112c66", on_delete: :cascade
add_foreign_key "ci_build_trace_sections", "ci_builds", column: "build_id", name: "fk_4ebe41f502", on_delete: :cascade
......@@ -2087,7 +2088,6 @@ ActiveRecord::Schema.define(version: 20180418053107) do
add_foreign_key "ci_group_variables", "namespaces", column: "group_id", name: "fk_33ae4d58d8", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_job_artifacts", "projects", on_delete: :cascade
add_foreign_key "ci_job_trace_chunks", "ci_builds", column: "job_id", on_delete: :cascade
add_foreign_key "ci_pipeline_schedule_variables", "ci_pipeline_schedules", column: "pipeline_schedule_id", name: "fk_41c35fda51", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "projects", name: "fk_8ead60fcc4", on_delete: :cascade
add_foreign_key "ci_pipeline_schedules", "users", column: "owner_id", name: "fk_9ea99f58d2", on_delete: :nullify
......
......@@ -13,6 +13,7 @@ following locations:
- [Broadcast Messages](broadcast_messages.md)
- [Project-level Variables](project_level_variables.md)
- [Group-level Variables](group_level_variables.md)
- [Code Snippets](snippets.md)
- [Commits](commits.md)
- [Custom Attributes](custom_attributes.md)
- [Deployments](deployments.md)
......
......@@ -298,6 +298,28 @@ Mentioned briefly earlier, but the following things of Runners can be exploited.
We're always looking for contributions that can mitigate these
[Security Considerations](https://docs.gitlab.com/runner/security/).
### Resetting the registration token for a Project
If you think that the registration token for a Project was revealed, you should
reset it. Such a token can be used to register another Runner for the Project,
which may then be used to obtain the values of secret variables or to clone
project code that would otherwise be unavailable to the attacker.
To reset the token:
1. Go to **Settings > CI/CD** for a specified Project
1. Expand the **General pipelines settings** section
1. Find the **Runner token** form field and click the **Reveal value** button
1. Delete the value and save the form
1. After the page is refreshed, expand the **Runners settings** section
and check the registration token - it should be changed
From now on the old token is no longer valid and cannot be used to register
a new Runner for the project. If you use any tools to provision and register
new Runners, update the token they use to the new value.
## Determining the IP address of a Runner
> [Introduced](https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/17286) in GitLab 10.6.
......
......@@ -61,7 +61,7 @@ future GitLab releases.**
| **CI_RUNNER_EXECUTABLE_ARCH** | all | 10.6 | The OS/architecture of the GitLab Runner executable (note that this is not necessarily the same as the environment of the executor) |
| **CI_PIPELINE_ID** | 8.10 | 0.5 | The unique id of the current pipeline that GitLab CI uses internally |
| **CI_PIPELINE_TRIGGERED** | all | all | The flag to indicate that job was [triggered] |
| **CI_PIPELINE_SOURCE** | 10.0 | all | The source for this pipeline, one of: push, web, trigger, schedule, api, external. Pipelines created before 9.5 will have unknown as source |
| **CI_PIPELINE_SOURCE** | 10.0 | all | Indicates how the pipeline was triggered. Possible options are: `push`, `web`, `trigger`, `schedule`, `api`, and `pipeline`. For pipelines created before GitLab 9.5, this will show as `unknown` |
| **CI_PROJECT_DIR** | all | all | The full path where the repository is cloned and where the job is run |
| **CI_PROJECT_ID** | all | all | The unique id of the current project that GitLab CI uses internally |
| **CI_PROJECT_NAME** | 8.10 | 0.5 | The project name that is currently being built (actually it is project folder name) |
......
......@@ -10,10 +10,9 @@ should be deployed, upgraded, and configured.
## Chart Overview
* **[GitLab-Omnibus](gitlab_omnibus.md)**: The best way to run GitLab on Kubernetes today, suited for small deployments. The chart is in beta and will be deprecated by the [cloud native GitLab chart](#cloud-native-gitlab-chart).
* **[Cloud Native GitLab Chart](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md)**: The next generation GitLab chart, currently in alpha. Will support large deployments with horizontal scaling of individual GitLab components.
* **[Cloud Native GitLab Chart](https://gitlab.com/charts/gitlab/blob/master/README.md)**: The next generation GitLab chart, currently in alpha. Will support large deployments with horizontal scaling of individual GitLab components.
* Other Charts
* [GitLab Runner Chart](gitlab_runner_chart.md): For deploying just the GitLab Runner.
* [Advanced GitLab Installation](gitlab_chart.md): Deprecated, being replaced by the [cloud native GitLab chart](#cloud-native-gitlab-chart). Provides additional deployment options, but provides less functionality out-of-the-box.
* [Community Contributed Charts](#community-contributed-charts): Community contributed charts, deprecated by the official GitLab chart.
## GitLab-Omnibus Chart (Recommended)
......@@ -27,7 +26,7 @@ Learn more about the [gitlab-omnibus chart](gitlab_omnibus.md).
## Cloud Native GitLab Chart
GitLab is working towards building a [cloud native GitLab chart](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md). A key part of this effort is to isolate each service into its [own Docker container and Helm chart](https://gitlab.com/gitlab-org/omnibus-gitlab/issues/2420), rather than utilizing the all-in-one container image of the [current chart](#gitlab-omnibus-chart-recommended).
GitLab is working towards building a [cloud native GitLab chart](https://gitlab.com/charts/gitlab/blob/master/README.md). A key part of this effort is to isolate each service into its [own Docker container and Helm chart](https://gitlab.com/gitlab-org/omnibus-gitlab/issues/2420), rather than utilizing the all-in-one container image of the [current chart](#gitlab-omnibus-chart-recommended).
By offering individual containers and charts, we will be able to provide a number of benefits:
* Easier horizontal scaling of each service,
......@@ -37,7 +36,7 @@ By offering individual containers and charts, we will be able to provide a numbe
Presently this chart is available in alpha for testing, and not recommended for production use.
Learn more about the [cloud native GitLab chart here ](https://gitlab.com/charts/helm.gitlab.io/blob/master/README.md) and [here [Video]](https://youtu.be/Z6jWR8Z8dv8).
Learn more about the [cloud native GitLab chart here ](https://gitlab.com/charts/gitlab/blob/master/README.md) and [here [Video]](https://youtu.be/Z6jWR8Z8dv8).
## Other Charts
......
......@@ -69,7 +69,7 @@ GitHub will generate an application ID and secret key for you to use.
"name" => "github",
"app_id" => "YOUR_APP_ID",
"app_secret" => "YOUR_APP_SECRET",
"url" => "https://github.com/",
"url" => "https://github.example.com/",
"args" => { "scope" => "user:email" }
}
]
......@@ -125,7 +125,7 @@ For omnibus package:
"name" => "github",
"app_id" => "YOUR_APP_ID",
"app_secret" => "YOUR_APP_SECRET",
"url" => "https://github.com/",
"url" => "https://github.example.com/",
"verify_ssl" => false,
"args" => { "scope" => "user:email" }
}
......
......@@ -10,8 +10,30 @@ applications.
## Overview
With Auto DevOps, the software development process becomes easier to set up
as every project can have a complete workflow from build to deploy and monitoring,
with minimal to zero configuration.
as every project can have a complete workflow from verification to monitoring
without needing to configure anything. Just push your code and GitLab takes
care of everything else. This makes it easier to start new projects and brings
consistency to how applications are set up throughout a company.
## Comparison to application platforms and PaaS
Auto DevOps provides functionality described by others as an application
platform or as a Platform as a Service (PaaS). It takes inspiration from the
innovative work done by [Heroku](https://www.heroku.com/) and goes beyond it
in a couple of ways:
1. Auto DevOps works with any Kubernetes cluster; you're not limited to running
on GitLab's infrastructure (note that many features also work without Kubernetes).
1. There is no additional cost (no markup on the infrastructure costs), and you
can use a self-hosted Kubernetes cluster or Containers as a Service on any
public cloud (for example [Google Kubernetes Engine](https://cloud.google.com/kubernetes-engine/)).
1. Auto DevOps has more features including security testing, performance testing,
and code quality testing.
1. It offers an incremental graduation path. If you need advanced customizations
you can start modifying the templates without having to start over on a
completely different platform.
## Features
Comprised of a set of stages, Auto DevOps brings these best practices to your
project in an easy and automatic way:
......
doc/user/group/img/groups.png: image updated (198 KB → 244 KB)
......@@ -31,7 +31,8 @@ with all their related data and be moved into a new GitLab instance.
| GitLab version | Import/Export version |
| ---------------- | --------------------- |
| 10.4 to current | 0.2.2 |
| 10.8 to current | 0.2.3 |
| 10.4 | 0.2.2 |
| 10.3 | 0.2.1 |
| 10.0 | 0.2.0 |
| 9.4.0 | 0.1.8 |
......
require_dependency 'settings'
require_dependency 'gitlab/popen'
module Gitlab
......@@ -30,6 +29,6 @@ module Gitlab
end
def self.dev_env_or_com?
Rails.env.test? || Rails.env.development? || org? || com?
Rails.env.development? || org? || com?
end
end
......@@ -75,10 +75,11 @@ module Gitlab
end
def mv_repo(project)
FileUtils.mv(repo_path, File.join(project.repository_storage_path, project.disk_path + '.git'))
storage_path = storage_path_for_shard(project.repository_storage)
FileUtils.mv(repo_path, project.repository.path_to_repo)
if bare_repo.wiki_exists?
FileUtils.mv(wiki_path, File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
FileUtils.mv(wiki_path, File.join(storage_path, project.disk_path + '.wiki.git'))
end
true
......@@ -88,6 +89,10 @@ module Gitlab
false
end
def storage_path_for_shard(shard)
Gitlab.config.repositories.storages[shard].legacy_disk_path
end
def find_or_create_groups
return nil unless group_path.present?
......
......@@ -54,14 +54,14 @@ module Gitlab
end
def exist?
trace_artifact&.exists? || job.chunks.any? || current_path.present? || old_trace.present?
trace_artifact&.exists? || job.trace_chunks.any? || current_path.present? || old_trace.present?
end
def read
stream = Gitlab::Ci::Trace::Stream.new do
if trace_artifact
trace_artifact.open
elsif job.chunks.any?
elsif job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job)
elsif current_path
File.open(current_path, "rb")
......@@ -100,7 +100,7 @@ module Gitlab
FileUtils.rm(trace_path, force: true)
end
job.chunks.fast_destroy_all
job.trace_chunks.fast_destroy_all
job.erase_old_trace!
end
......@@ -108,7 +108,7 @@ module Gitlab
raise ArchiveError, 'Already archived' if trace_artifact
raise ArchiveError, 'Job is not finished yet' unless job.complete?
if job.chunks.any?
if job.trace_chunks.any?
Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream|
archive_stream!(stream)
stream.delete!
......@@ -130,7 +130,7 @@ module Gitlab
def archive_stream!(stream)
clone_file!(stream, JobArtifactUploader.workhorse_upload_path) do |clone_path|
create_job_trace!(job, clone_path)
create_build_trace!(job, clone_path)
end
end
......@@ -146,7 +146,7 @@ module Gitlab
end
end
def create_job_trace!(job, path)
def create_build_trace!(job, path)
File.open(path) do |stream|
job.create_job_artifacts_trace!(
project: job.project,
......
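
A minimal usage sketch of the read path above (assumptions: `build` is a running `Ci::Build` whose trace still lives in chunks, and `ChunkedIO` exposes an IO-like `#read`; this is illustration only, not part of the diff):

```ruby
# Sketch only, mirroring Trace#read for a build whose trace has not yet been
# archived as an artifact; method names follow the code above.
if build.trace_chunks.any?
  stream = Gitlab::Ci::Trace::ChunkedIO.new(build)
  puts stream.read # assumes ChunkedIO's IO-like #read
end
```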
......@@ -5,18 +5,18 @@ module Gitlab
module Ci
class Trace
class ChunkedIO
CHUNK_SIZE = ::Ci::JobTraceChunk::CHUNK_SIZE
CHUNK_SIZE = ::Ci::BuildTraceChunk::CHUNK_SIZE
FailedToGetChunkError = Class.new(StandardError)
attr_reader :job
attr_reader :build
attr_reader :tell, :size
attr_reader :chunk, :chunk_range
alias_method :pos, :tell
def initialize(job, &block)
@job = job
def initialize(build, &block)
@build = build
@chunks_cache = []
@tell = 0
@size = calculate_size
......@@ -140,10 +140,10 @@ module Gitlab
@size = offset
# remove all next chunks
job_chunks.where('chunk_index > ?', chunk_index).fast_destroy_all
trace_chunks.where('chunk_index > ?', chunk_index).fast_destroy_all
# truncate current chunk
current_chunk.truncate(chunk_offset) if chunk_offset != 0
current_chunk.truncate(chunk_offset)
ensure
invalidate_chunk_cache
end
......@@ -157,7 +157,7 @@ module Gitlab
end
def destroy!
job_chunks.fast_destroy_all
trace_chunks.fast_destroy_all
@tell = @size = 0
ensure
invalidate_chunk_cache
......@@ -206,23 +206,23 @@ module Gitlab
end
def current_chunk
@chunks_cache[chunk_index] ||= job_chunks.find_by(chunk_index: chunk_index)
@chunks_cache[chunk_index] ||= trace_chunks.find_by(chunk_index: chunk_index)
end
def build_chunk
@chunks_cache[chunk_index] = ::Ci::JobTraceChunk.new(job: job, chunk_index: chunk_index)
@chunks_cache[chunk_index] = ::Ci::BuildTraceChunk.new(build: build, chunk_index: chunk_index)
end
def ensure_chunk
current_chunk || build_chunk
end
def job_chunks
::Ci::JobTraceChunk.where(job: job)
def trace_chunks
::Ci::BuildTraceChunk.where(build: build)
end
def calculate_size
job_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i
trace_chunks.order(chunk_index: :desc).first.try(&:end_offset).to_i
end
end
end
......
......@@ -52,7 +52,6 @@ module Gitlab
stream.seek(0, IO::SEEK_SET)
stream.write(data)
stream.truncate(data.bytesize)
stream.flush()
end
......
......@@ -62,21 +62,20 @@ module Gitlab
end
def move_repositories(namespace, old_full_path, new_full_path)
repo_paths_for_namespace(namespace).each do |repository_storage_path|
repo_shards_for_namespace(namespace).each do |repository_storage|
# Ensure old directory exists before moving it
gitlab_shell.add_namespace(repository_storage_path, old_full_path)
gitlab_shell.add_namespace(repository_storage, old_full_path)
unless gitlab_shell.mv_namespace(repository_storage_path, old_full_path, new_full_path)
message = "Exception moving path #{repository_storage_path} \
from #{old_full_path} to #{new_full_path}"
unless gitlab_shell.mv_namespace(repository_storage, old_full_path, new_full_path)
message = "Exception moving on shard #{repository_storage} from #{old_full_path} to #{new_full_path}"
Rails.logger.error message
end
end
end
def repo_paths_for_namespace(namespace)
def repo_shards_for_namespace(namespace)
projects_for_namespace(namespace).distinct.select(:repository_storage)
.map(&:repository_storage_path)
.map(&:repository_storage)
end
def projects_for_namespace(namespace)
......
......@@ -51,7 +51,7 @@ module Gitlab
end
def move_repository(project, old_path, new_path)
unless gitlab_shell.mv_repository(project.repository_storage_path,
unless gitlab_shell.mv_repository(project.repository_storage,
old_path,
new_path)
Rails.logger.error "Error moving #{old_path} to #{new_path}"
......
......@@ -3,7 +3,7 @@ module Gitlab
extend self
# For every version update, the version history in import_export.md has to be kept up to date.
VERSION = '0.2.2'.freeze
VERSION = '0.2.3'.freeze
FILENAME_LIMIT = 50
def export_path(relative_path:)
......
......@@ -65,11 +65,11 @@ module Gitlab
# Init new repository
#
# storage - project's storage name
# storage - the shard key
# name - project disk path
#
# Ex.
# create_repository("/path/to/storage", "gitlab/gitlab-ci")
# create_repository("default", "gitlab/gitlab-ci")
#
def create_repository(storage, name)
relative_path = name.dup
......@@ -291,13 +291,13 @@ module Gitlab
# Add empty directory for storing repositories
#
# Ex.
# add_namespace("/path/to/storage", "gitlab")
# add_namespace("default", "gitlab")
#
def add_namespace(storage, name)
Gitlab::GitalyClient.migrate(:add_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).add(name)
Gitlab::GitalyClient::NamespaceService.new(storage).add(name)
else
path = full_path(storage, name)
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
......@@ -313,13 +313,13 @@ module Gitlab
# Every repository inside this directory will be removed too
#
# Ex.
# rm_namespace("/path/to/storage", "gitlab")
# rm_namespace("default", "gitlab")
#
def rm_namespace(storage, name)
Gitlab::GitalyClient.migrate(:remove_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).remove(name)
Gitlab::GitalyClient::NamespaceService.new(storage).remove(name)
else
FileUtils.rm_r(full_path(storage, name), force: true)
end
......@@ -338,7 +338,8 @@ module Gitlab
Gitlab::GitalyClient.migrate(:rename_namespace,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).rename(old_name, new_name)
Gitlab::GitalyClient::NamespaceService.new(storage)
.rename(old_name, new_name)
else
break false if exists?(storage, new_name) || !exists?(storage, old_name)
......@@ -374,7 +375,8 @@ module Gitlab
Gitlab::GitalyClient.migrate(:namespace_exists,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |enabled|
if enabled
gitaly_namespace_client(storage).exists?(dir_name)
Gitlab::GitalyClient::NamespaceService.new(storage)
.exists?(dir_name)
else
File.exist?(full_path(storage, dir_name))
end
......@@ -398,7 +400,7 @@ module Gitlab
def full_path(storage, dir_name)
raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
File.join(storage, dir_name)
File.join(Gitlab.config.repositories.storages[storage].legacy_disk_path, dir_name)
end
def gitlab_shell_projects_path
......@@ -475,14 +477,6 @@ module Gitlab
Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end
def gitaly_namespace_client(storage_path)
storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
value.legacy_disk_path == storage_path
end
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def git_timeout
Gitlab.config.gitlab_shell.git_timeout
end
......
......@@ -427,10 +427,7 @@ namespace :gitlab do
user = User.find_by(username: username)
if user
repo_dirs = user.authorized_projects.map do |p|
File.join(
p.repository_storage_path,
"#{p.disk_path}.git"
)
p.repository.path_to_repo
end
repo_dirs.each { |repo_dir| check_repo_integrity(repo_dir) }
......
......@@ -10,9 +10,8 @@ namespace :gitlab do
end
scope.find_each do |project|
base = File.join(project.repository_storage_path, project.disk_path)
puts base + '.git'
puts base + '.wiki.git'
puts project.repository.path_to_repo
puts project.wiki.repository.path_to_repo
end
end
end
require Rails.root.join('db/migrate/limits_to_mysql')
require Rails.root.join('db/migrate/markdown_cache_limits_to_mysql')
require Rails.root.join('db/migrate/merge_request_diff_file_limits_to_mysql')
require Rails.root.join('db/migrate/limits_ci_job_trace_chunks_raw_data_for_mysql')
require Rails.root.join('db/migrate/limits_ci_build_trace_chunks_raw_data_for_mysql')
desc "GitLab | Add limits to strings in mysql database"
task add_limits_mysql: :environment do
......@@ -9,5 +9,5 @@ task add_limits_mysql: :environment do
LimitsToMysql.new.up
MarkdownCacheLimitsToMysql.new.up
MergeRequestDiffFileLimitsToMysql.new.up
LimitsCiJobTraceChunksRawDataForMysql.new.up
LimitsCiBuildTraceChunksRawDataForMysql.new.up
end
......@@ -8,8 +8,8 @@ msgid ""
msgstr ""
"Project-Id-Version: gitlab 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2018-04-17 11:44+0200\n"
"PO-Revision-Date: 2018-04-17 11:44+0200\n"
"POT-Creation-Date: 2018-04-24 13:19+0000\n"
"PO-Revision-Date: 2018-04-24 13:19+0000\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
......@@ -3136,6 +3136,9 @@ msgstr ""
msgid "Select Archive Format"
msgstr ""
msgid "Select a namespace to fork the project"
msgstr ""
msgid "Select a timezone"
msgstr ""
......
......@@ -125,7 +125,7 @@ describe ProfilesController, :request_store do
user.reload
expect(response.status).to eq(302)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{new_username}/#{project.path}.git")).to be_truthy
end
end
......@@ -143,7 +143,7 @@ describe ProfilesController, :request_store do
user.reload
expect(response.status).to eq(302)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_truthy
expect(before_disk_path).to eq(project.disk_path)
end
end
......
FactoryBot.define do
factory :ci_job_trace_chunk, class: Ci::JobTraceChunk do
factory :ci_build_trace_chunk, class: Ci::BuildTraceChunk do
job factory: :ci_build
chunk_index 0
data_store :redis
......
......@@ -147,7 +147,8 @@ FactoryBot.define do
# We delete hooks so that gitlab-shell will not try to authenticate with
# an API that isn't running
FileUtils.rm_r(File.join(project.repository_storage_path, "#{project.disk_path}.git", 'hooks'))
project.gitlab_shell.rm_directory(project.repository_storage,
File.join("#{project.disk_path}.git", 'hooks'))
end
end
......@@ -172,7 +173,8 @@ FactoryBot.define do
after(:create) do |project|
raise "Failed to create repository!" unless project.create_repository
FileUtils.rm_r(File.join(project.repository_storage_path, "#{project.disk_path}.git", 'refs'))
project.gitlab_shell.rm_directory(project.repository_storage,
File.join("#{project.disk_path}.git", 'refs'))
end
end
......
......@@ -9,7 +9,8 @@ unless Object.respond_to?(:require_dependency)
end
end
# Defines Gitlab and Gitlab.config which are at the center of the app
# Defines Settings and Gitlab.config which are at the center of the app
require_relative '../config/settings'
require_relative '../lib/gitlab' unless defined?(Gitlab.config)
require_relative 'support/rspec'
......@@ -274,16 +274,16 @@ describe ProjectsHelper do
end
end
describe '#sanitized_import_error' do
describe '#sanitize_repo_path' do
let(:project) { create(:project, :repository) }
let(:storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path }
before do
allow(project).to receive(:repository_storage_path).and_return('/base/repo/path')
allow(Settings.shared).to receive(:[]).with('path').and_return('/base/repo/export/path')
end
it 'removes the repo path' do
repo = '/base/repo/path/namespace/test.git'
repo = "#{storage_path}/namespace/test.git"
import_error = "Could not clone #{repo}\n"
expect(sanitize_repo_path(project, import_error)).to eq('Could not clone [REPOS PATH]/namespace/test.git')
......
......@@ -4,6 +4,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
let!(:admin) { create(:admin) }
let!(:base_dir) { Dir.mktmpdir + '/' }
let(:bare_repository) { Gitlab::BareRepositoryImport::Repository.new(base_dir, File.join(base_dir, "#{project_path}.git")) }
let(:gitlab_shell) { Gitlab::Shell.new }
subject(:importer) { described_class.new(admin, bare_repository) }
......@@ -84,12 +85,14 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
importer.create_project_if_needed
project = Project.find_by_full_path(project_path)
repo_path = File.join(project.repository_storage_path, project.disk_path + '.git')
repo_path = "#{project.disk_path}.git"
hook_path = File.join(repo_path, 'hooks')
expect(File).to exist(repo_path)
expect(File.symlink?(hook_path)).to be true
expect(File.readlink(hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
expect(gitlab_shell.exists?(project.repository_storage, repo_path)).to be(true)
expect(gitlab_shell.exists?(project.repository_storage, hook_path)).to be(true)
full_hook_path = File.join(project.repository.path_to_repo, 'hooks')
expect(File.readlink(full_hook_path)).to eq(Gitlab.config.gitlab_shell.hooks_path)
end
context 'hashed storage enabled' do
......@@ -144,8 +147,8 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = Project.find_by_full_path("#{admin.full_path}/#{project_path}")
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git'))
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.git')).to be(true)
expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
end
it 'moves an existing project to the correct path' do
......@@ -155,7 +158,9 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = build(:project, :legacy_storage, :repository)
original_commit_count = project.repository.commit_count
bare_repo = Gitlab::BareRepositoryImport::Repository.new(project.repository_storage_path, project.repository.path)
legacy_path = Gitlab.config.repositories.storages[project.repository_storage].legacy_disk_path
bare_repo = Gitlab::BareRepositoryImport::Repository.new(legacy_path, project.repository.path)
gitlab_importer = described_class.new(admin, bare_repo)
expect(gitlab_importer).to receive(:create_project).and_call_original
......@@ -183,7 +188,7 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
project = Project.find_by_full_path(project_path)
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
expect(gitlab_shell.exists?(project.repository_storage, project.disk_path + '.wiki.git')).to be(true)
end
end
......
......@@ -67,7 +67,7 @@ describe ::Gitlab::BareRepositoryImport::Repository do
end
after do
gitlab_shell.remove_repository(root_path, hashed_path)
gitlab_shell.remove_repository(repository_storage, hashed_path)
end
subject { described_class.new(root_path, repo_path) }
......
......@@ -3,8 +3,8 @@ require 'spec_helper'
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
include ChunkedIOHelpers
set(:job) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(job) }
set(:build) { create(:ci_build, :running) }
let(:chunked_io) { described_class.new(build) }
before do
stub_feature_flags(ci_enable_live_trace: true)
......@@ -13,7 +13,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context "#initialize" do
context 'when a chunk exists' do
before do
job.trace.set('ABC')
build.trace.set('ABC')
end
it { expect(chunked_io.size).to eq(3) }
......@@ -22,7 +22,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when two chunks exist' do
before do
stub_buffer_size(4)
job.trace.set('ABCDEF')
build.trace.set('ABCDEF')
end
it { expect(chunked_io.size).to eq(6) }
......@@ -37,7 +37,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.seek(pos, where) }
before do
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
context 'when moves pos to end of the file' do
......@@ -68,7 +68,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.eof? }
before do
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
context 'when current pos is at end of the file' do
......@@ -94,7 +94,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'yields lines' do
......@@ -106,7 +106,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'calls get_chunk only once' do
......@@ -127,7 +127,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it { is_expected.to eq(sample_trace_raw) }
......@@ -136,7 +136,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it { is_expected.to eq(sample_trace_raw) }
......@@ -149,7 +149,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -160,7 +160,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -186,7 +186,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -201,7 +201,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -212,7 +212,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'reads a trace' do
......@@ -238,7 +238,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it_behaves_like 'all line matching'
......@@ -247,7 +247,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it_behaves_like 'all line matching'
......@@ -256,7 +256,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when pos is at middle of the file' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
chunked_io.seek(chunked_io.size / 2)
string_io.seek(string_io.size / 2)
......@@ -316,7 +316,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(exist_data)
build.trace.set(exist_data)
end
it_behaves_like 'appends a trace'
......@@ -325,7 +325,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(exist_data)
build.trace.set(exist_data)
end
it_behaves_like 'appends a trace'
......@@ -349,7 +349,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is smaller than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize / 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it_behaves_like 'truncates a trace'
......@@ -358,7 +358,7 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
context 'when buffer size is larger than file size' do
before do
stub_buffer_size(sample_trace_raw.bytesize * 2)
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it_behaves_like 'truncates a trace'
......@@ -370,14 +370,14 @@ describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
subject { chunked_io.destroy! }
before do
job.trace.set(sample_trace_raw)
build.trace.set(sample_trace_raw)
end
it 'deletes' do
expect { subject }.to change { chunked_io.size }
.from(sample_trace_raw.bytesize).to(0)
expect(Ci::JobTraceChunk.where(job: job).count).to eq(0)
expect(Ci::BuildTraceChunk.where(build: build).count).to eq(0)
end
end
end
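The ChunkedIO spec hunks above consistently swap the job fixture for build when seeding trace data. A minimal sketch of the updated pattern, condensed from the before blocks in the diff (sample_trace_raw and stub_buffer_size come from the spec's helpers):

# Illustrative sketch only; mirrors the renamed fixtures shown in the diff above.
describe Gitlab::Ci::Trace::ChunkedIO, :clean_gitlab_redis_cache do
  set(:build) { create(:ci_build, :running) }
  let(:chunked_io) { described_class.new(build) }

  before do
    stub_buffer_size(sample_trace_raw.bytesize / 2)
    build.trace.set(sample_trace_raw) # previously job.trace.set(sample_trace_raw)
  end

  it 'reads back the full trace' do
    expect(chunked_io.read).to eq(sample_trace_raw)
  end
end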
require 'spec_helper'
describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
set(:job) { create(:ci_build, :running) }
set(:build) { create(:ci_build, :running) }
before do
stub_feature_flags(ci_enable_live_trace: true)
......@@ -83,7 +83,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write((1..8).to_a.join("\n"))
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -137,7 +137,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write('12345678')
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -175,7 +175,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write('12345678')
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -234,7 +234,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write(File.binread(path))
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -283,7 +283,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write("1234")
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -318,7 +318,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write("12\n34\n56")
chunked_io.seek(0, IO::SEEK_SET)
end
......@@ -473,7 +473,7 @@ describe Gitlab::Ci::Trace::Stream, :clean_gitlab_redis_cache do
context 'when stream is ChunkedIO' do
let(:stream) do
described_class.new do
Gitlab::Ci::Trace::ChunkedIO.new(job).tap do |chunked_io|
Gitlab::Ci::Trace::ChunkedIO.new(build).tap do |chunked_io|
chunked_io.write(data)
chunked_io.seek(0, IO::SEEK_SET)
end
......
......@@ -436,7 +436,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
it "can be erased" do
trace.erase!
expect(trace.exist?).to be(false)
expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
end
it "returns live trace data" do
......@@ -512,7 +512,7 @@ describe Gitlab::Ci::Trace, :clean_gitlab_redis_cache do
expect(build.trace.exist?).to be_truthy
expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::JobTraceChunk.where(job: build)).not_to be_exist
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(src_checksum)
.to eq(Digest::SHA256.file(build.job_artifacts_trace.file.path).hexdigest)
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)
......
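The Trace spec hunks above expect that archiving a live trace removes all Ci::BuildTraceChunk rows and leaves a job.log artifact whose SHA256 matches the source data. A hedged sketch of that flow; the archive! entry point and the trace.raw accessor are assumptions not shown in this diff:

# Sketch only; archive! and trace.raw are assumed, the expectations mirror the diff above.
src_checksum = Digest::SHA256.hexdigest(build.trace.raw)

build.trace.archive!

expect(build.job_artifacts_trace.file.exists?).to be_truthy
expect(build.job_artifacts_trace.file.filename).to eq('job.log')
expect(Ci::BuildTraceChunk.where(build: build)).not_to be_exist
expect(build.job_artifacts_trace.file_sha256).to eq(src_checksum)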
......@@ -689,7 +689,7 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
after do
Gitlab::Shell.new.remove_repository(storage_path, 'my_project')
Gitlab::Shell.new.remove_repository('default', 'my_project')
end
shared_examples 'repository mirror fetching' do
......
......@@ -24,8 +24,8 @@ describe Gitlab::ImportExport::WikiRestorer do
after do
FileUtils.rm_rf(export_path)
Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage_path, project_with_wiki.wiki.disk_path)
Gitlab::Shell.new.remove_repository(project.wiki.repository_storage_path, project.wiki.disk_path)
Gitlab::Shell.new.remove_repository(project_with_wiki.wiki.repository_storage, project_with_wiki.wiki.disk_path)
Gitlab::Shell.new.remove_repository(project.wiki.repository_storage, project.wiki.disk_path)
end
it 'restores the wiki repo successfully' do
......
......@@ -447,18 +447,18 @@ describe Gitlab::Shell do
let(:disk_path) { "#{project.disk_path}.git" }
it 'returns true when the command succeeds' do
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(true)
expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(true)
expect(gitlab_shell.remove_repository(project.repository_storage_path, project.disk_path)).to be(true)
expect(gitlab_shell.remove_repository(project.repository_storage, project.disk_path)).to be(true)
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(false)
expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
end
it 'keeps the namespace directory' do
gitlab_shell.remove_repository(project.repository_storage_path, project.disk_path)
gitlab_shell.remove_repository(project.repository_storage, project.disk_path)
expect(gitlab_shell.exists?(project.repository_storage_path, disk_path)).to be(false)
expect(gitlab_shell.exists?(project.repository_storage_path, project.disk_path.gsub(project.name, ''))).to be(true)
expect(gitlab_shell.exists?(project.repository_storage, disk_path)).to be(false)
expect(gitlab_shell.exists?(project.repository_storage, project.disk_path.gsub(project.name, ''))).to be(true)
end
end
......@@ -469,18 +469,18 @@ describe Gitlab::Shell do
old_path = project2.disk_path
new_path = "project/new_path"
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{old_path}.git")).to be(true)
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{new_path}.git")).to be(false)
expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(true)
expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(false)
expect(gitlab_shell.mv_repository(project2.repository_storage_path, old_path, new_path)).to be_truthy
expect(gitlab_shell.mv_repository(project2.repository_storage, old_path, new_path)).to be_truthy
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{old_path}.git")).to be(false)
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{new_path}.git")).to be(true)
expect(gitlab_shell.exists?(project2.repository_storage, "#{old_path}.git")).to be(false)
expect(gitlab_shell.exists?(project2.repository_storage, "#{new_path}.git")).to be(true)
end
it 'returns false when the command fails' do
expect(gitlab_shell.mv_repository(project2.repository_storage_path, project2.disk_path, '')).to be_falsy
expect(gitlab_shell.exists?(project2.repository_storage_path, "#{project2.disk_path}.git")).to be(true)
expect(gitlab_shell.mv_repository(project2.repository_storage, project2.disk_path, '')).to be_falsy
expect(gitlab_shell.exists?(project2.repository_storage, "#{project2.disk_path}.git")).to be(true)
end
end
......@@ -679,48 +679,48 @@ describe Gitlab::Shell do
describe 'namespace actions' do
subject { described_class.new }
let(:storage_path) { Gitlab.config.repositories.storages.default.legacy_disk_path }
let(:storage) { Gitlab.config.repositories.storages.keys.first }
describe '#add_namespace' do
it 'creates a namespace' do
subject.add_namespace(storage_path, "mepmep")
subject.add_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true)
expect(subject.exists?(storage, "mepmep")).to be(true)
end
end
describe '#exists?' do
context 'when the namespace does not exist' do
it 'returns false' do
expect(subject.exists?(storage_path, "non-existing")).to be(false)
expect(subject.exists?(storage, "non-existing")).to be(false)
end
end
context 'when the namespace exists' do
it 'returns true' do
subject.add_namespace(storage_path, "mepmep")
subject.add_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true)
expect(subject.exists?(storage, "mepmep")).to be(true)
end
end
end
describe '#remove' do
it 'removes the namespace' do
subject.add_namespace(storage_path, "mepmep")
subject.rm_namespace(storage_path, "mepmep")
subject.add_namespace(storage, "mepmep")
subject.rm_namespace(storage, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(false)
expect(subject.exists?(storage, "mepmep")).to be(false)
end
end
describe '#mv_namespace' do
it 'renames the namespace' do
subject.add_namespace(storage_path, "mepmep")
subject.mv_namespace(storage_path, "mepmep", "2mep")
subject.add_namespace(storage, "mepmep")
subject.mv_namespace(storage, "mepmep", "2mep")
expect(subject.exists?(storage_path, "mepmep")).to be(false)
expect(subject.exists?(storage_path, "2mep")).to be(true)
expect(subject.exists?(storage, "mepmep")).to be(false)
expect(subject.exists?(storage, "2mep")).to be(true)
end
end
end
......
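From this point the Gitlab::Shell specs pass a repository storage name (for example 'default') instead of a filesystem path; the shell resolves the legacy disk path from configuration itself. A condensed sketch of the new calling convention, using only calls that appear in the hunks above:

# Sketch of the storage-name based API exercised by the specs above.
shell   = Gitlab::Shell.new
storage = Gitlab.config.repositories.storages.keys.first # typically 'default'

shell.add_namespace(storage, 'mepmep')
shell.exists?(storage, 'mepmep')              # => true
shell.mv_namespace(storage, 'mepmep', '2mep')
shell.rm_namespace(storage, '2mep')

# Repositories follow the same convention: storage name first, then disk paths.
shell.mv_repository(storage, 'group/old_path', 'group/new_path')
shell.remove_repository(storage, 'group/new_path')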
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20180425131009_assure_commits_count_for_merge_request_diff.rb')
describe AssureCommitsCountForMergeRequestDiff, :migration, :sidekiq, :redis do
let(:migration) { spy('migration') }
before do
allow(Gitlab::BackgroundMigration::AddMergeRequestDiffCommitsCount)
.to receive(:new).and_return(migration)
end
context 'when there are still unmigrated commit_counts afterwards' do
let(:namespaces) { table('namespaces') }
let(:projects) { table('projects') }
let(:merge_requests) { table('merge_requests') }
let(:diffs) { table('merge_request_diffs') }
before do
namespace = namespaces.create(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
merge_request = merge_requests.create!(source_branch: 'x', target_branch: 'y', target_project_id: project.id)
diffs.create!(commits_count: nil, merge_request_id: merge_request.id)
diffs.create!(commits_count: nil, merge_request_id: merge_request.id)
end
it 'migrates commit_counts sequentially in batches' do
migrate!
expect(migration).to have_received(:perform).once
end
end
end
require 'spec_helper'
describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
set(:job) { create(:ci_build, :running) }
describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
set(:build) { create(:ci_build, :running) }
let(:chunk_index) { 0 }
let(:data_store) { :redis }
let(:raw_data) { nil }
let(:job_trace_chunk) do
described_class.new(job: job, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
let(:build_trace_chunk) do
described_class.new(build: build, chunk_index: chunk_index, data_store: data_store, raw_data: raw_data)
end
describe 'CHUNK_SIZE' do
......@@ -17,13 +17,13 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#data' do
subject { job_trace_chunk.data }
subject { build_trace_chunk.data }
context 'when data_store is redis' do
let(:data_store) { :redis }
before do
job_trace_chunk.send(:redis_set_data, 'Sample data in redis')
build_trace_chunk.send(:redis_set_data, 'Sample data in redis')
end
it { is_expected.to eq('Sample data in redis') }
......@@ -38,7 +38,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is others' do
before do
job_trace_chunk.send(:write_attribute, :data_store, -1)
build_trace_chunk.send(:write_attribute, :data_store, -1)
end
it { expect { subject }.to raise_error('Unsupported data store') }
......@@ -46,7 +46,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#set_data' do
subject { job_trace_chunk.set_data(value) }
subject { build_trace_chunk.set_data(value) }
let(:value) { 'Sample data' }
......@@ -60,11 +60,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :redis }
it do
expect(job_trace_chunk.send(:redis_data)).to be_nil
expect(build_trace_chunk.send(:redis_data)).to be_nil
subject
expect(job_trace_chunk.send(:redis_data)).to eq(value)
expect(build_trace_chunk.send(:redis_data)).to eq(value)
end
context 'when fulfilled chunk size' do
......@@ -82,26 +82,26 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data_store) { :db }
it 'sets data' do
expect(job_trace_chunk.raw_data).to be_nil
expect(build_trace_chunk.raw_data).to be_nil
subject
expect(job_trace_chunk.raw_data).to eq(value)
expect(job_trace_chunk.persisted?).to be_truthy
expect(build_trace_chunk.raw_data).to eq(value)
expect(build_trace_chunk.persisted?).to be_truthy
end
context 'when raw_data is not changed' do
it 'does not execute UPDATE' do
expect(job_trace_chunk.raw_data).to be_nil
job_trace_chunk.save!
expect(build_trace_chunk.raw_data).to be_nil
build_trace_chunk.save!
# First set
expect(ActiveRecord::QueryRecorder.new { subject }.count).to be > 0
expect(job_trace_chunk.raw_data).to eq(value)
expect(job_trace_chunk.persisted?).to be_truthy
expect(build_trace_chunk.raw_data).to eq(value)
expect(build_trace_chunk.persisted?).to be_truthy
# Second set
job_trace_chunk.reload
build_trace_chunk.reload
expect(ActiveRecord::QueryRecorder.new { subject }.count).to be(0)
end
end
......@@ -117,7 +117,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
context 'when data_store is others' do
before do
job_trace_chunk.send(:write_attribute, :data_store, -1)
build_trace_chunk.send(:write_attribute, :data_store, -1)
end
it { expect { subject }.to raise_error('Unsupported data store') }
......@@ -125,7 +125,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#truncate' do
subject { job_trace_chunk.truncate(offset) }
subject { build_trace_chunk.truncate(offset) }
shared_examples_for 'truncates' do
context 'when offset is negative' do
......@@ -146,7 +146,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'truncates' do
subject
expect(job_trace_chunk.data).to eq(data.byteslice(0, offset))
expect(build_trace_chunk.data).to eq(data.byteslice(0, offset))
end
end
end
......@@ -156,7 +156,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
job_trace_chunk.send(:redis_set_data, data)
build_trace_chunk.send(:redis_set_data, data)
end
it_behaves_like 'truncates'
......@@ -172,7 +172,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#append' do
subject { job_trace_chunk.append(new_data, offset) }
subject { build_trace_chunk.append(new_data, offset) }
let(:new_data) { 'Sample new data' }
let(:offset) { 0 }
......@@ -203,7 +203,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'appends' do
subject
expect(job_trace_chunk.data).to eq(total_data)
expect(build_trace_chunk.data).to eq(total_data)
end
end
......@@ -213,7 +213,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
it 'appends' do
subject
expect(job_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
expect(build_trace_chunk.data).to eq(data.byteslice(0, offset) + new_data)
end
end
end
......@@ -223,7 +223,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
job_trace_chunk.send(:redis_set_data, data)
build_trace_chunk.send(:redis_set_data, data)
end
it_behaves_like 'appends'
......@@ -239,7 +239,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#size' do
subject { job_trace_chunk.size }
subject { build_trace_chunk.size }
context 'when data_store is redis' do
let(:data_store) { :redis }
......@@ -248,7 +248,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
job_trace_chunk.send(:redis_set_data, data)
build_trace_chunk.send(:redis_set_data, data)
end
it { is_expected.to eq(data.bytesize) }
......@@ -276,7 +276,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
describe '#use_database!' do
subject { job_trace_chunk.use_database! }
subject { build_trace_chunk.use_database! }
context 'when data_store is redis' do
let(:data_store) { :redis }
......@@ -285,19 +285,19 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
let(:data) { 'Sample data in redis' }
before do
job_trace_chunk.send(:redis_set_data, data)
build_trace_chunk.send(:redis_set_data, data)
end
it 'stashes the data' do
expect(job_trace_chunk.data_store).to eq('redis')
expect(job_trace_chunk.send(:redis_data)).to eq(data)
expect(job_trace_chunk.raw_data).to be_nil
expect(build_trace_chunk.data_store).to eq('redis')
expect(build_trace_chunk.send(:redis_data)).to eq(data)
expect(build_trace_chunk.raw_data).to be_nil
subject
expect(job_trace_chunk.data_store).to eq('db')
expect(job_trace_chunk.send(:redis_data)).to be_nil
expect(job_trace_chunk.raw_data).to eq(data)
expect(build_trace_chunk.data_store).to eq('db')
expect(build_trace_chunk.send(:redis_data)).to be_nil
expect(build_trace_chunk.raw_data).to eq(data)
end
end
......@@ -320,11 +320,11 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
describe 'ExclusiveLock' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain) { nil }
stub_const('Ci::JobTraceChunk::LOCK_RETRY', 1)
stub_const('Ci::BuildTraceChunk::LOCK_RETRY', 1)
end
it 'raise an error' do
expect { job_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
expect { build_trace_chunk.append('ABC', 0) }.to raise_error('Failed to obtain write lock')
end
end
......@@ -338,7 +338,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
create(:ci_build, :running, :trace_live, pipeline: pipeline, project: project)
end
shared_examples_for 'deletes all job_trace_chunk and data in redis' do
shared_examples_for 'deletes all build_trace_chunk and data in redis' do
it do
project.builds.each do |build|
Gitlab::Redis::SharedState.with do |redis|
......@@ -364,20 +364,20 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
end
end
context 'when job_trace_chunk is destroyed' do
context 'when build_trace_chunk is destroyed' do
let(:subject) do
project.builds.each { |build| build.chunks.destroy_all }
end
it_behaves_like 'deletes all job_trace_chunk and data in redis'
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
context 'when job is destroyed' do
context 'when build is destroyed' do
let(:subject) do
project.builds.destroy_all
end
it_behaves_like 'deletes all job_trace_chunk and data in redis'
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
context 'when project is destroyed' do
......@@ -385,7 +385,7 @@ describe Ci::JobTraceChunk, :clean_gitlab_redis_shared_state do
project.destroy!
end
it_behaves_like 'deletes all job_trace_chunk and data in redis'
it_behaves_like 'deletes all build_trace_chunk and data in redis'
end
end
end
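The spec above is renamed together with the model: Ci::JobTraceChunk becomes Ci::BuildTraceChunk and the job/job_id association becomes build/build_id. A minimal sketch of exercising the renamed model, limited to constructor arguments and methods shown in the spec diff:

# Illustrative only; all calls are taken from the spec expectations above.
build = create(:ci_build, :running)
chunk = Ci::BuildTraceChunk.new(build: build, chunk_index: 0, data_store: :redis)

chunk.append('Sample data', 0) # writes into the Redis-backed store
chunk.size                     # => 'Sample data'.bytesize
chunk.use_database!            # moves the chunk data from Redis into raw_data in the database
chunk.truncate(6)              # keeps only the first 6 bytes
chunk.data                     # => 'Sample'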
......@@ -5,6 +5,7 @@ describe Namespace do
let!(:namespace) { create(:namespace) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:repository_storage) { 'default' }
describe 'associations' do
it { is_expected.to have_many :projects }
......@@ -201,7 +202,7 @@ describe Namespace do
it "moves dir if path changed" do
namespace.update_attributes(path: namespace.full_path + '_new')
expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
context 'with subgroups', :nested_groups do
......@@ -281,7 +282,7 @@ describe Namespace do
namespace.update_attributes(path: namespace.full_path + '_new')
expect(before_disk_path).to eq(project.disk_path)
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_truthy
end
end
......@@ -322,7 +323,7 @@ describe Namespace do
end
it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
namespace.destroy
end
......@@ -344,7 +345,7 @@ describe Namespace do
end
it 'schedules the namespace for deletion' do
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage_path, deleted_path)
expect(GitlabShellWorker).to receive(:perform_in).with(5.minutes, :rm_namespace, repository_storage, deleted_path)
child.destroy
end
......
......@@ -449,14 +449,6 @@ describe Project do
end
end
describe '#repository_storage_path' do
let(:project) { create(:project) }
it 'returns the repository storage path' do
expect(Dir.exist?(project.repository_storage_path)).to be(true)
end
end
it 'returns valid url to repo' do
project = described_class.new(path: 'somewhere')
expect(project.url_to_repo).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + 'somewhere.git')
......@@ -1108,7 +1100,7 @@ describe Project do
end
context 'repository storage by default' do
let(:project) { create(:project) }
let(:project) { build(:project) }
before do
storages = {
......@@ -1461,7 +1453,7 @@ describe Project do
.and_return(false)
allow(shell).to receive(:create_repository)
.with(project.repository_storage_path, project.disk_path)
.with(project.repository_storage, project.disk_path)
.and_return(true)
expect(project).to receive(:create_repository).with(force: true)
......@@ -2636,7 +2628,7 @@ describe Project do
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, project.base_dir)
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, project.base_dir)
project.ensure_storage_path_exists
end
......@@ -2675,12 +2667,12 @@ describe Project do
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.with(project.repository_storage, "#{project.namespace.full_path}/foo", "#{project.full_path}")
.and_return(true)
expect(gitlab_shell).to receive(:mv_repository)
.ordered
.with(project.repository_storage_path, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.with(project.repository_storage, "#{project.namespace.full_path}/foo.wiki", "#{project.full_path}.wiki")
.and_return(true)
expect_any_instance_of(SystemHooksService)
......@@ -2829,7 +2821,7 @@ describe Project do
it 'delegates to gitlab_shell to ensure namespace is created' do
allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, hashed_prefix)
expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage, hashed_prefix)
project.ensure_storage_path_exists
end
......
......@@ -11,7 +11,7 @@ describe ProjectWiki do
subject { project_wiki }
it { is_expected.to delegate_method(:empty?).to :pages }
it { is_expected.to delegate_method(:repository_storage_path).to :project }
it { is_expected.to delegate_method(:repository_storage).to :project }
it { is_expected.to delegate_method(:hashed_storage?).to :project }
describe "#full_path" do
......
......@@ -53,8 +53,8 @@ describe Groups::DestroyService do
end
it 'verifies that paths have been deleted' do
expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
end
end
end
......@@ -71,13 +71,13 @@ describe Groups::DestroyService do
after do
# Clean up stale directories
gitlab_shell.rm_namespace(project.repository_storage_path, group.path)
gitlab_shell.rm_namespace(project.repository_storage_path, remove_path)
gitlab_shell.rm_namespace(project.repository_storage, group.path)
gitlab_shell.rm_namespace(project.repository_storage, remove_path)
end
it 'verifies original paths and projects still exist' do
expect(gitlab_shell.exists?(project.repository_storage_path, group.path)).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, group.path)).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
expect(Project.unscoped.count).to eq(1)
expect(Group.unscoped.count).to eq(2)
end
......@@ -144,7 +144,7 @@ describe Groups::DestroyService do
let!(:project) { create(:project, :legacy_storage, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end
end
......@@ -152,7 +152,7 @@ describe Groups::DestroyService do
let!(:project) { create(:project, :empty_repo, namespace: group) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end
end
end
......
......@@ -171,7 +171,6 @@ describe Projects::CreateService, '#execute' do
context 'when another repository already exists on disk' do
let(:repository_storage) { 'default' }
let(:repository_storage_path) { Gitlab.config.repositories.storages[repository_storage].legacy_disk_path }
let(:opts) do
{
......@@ -186,7 +185,7 @@ describe Projects::CreateService, '#execute' do
end
after do
gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
end
it 'does not allow to create a project when path matches existing repository on disk' do
......@@ -222,7 +221,7 @@ describe Projects::CreateService, '#execute' do
end
after do
gitlab_shell.remove_repository(repository_storage_path, hashed_path)
gitlab_shell.remove_repository(repository_storage, hashed_path)
end
it 'does not allow to create a project when path matches existing repository on disk' do
......
......@@ -18,8 +18,8 @@ describe Projects::DestroyService do
it 'deletes the project' do
expect(Project.unscoped.all).not_to include(project)
expect(project.gitlab_shell.exists?(project.repository_storage_path, path + '.git')).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path + '.git')).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage, path + '.git')).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage, remove_path + '.git')).to be_falsey
end
end
......@@ -252,21 +252,21 @@ describe Projects::DestroyService do
let(:path) { project.disk_path + '.git' }
before do
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
# Don't run sidekiq to check if renamed repository exists
Sidekiq::Testing.fake! { destroy_project(project, user, {}) }
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_truthy
end
it 'restores the repositories' do
Sidekiq::Testing.fake! { described_class.new(project, user).attempt_repositories_rollback }
expect(project.gitlab_shell.exists?(project.repository_storage_path, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage_path, remove_path)).to be_falsey
expect(project.gitlab_shell.exists?(project.repository_storage, path)).to be_truthy
expect(project.gitlab_shell.exists?(project.repository_storage, remove_path)).to be_falsey
end
end
......
......@@ -112,7 +112,7 @@ describe Projects::ForkService do
end
after do
gitlab_shell.remove_repository(repository_storage_path, "#{@to_user.namespace.full_path}/#{@from_project.path}")
gitlab_shell.remove_repository(repository_storage, "#{@to_user.namespace.full_path}/#{@from_project.path}")
end
it 'does not allow creation' do
......
......@@ -16,8 +16,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do
it 'renames project and wiki repositories' do
service.execute
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.git")).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.wiki.git")).to be_truthy
end
it 'updates project to be hashed and not read-only' do
......@@ -52,8 +52,8 @@ describe Projects::HashedStorage::MigrateRepositoryService do
service.execute
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage_path, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{hashed_storage.disk_path}.wiki.git")).to be_falsey
expect(project.repository_read_only?).to be_falsey
end
......@@ -63,11 +63,11 @@ describe Projects::HashedStorage::MigrateRepositoryService do
before do
hashed_storage.ensure_storage_path_exists
gitlab_shell.mv_repository(project.repository_storage_path, from_name, to_name)
gitlab_shell.mv_repository(project.repository_storage, from_name, to_name)
end
it 'does not try to move nil repository over hashed' do
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name)
expect(gitlab_shell).not_to receive(:mv_repository).with(project.repository_storage, from_name, to_name)
expect_move_repository("#{project.disk_path}.wiki", "#{hashed_storage.disk_path}.wiki")
service.execute
......@@ -76,7 +76,7 @@ describe Projects::HashedStorage::MigrateRepositoryService do
end
def expect_move_repository(from_name, to_name)
expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage_path, from_name, to_name).and_call_original
expect(gitlab_shell).to receive(:mv_repository).with(project.repository_storage, from_name, to_name).and_call_original
end
end
end
......@@ -84,7 +84,7 @@ describe Projects::TransferService do
end
def project_path(project)
File.join(project.repository_storage_path, "#{project.disk_path}.git")
project.repository.path_to_repo
end
def current_path
......@@ -94,7 +94,7 @@ describe Projects::TransferService do
it 'rolls back repo location' do
attempt_project_transfer
expect(Dir.exist?(original_path)).to be_truthy
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be(true)
expect(original_path).to eq current_path
end
......@@ -165,7 +165,7 @@ describe Projects::TransferService do
end
after do
gitlab_shell.remove_repository(repository_storage_path, "#{group.full_path}/#{project.path}")
gitlab_shell.remove_repository(repository_storage, "#{group.full_path}/#{project.path}")
end
it { expect(@result).to eq false }
......
......@@ -200,7 +200,7 @@ describe Projects::UpdateService do
end
after do
gitlab_shell.remove_repository(repository_storage_path, "#{user.namespace.full_path}/existing")
gitlab_shell.remove_repository(repository_storage, "#{user.namespace.full_path}/existing")
end
it 'does not allow renaming when new path matches existing repository on disk' do
......
......@@ -176,7 +176,7 @@ describe Users::DestroyService do
let!(:project) { create(:project, :empty_repo, :legacy_storage, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end
end
......@@ -184,7 +184,7 @@ describe Users::DestroyService do
let!(:project) { create(:project, :empty_repo, namespace: user.namespace) }
it 'removes repository' do
expect(gitlab_shell.exists?(project.repository_storage_path, "#{project.disk_path}.git")).to be_falsey
expect(gitlab_shell.exists?(project.repository_storage, "#{project.disk_path}.git")).to be_falsey
end
end
end
......
......@@ -113,10 +113,10 @@ RSpec.configure do |config|
m.call(*args)
shard_name, repository_relative_path = args
shard_path = Gitlab.config.repositories.storages.fetch(shard_name).legacy_disk_path
# We can't leave the hooks in place after a fork, as those would fail in tests
# The "internal" API is not available
FileUtils.rm_rf(File.join(shard_path, repository_relative_path, 'hooks'))
Gitlab::Shell.new.rm_directory(shard_name,
File.join(repository_relative_path, 'hooks'))
end
# Enable all features by default for testing
......
......@@ -5,7 +5,7 @@ module ChunkedIOHelpers
end
def stub_buffer_size(size)
stub_const('Ci::JobTraceChunk::CHUNK_SIZE', size)
stub_const('Ci::BuildTraceChunk::CHUNK_SIZE', size)
stub_const('Gitlab::Ci::Trace::ChunkedIO::CHUNK_SIZE', size)
end
......
......@@ -31,7 +31,7 @@ module JavaScriptFixturesHelpers
end
def remove_repository(project)
Gitlab::Shell.new.remove_repository(project.repository_storage_path, project.disk_path)
Gitlab::Shell.new.remove_repository(project.repository_storage, project.disk_path)
end
private
......
......@@ -218,7 +218,8 @@ module TestEnv
end
def copy_repo(project, bare_repo:, refs:)
target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.disk_path}.git")
target_repo_path = File.expand_path(repos_path + "/#{project.disk_path}.git")
FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path
......@@ -226,7 +227,7 @@ module TestEnv
end
def repos_path
Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
@repos_path ||= Gitlab.config.repositories.storages[REPOS_STORAGE].legacy_disk_path
end
def backup_path
......
......@@ -195,15 +195,12 @@ describe 'gitlab:app namespace rake task' do
end
context 'multiple repository storages' do
let(:storage_default) do
Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/default_storage'))
end
let(:test_second_storage) do
Gitlab::GitalyClient::StorageSettings.new(@default_storage_hash.merge('path' => 'tmp/tests/custom_storage'))
end
let(:storages) do
{
'default' => storage_default,
'default' => Gitlab.config.repositories.storages.default,
'test_second_storage' => test_second_storage
}
end
......@@ -215,8 +212,7 @@ describe 'gitlab:app namespace rake task' do
before do
# We only need a backup of the repositories for this test
stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage'))
FileUtils.mkdir(Settings.absolute('tmp/tests/custom_storage'))
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
# Avoid asking gitaly about the root ref (which will fail because of the
......@@ -225,14 +221,23 @@ describe 'gitlab:app namespace rake task' do
end
after do
FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage'))
FileUtils.rm_rf(Settings.absolute('tmp/tests/custom_storage'))
end
it 'includes repositories in all repository storages' do
project_a = create(:project, :repository, repository_storage: 'default')
project_a = create(:project, :repository)
project_b = create(:project, :repository, repository_storage: 'test_second_storage')
b_storage_dir = File.join(Settings.absolute('tmp/tests/custom_storage'), File.dirname(project_b.disk_path))
FileUtils.mkdir_p(b_storage_dir)
# Even when overriding the storage, we have to move it there, so it exists
FileUtils.mv(
File.join(Settings.absolute(storages['default'].legacy_disk_path), project_b.repository.disk_path + '.git'),
Rails.root.join(storages['test_second_storage'].legacy_disk_path, project_b.repository.disk_path + '.git')
)
expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
tar_contents, exit_status = Gitlab::Popen.popen(
......