Commit 28a98db7 authored by Nick Thomas

Import gitlab_projects.rb from gitlab-shell

By importing this Ruby code into gitlab-rails (and gitaly-ruby), we avoid
about 200ms of startup time for each gitlab_projects subprocess that we eliminate.

By removing the gitlab_projects subprocess between gitlab-rails / sidekiq and
any git subprocesses (e.g. for fork_project, fetch_remote, etc. calls), we can
also manage these git processes more cleanly and avoid sending SIGKILL to them.
parent 4ea931c2
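For illustration, a minimal caller-side sketch of the imported class, invoked in-process instead of spawning a gitlab_projects subprocess. The shard path, remote name and timeout below are hypothetical examples; the initialize and fetch_remote signatures match the class added in this commit:

# Hypothetical usage sketch; paths and remote name are examples only.
projects = Gitlab::Git::GitlabProjects.new(
  '/home/git/repositories',            # shard_path (example)
  'gitlab-org/gitlab-test.git',        # repository_relative_path (example)
  global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
  logger: Rails.logger
)

# Runs `git fetch` with a timeout in-process; returns true/false and
# accumulates subprocess output, readable via #output.
success = projects.fetch_remote('upstream', 600, force: false, tags: true)
Rails.logger.error(projects.output) unless success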
---
title: Import some code and functionality from gitlab-shell to improve subprocess handling
merge_request:
author:
type: other
module Gitlab
module Git
class GitlabProjects
include Gitlab::Git::Popen
# Absolute path to directory where repositories are stored.
# Example: /home/git/repositories
attr_reader :shard_path
# Relative path is a directory name for repository with .git at the end.
# Example: gitlab-org/gitlab-test.git
attr_reader :repository_relative_path
# Absolute path to the repository.
# Example: /home/git/repositories/gitlab-org/gitlab-test.git
attr_reader :repository_absolute_path
# This is the path at which the gitlab-shell hooks directory can be found.
# It's essential for integration between git and GitLab proper. All new
# repositories should have their hooks directory symlinked here.
attr_reader :global_hooks_path
attr_reader :logger
def initialize(shard_path, repository_relative_path, global_hooks_path:, logger:)
@shard_path = shard_path
@repository_relative_path = repository_relative_path
@logger = logger
@global_hooks_path = global_hooks_path
@repository_absolute_path = File.join(shard_path, repository_relative_path)
@output = StringIO.new
end
def output
io = @output.dup
io.rewind
io.read
end
def rm_project
logger.info "Removing repository <#{repository_absolute_path}>."
FileUtils.rm_rf(repository_absolute_path)
end
# Move repository from one directory to another
#
# Example: gitlab/gitlab-ci.git -> randx/six.git
#
# Won't work if target namespace directory does not exist
#
def mv_project(new_path)
new_absolute_path = File.join(shard_path, new_path)
# verify that the source repo exists
unless File.exist?(repository_absolute_path)
logger.error "mv-project failed: source path <#{repository_absolute_path}> does not exist."
return false
end
# ...and that the target repo does not exist
if File.exist?(new_absolute_path)
logger.error "mv-project failed: destination path <#{new_absolute_path}> already exists."
return false
end
logger.info "Moving repository from <#{repository_absolute_path}> to <#{new_absolute_path}>."
FileUtils.mv(repository_absolute_path, new_absolute_path)
end
# Import project via git clone --bare
# URL must be publicly cloneable
def import_project(source, timeout)
# Skip import if repo already exists
return false if File.exist?(repository_absolute_path)
masked_source = mask_password_in_url(source)
logger.info "Importing project from <#{masked_source}> to <#{repository_absolute_path}>."
cmd = %W(git clone --bare -- #{source} #{repository_absolute_path})
success = run_with_timeout(cmd, timeout, nil)
unless success
logger.error("Importing project from <#{masked_source}> to <#{repository_absolute_path}> failed.")
FileUtils.rm_rf(repository_absolute_path)
return false
end
Gitlab::Git::Repository.create_hooks(repository_absolute_path, global_hooks_path)
# The project was imported successfully.
# Remove the origin URL since it may contain password.
remove_origin_in_repo
true
end
def fork_repository(new_shard_path, new_repository_relative_path)
from_path = repository_absolute_path
to_path = File.join(new_shard_path, new_repository_relative_path)
# The repository cannot already exist
if File.exist?(to_path)
logger.error "fork-repository failed: destination repository <#{to_path}> already exists."
return false
end
# Ensure the namespace / hashed storage directory exists
FileUtils.mkdir_p(File.dirname(to_path), mode: 0770)
logger.info "Forking repository from <#{from_path}> to <#{to_path}>."
cmd = %W(git clone --bare --no-local -- #{from_path} #{to_path})
run(cmd, nil) && Gitlab::Git::Repository.create_hooks(to_path, global_hooks_path)
end
def fetch_remote(name, timeout, force:, tags:, ssh_key: nil, known_hosts: nil)
tags_option = tags ? '--tags' : '--no-tags'
logger.info "Fetching remote #{name} for repository #{repository_absolute_path}."
cmd = %W(git fetch #{name} --prune --quiet)
cmd << '--force' if force
cmd << tags_option
setup_ssh_auth(ssh_key, known_hosts) do |env|
success = run_with_timeout(cmd, timeout, repository_absolute_path, env)
unless success
logger.error "Fetching remote #{name} for repository #{repository_absolute_path} failed."
end
success
end
end
def push_branches(remote_name, timeout, force, branch_names)
logger.info "Pushing branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
cmd = %w(git push)
cmd << '--force' if force
cmd += %W(-- #{remote_name}).concat(branch_names)
success = run_with_timeout(cmd, timeout, repository_absolute_path)
unless success
logger.error("Pushing branches to remote #{remote_name} failed.")
end
success
end
def delete_remote_branches(remote_name, branch_names)
branches = branch_names.map { |branch_name| ":#{branch_name}" }
logger.info "Pushing deleted branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
cmd = %W(git push -- #{remote_name}).concat(branches)
success = run(cmd, repository_absolute_path)
unless success
logger.error("Pushing deleted branches to remote #{remote_name} failed.")
end
success
end
protected
def run(*args)
output, exitstatus = popen(*args)
@output << output
exitstatus&.zero?
end
def run_with_timeout(*args)
output, exitstatus = popen_with_timeout(*args)
@output << output
exitstatus&.zero?
rescue Timeout::Error
@output.puts('Timed out')
false
end
def mask_password_in_url(url)
result = URI(url)
result.password = "*****" unless result.password.nil?
result.user = "*****" unless result.user.nil? # it's needed for oauth access_token
result
rescue
url
end
def remove_origin_in_repo
cmd = %w(git remote rm origin)
run(cmd, repository_absolute_path)
end
# Builds a small shell script that can be used to execute SSH with a set of
# custom options.
#
# Options are expanded as `'-oKey="Value"'`, so SSH will correctly interpret
# paths with spaces in them. We trust the user not to embed single or double
# quotes in the key or value.
def custom_ssh_script(options = {})
args = options.map { |k, v| %Q{'-o#{k}="#{v}"'} }.join(' ')
[
"#!/bin/sh",
"exec ssh #{args} \"$@\""
].join("\n")
end
# Known hosts data and private keys can be passed to gitlab-shell in the
# environment. If present, this method puts them into temporary files, writes
# a script that can substitute as `ssh`, setting the options to respect those
# files, and yields: { "GIT_SSH" => "/tmp/myScript" }
def setup_ssh_auth(key, known_hosts)
options = {}
if key
key_file = Tempfile.new('gitlab-shell-key-file')
key_file.chmod(0o400)
key_file.write(key)
key_file.close
options['IdentityFile'] = key_file.path
options['IdentitiesOnly'] = 'yes'
end
if known_hosts
known_hosts_file = Tempfile.new('gitlab-shell-known-hosts')
known_hosts_file.chmod(0o400)
known_hosts_file.write(known_hosts)
known_hosts_file.close
options['StrictHostKeyChecking'] = 'yes'
options['UserKnownHostsFile'] = known_hosts_file.path
end
return yield({}) if options.empty?
script = Tempfile.new('gitlab-shell-ssh-wrapper')
script.chmod(0o755)
script.write(custom_ssh_script(options))
script.close
yield('GIT_SSH' => script.path)
ensure
key_file&.close!
known_hosts_file&.close!
script&.close!
end
end
end
end
@@ -18,7 +18,6 @@ module Gitlab
GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE
].freeze
SEARCH_CONTEXT_LINES = 3
GITALY_INTERNAL_URL = 'ssh://gitaly/internal.git'.freeze
REBASE_WORKTREE_PREFIX = 'rebase'.freeze
SQUASH_WORKTREE_PREFIX = 'squash'.freeze
GITALY_INTERNAL_URL = 'ssh://gitaly/internal.git'.freeze
@@ -40,10 +39,31 @@ module Gitlab
repo = Rugged::Repository.init_at(repo_path, bare)
repo.close
if symlink_hooks_to.present?
hooks_path = File.join(repo_path, 'hooks')
FileUtils.rm_rf(hooks_path)
FileUtils.ln_s(symlink_hooks_to, hooks_path)
create_hooks(repo_path, symlink_hooks_to) if symlink_hooks_to.present?
true
end
def create_hooks(repo_path, global_hooks_path)
local_hooks_path = File.join(repo_path, 'hooks')
real_local_hooks_path = :not_found
begin
real_local_hooks_path = File.realpath(local_hooks_path)
rescue Errno::ENOENT
# real_local_hooks_path == :not_found
end
# Do nothing if hooks already exist
unless real_local_hooks_path == File.realpath(global_hooks_path)
# Move the existing hooks somewhere safe
FileUtils.mv(
local_hooks_path,
"#{local_hooks_path}.old.#{Time.now.to_i}"
) if File.exist?(local_hooks_path)
# Create the hooks symlink
FileUtils.ln_sf(global_hooks_path, local_hooks_path)
end
true
......
@@ -66,7 +66,7 @@ module Gitlab
# Init new repository
#
# storage - project's storage name
# name - project path with namespace
# name - project disk path
#
# Ex.
# add_repository("/path/to/storage", "gitlab/gitlab-ci")
@@ -94,17 +94,21 @@ module Gitlab
# Import repository
#
# storage - project's storage path
# name - project path with namespace
# name - project disk path
# url - URL to import from
#
# Ex.
# import_repository("/path/to/storage", "gitlab/gitlab-ci", "https://github.com/randx/six.git")
# import_repository("/path/to/storage", "gitlab/gitlab-ci", "https://gitlab.com/gitlab-org/gitlab-test.git")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
def import_repository(storage, name, url)
# The timeout ensures the subprocess won't hang forever
cmd = [gitlab_shell_projects_path, 'import-project',
storage, "#{name}.git", url, "#{Gitlab.config.gitlab_shell.git_timeout}"]
gitlab_shell_fast_execute_raise_error(cmd)
cmd = gitlab_projects(storage, "#{name}.git")
success = cmd.import_project(url, git_timeout)
raise Error, cmd.output unless success
success
end
# Fetch remote for repository
@@ -132,16 +136,15 @@ module Gitlab
# Move repository
# storage - project's storage path
# path - project path with namespace
# new_path - new project path with namespace
# path - project disk path
# new_path - new project disk path
#
# Ex.
# mv_repository("/path/to/storage", "gitlab/gitlab-ci", "randx/gitlab-ci-new")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
def mv_repository(storage, path, new_path)
gitlab_shell_fast_execute([gitlab_shell_projects_path, 'mv-project',
storage, "#{path}.git", "#{new_path}.git"])
gitlab_projects(storage, "#{path}.git").mv_project("#{new_path}.git")
end
# Fork repository to new path
@@ -155,30 +158,21 @@ module Gitlab
#
# Gitaly note: JV: not easy to migrate because this involves two Gitaly servers, not one.
def fork_repository(forked_from_storage, forked_from_disk_path, forked_to_storage, forked_to_disk_path)
gitlab_shell_fast_execute(
[
gitlab_shell_projects_path,
'fork-repository',
forked_from_storage,
"#{forked_from_disk_path}.git",
forked_to_storage,
"#{forked_to_disk_path}.git"
]
)
gitlab_projects(forked_from_storage, "#{forked_from_disk_path}.git")
.fork_repository(forked_to_storage, "#{forked_to_disk_path}.git")
end
# Remove repository from file system
#
# storage - project's storage path
# name - project path with namespace
# name - project disk path
#
# Ex.
# remove_repository("/path/to/storage", "gitlab/gitlab-ci")
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/387
def remove_repository(storage, name)
gitlab_shell_fast_execute([gitlab_shell_projects_path,
'rm-project', storage, "#{name}.git"])
gitlab_projects(storage, "#{name}.git").rm_project
end
# Add new key to gitlab-shell
@@ -370,56 +364,43 @@ module Gitlab
end
end
# Create (if necessary) and link the secret token file
def generate_and_link_secret_token
secret_file = Gitlab.config.gitlab_shell.secret_file
unless File.size?(secret_file)
# Generate a new token of 16 random hexadecimal characters and store it in secret_file.
token = SecureRandom.hex(16)
File.write(secret_file, token)
end
link_path = File.join(gitlab_shell_path, '.gitlab_shell_secret')
if File.exist?(gitlab_shell_path) && !File.exist?(link_path)
FileUtils.symlink(secret_file, link_path)
end
end
# Push branch to remote repository
#
# project_name - project's name with namespace
# storage - project's storage path
# project_name - project's disk path
# remote_name - remote name
# branch_name - remote branch name
# branch_names - remote branch names to push
# forced - should we use --force flag
#
# Ex.
# push_remote_branches('upstream', 'feature')
# push_remote_branches('/path/to/storage', 'gitlab-org/gitlab-test', 'upstream', ['feature'])
#
def push_remote_branches(storage, project_name, remote_name, branch_names, forced: true)
args = [gitlab_shell_projects_path, 'push-branches', storage, "#{project_name}.git", remote_name, '600']
args << '--force' if forced
args += [*branch_names]
cmd = gitlab_projects(storage, "#{project_name}.git")
output, status = Popen.popen(args)
raise Error, output unless status.zero?
success = cmd.push_branches(remote_name, git_timeout, forced, branch_names)
true
raise Error, cmd.output unless success
success
end
# Delete branch from remote repository
#
# project_name - project's name with namespace
# storage - project's storage path
# project_name - project's disk path
# remote_name - remote name
# branch_name - remote branch name
# branch_names - remote branch names
#
# Ex.
# delete_remote_branches('upstream', 'feature')
# delete_remote_branches('/path/to/storage', 'gitlab-org/gitlab-test', 'upstream', ['feature'])
#
def delete_remote_branches(storage, project_name, remote_name, branch_names)
args = [gitlab_shell_projects_path, 'delete-remote-branches', storage, "#{project_name}.git", remote_name, *branch_names]
output, status = Popen.popen(args)
raise Error, output unless status.zero?
cmd = gitlab_projects(storage, "#{project_name}.git")
true
success = cmd.delete_remote_branches(remote_name, branch_names)
raise Error, cmd.output unless success
success
end
protected
@@ -460,24 +441,35 @@ module Gitlab
private
def local_fetch_remote(storage, name, remote, ssh_auth: nil, forced: false, no_tags: false)
args = [gitlab_shell_projects_path, 'fetch-remote', storage, name, remote, "#{Gitlab.config.gitlab_shell.git_timeout}"]
args << '--force' if forced
args << '--no-tags' if no_tags
def gitlab_projects(shard_path, disk_path)
Gitlab::Git::GitlabProjects.new(
shard_path,
disk_path,
global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
logger: Rails.logger
)
end
vars = {}
def local_fetch_remote(storage_path, repository_relative_path, remote, ssh_auth: nil, forced: false, no_tags: false)
vars = { force: forced, tags: !no_tags }
if ssh_auth&.ssh_import?
if ssh_auth.ssh_key_auth? && ssh_auth.ssh_private_key.present?
vars['GITLAB_SHELL_SSH_KEY'] = ssh_auth.ssh_private_key
vars[:ssh_key] = ssh_auth.ssh_private_key
end
if ssh_auth.ssh_known_hosts.present?
vars['GITLAB_SHELL_KNOWN_HOSTS'] = ssh_auth.ssh_known_hosts
vars[:known_hosts] = ssh_auth.ssh_known_hosts
end
end
gitlab_shell_fast_execute_raise_error(args, vars)
cmd = gitlab_projects(storage_path, repository_relative_path)
success = cmd.fetch_remote(remote, git_timeout, vars)
raise Error, cmd.output unless success
success
end
def gitlab_shell_fast_execute(cmd)
@@ -513,6 +505,10 @@ module Gitlab
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def git_timeout
Gitlab.config.gitlab_shell.git_timeout
end
def gitaly_migrate(method, &block)
Gitlab::GitalyClient.migrate(method, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e
......
require 'spec_helper'
describe Gitlab::Git::GitlabProjects do
after do
TestEnv.clean_test_path
end
let(:project) { create(:project, :repository) }
if $VERBOSE
let(:logger) { Logger.new(STDOUT) }
else
let(:logger) { double('logger').as_null_object }
end
let(:tmp_repos_path) { TestEnv.repos_path }
let(:repo_name) { project.disk_path + '.git' }
let(:tmp_repo_path) { File.join(tmp_repos_path, repo_name) }
let(:gl_projects) { build_gitlab_projects(tmp_repos_path, repo_name) }
describe '#initialize' do
it { expect(gl_projects.shard_path).to eq(tmp_repos_path) }
it { expect(gl_projects.repository_relative_path).to eq(repo_name) }
it { expect(gl_projects.repository_absolute_path).to eq(File.join(tmp_repos_path, repo_name)) }
it { expect(gl_projects.logger).to eq(logger) }
end
describe '#mv_project' do
let(:new_repo_path) { File.join(tmp_repos_path, 'repo.git') }
it 'moves a repo directory' do
expect(File.exist?(tmp_repo_path)).to be_truthy
message = "Moving repository from <#{tmp_repo_path}> to <#{new_repo_path}>."
expect(logger).to receive(:info).with(message)
expect(gl_projects.mv_project('repo.git')).to be_truthy
expect(File.exist?(tmp_repo_path)).to be_falsy
expect(File.exist?(new_repo_path)).to be_truthy
end
it "fails if the source path doesn't exist" do
expect(logger).to receive(:error).with("mv-project failed: source path <#{tmp_repos_path}/bad-src.git> does not exist.")
result = build_gitlab_projects(tmp_repos_path, 'bad-src.git').mv_project('repo.git')
expect(result).to be_falsy
end
it 'fails if the destination path already exists' do
FileUtils.mkdir_p(File.join(tmp_repos_path, 'already-exists.git'))
message = "mv-project failed: destination path <#{tmp_repos_path}/already-exists.git> already exists."
expect(logger).to receive(:error).with(message)
expect(gl_projects.mv_project('already-exists.git')).to be_falsy
end
end
describe '#rm_project' do
it 'removes a repo directory' do
expect(File.exist?(tmp_repo_path)).to be_truthy
expect(logger).to receive(:info).with("Removing repository <#{tmp_repo_path}>.")
expect(gl_projects.rm_project).to be_truthy
expect(File.exist?(tmp_repo_path)).to be_falsy
end
end
describe '#push_branches' do
let(:remote_name) { 'remote-name' }
let(:branch_name) { 'master' }
let(:cmd) { %W(git push -- #{remote_name} #{branch_name}) }
let(:force) { false }
subject { gl_projects.push_branches(remote_name, 600, force, [branch_name]) }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, success: true)
is_expected.to be_truthy
end
it 'fails' do
stub_spawn(cmd, 600, tmp_repo_path, success: false)
is_expected.to be_falsy
end
context 'with --force' do
let(:cmd) { %W(git push --force -- #{remote_name} #{branch_name}) }
let(:force) { true }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, success: true)
is_expected.to be_truthy
end
end
end
describe '#fetch_remote' do
let(:remote_name) { 'remote-name' }
let(:branch_name) { 'master' }
let(:force) { false }
let(:tags) { true }
let(:args) { { force: force, tags: tags }.merge(extra_args) }
let(:extra_args) { {} }
let(:cmd) { %W(git fetch #{remote_name} --prune --quiet --tags) }
subject { gl_projects.fetch_remote(remote_name, 600, args) }
def stub_tempfile(name, filename, opts = {})
chmod = opts.delete(:chmod)
file = StringIO.new
allow(file).to receive(:close!)
allow(file).to receive(:path).and_return(name)
expect(Tempfile).to receive(:new).with(filename).and_return(file)
expect(file).to receive(:chmod).with(chmod) if chmod
file
end
context 'with default args' do
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
it 'fails' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: false)
is_expected.to be_falsy
end
end
context 'with --force' do
let(:force) { true }
let(:cmd) { %W(git fetch #{remote_name} --prune --quiet --force --tags) }
it 'executes the command with forced option' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
end
context 'with --no-tags' do
let(:tags) { false }
let(:cmd) { %W(git fetch #{remote_name} --prune --quiet --no-tags) }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
end
describe 'with an SSH key' do
let(:extra_args) { { ssh_key: 'SSH KEY' } }
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/keyFile', 'gitlab-shell-key-file', chmod: 0o400)
stub_spawn(cmd, 600, tmp_repo_path, { 'GIT_SSH' => 'scriptFile' }, success: true)
is_expected.to be_truthy
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oIdentityFile=\"/tmp files/keyFile\"' '-oIdentitiesOnly=\"yes\"' \"$@\"")
expect(key.string).to eq('SSH KEY')
end
end
describe 'with known_hosts data' do
let(:extra_args) { { known_hosts: 'KNOWN HOSTS' } }
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/knownHosts', 'gitlab-shell-known-hosts', chmod: 0o400)
stub_spawn(cmd, 600, tmp_repo_path, { 'GIT_SSH' => 'scriptFile' }, success: true)
is_expected.to be_truthy
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oStrictHostKeyChecking=\"yes\"' '-oUserKnownHostsFile=\"/tmp files/knownHosts\"' \"$@\"")
expect(key.string).to eq('KNOWN HOSTS')
end
end
end
describe '#import_project' do
let(:project) { create(:project) }
let(:import_url) { TestEnv.factory_repo_path_bare }
let(:cmd) { %W(git clone --bare -- #{import_url} #{tmp_repo_path}) }
let(:timeout) { 600 }
subject { gl_projects.import_project(import_url, timeout) }
context 'success import' do
it 'imports a repo' do
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_falsy
message = "Importing project from <#{import_url}> to <#{tmp_repo_path}>."
expect(logger).to receive(:info).with(message)
is_expected.to be_truthy
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_truthy
end
end
context 'already exists' do
it "doesn't import" do
FileUtils.mkdir_p(tmp_repo_path)
is_expected.to be_falsy
end
end
context 'timeout' do
it 'does not import a repo' do
stub_spawn_timeout(cmd, timeout, nil)
message = "Importing project from <#{import_url}> to <#{tmp_repo_path}> failed."
expect(logger).to receive(:error).with(message)
is_expected.to be_falsy
expect(gl_projects.output).to eq("Timed out\n")
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_falsy
end
end
end
describe '#fork_repository' do
let(:dest_repos_path) { tmp_repos_path }
let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') }
let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) }
let(:dest_namespace) { File.dirname(dest_repo) }
subject { gl_projects.fork_repository(dest_repos_path, dest_repo_name) }
before do
FileUtils.mkdir_p(dest_repos_path)
end
after do
FileUtils.rm_rf(dest_repos_path)
end
it 'forks the repository' do
message = "Forking repository from <#{tmp_repo_path}> to <#{dest_repo}>."
expect(logger).to receive(:info).with(message)
is_expected.to be_truthy
expect(File.exist?(dest_repo)).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
end
it 'does not fork if a project of the same name already exists' do
# create a fake project at the intended destination
FileUtils.mkdir_p(dest_repo)
# trying to fork again should fail as the repo already exists
message = "fork-repository failed: destination repository <#{dest_repo}> already exists."
expect(logger).to receive(:error).with(message)
is_expected.to be_falsy
end
context 'different storages' do
let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), 'alternative') }
it 'forks the repo' do
is_expected.to be_truthy
expect(File.exist?(dest_repo)).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
end
end
end
def build_gitlab_projects(*args)
described_class.new(
*args,
global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
logger: logger
)
end
def stub_spawn(*args, success: true)
exitstatus = success ? 0 : nil
expect(gl_projects).to receive(:popen_with_timeout).with(*args)
.and_return(["output", exitstatus])
end
def stub_spawn_timeout(*args)
expect(gl_projects).to receive(:popen_with_timeout).with(*args)
.and_raise(Timeout::Error)
end
end
@@ -19,6 +19,51 @@ describe Gitlab::Git::Repository, seed_helper: true do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
describe '.create_hooks' do
let(:repo_path) { File.join(TestEnv.repos_path, 'hook-test.git') }
let(:hooks_dir) { File.join(repo_path, 'hooks') }
let(:target_hooks_dir) { Gitlab.config.gitlab_shell.hooks_path }
let(:existing_target) { File.join(repo_path, 'foobar') }
before do
FileUtils.rm_rf(repo_path)
FileUtils.mkdir_p(repo_path)
end
context 'hooks is a directory' do
let(:existing_file) { File.join(hooks_dir, 'my-file') }
before do
FileUtils.mkdir_p(hooks_dir)
FileUtils.touch(existing_file)
described_class.create_hooks(repo_path, target_hooks_dir)
end
it { expect(File.readlink(hooks_dir)).to eq(target_hooks_dir) }
it { expect(Dir[File.join(repo_path, "hooks.old.*/my-file")].count).to eq(1) }
end
context 'hooks is a valid symlink' do
before do
FileUtils.mkdir_p existing_target
File.symlink(existing_target, hooks_dir)
described_class.create_hooks(repo_path, target_hooks_dir)
end
it { expect(File.readlink(hooks_dir)).to eq(target_hooks_dir) }
end
context 'hooks is a broken symlink' do
before do
FileUtils.rm_f(existing_target)
File.symlink(existing_target, hooks_dir)
described_class.create_hooks(repo_path, target_hooks_dir)
end
it { expect(File.readlink(hooks_dir)).to eq(target_hooks_dir) }
end
end
describe "Respond to" do
subject { repository }
......
@@ -17,6 +17,7 @@ module StubENV
def add_stubbed_value(key, value)
allow(ENV).to receive(:[]).with(key).and_return(value)
allow(ENV).to receive(:key?).with(key).and_return(true)
allow(ENV).to receive(:fetch).with(key).and_return(value)
allow(ENV).to receive(:fetch).with(key, anything()) do |_, default_val|
value || default_val
@@ -29,6 +30,7 @@ module StubENV
def init_stub
allow(ENV).to receive(:[]).and_call_original
allow(ENV).to receive(:key?).and_call_original
allow(ENV).to receive(:fetch).and_call_original
add_stubbed_value(STUBBED_KEY, true)
end
......