Commit 84574633 authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-09-12

# Conflicts:
#	doc/api/projects.md
#	lib/api/helpers/projects_helpers.rb

[ci skip]
parents 3afd4ab9 d2cc536d
......@@ -133,7 +133,7 @@ GEM
coderay (1.1.2)
coercible (1.0.0)
descendants_tracker (~> 0.0.1)
commonmarker (0.17.8)
commonmarker (0.17.13)
ruby-enum (~> 0.5)
concord (0.1.5)
adamantium (~> 0.2.0)
......
import Vue from 'vue';
import Pagination from '@gitlab-org/gitlab-ui/dist/components/base/pagination';
import progressBar from '@gitlab-org/gitlab-ui/dist/components/base/progress_bar';
import modal from '@gitlab-org/gitlab-ui/dist/components/base/modal';
import loadingIcon from '@gitlab-org/gitlab-ui/dist/components/base/loading_icon';
......@@ -6,6 +7,7 @@ import loadingIcon from '@gitlab-org/gitlab-ui/dist/components/base/loading_icon
import dModal from '@gitlab-org/gitlab-ui/dist/directives/modal';
import dTooltip from '@gitlab-org/gitlab-ui/dist/directives/tooltip';
Vue.component('gl-pagination', Pagination);
Vue.component('gl-progress-bar', progressBar);
Vue.component('gl-ui-modal', modal);
Vue.component('gl-loading-icon', loadingIcon);
......
<script>
import tablePagination from '~/vue_shared/components/table_pagination.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import eventHub from '../event_hub';
import { getParameterByName } from '../../lib/utils/common_utils';
export default {
components: {
tablePagination,
PaginationLinks,
},
props: {
groups: {
......@@ -49,15 +49,18 @@ export default {
>
{{ searchEmptyMessage }}
</div>
<template
v-else
>
<group-folder
v-if="!searchEmpty"
:groups="groups"
:action="action"
/>
<table-pagination
v-if="!searchEmpty"
<pagination-links
:change="change"
:page-info="pageInfo"
class="d-flex justify-content-center prepend-top-default"
/>
</template>
</div>
</template>
<script>
import { s__ } from '../../locale';
export default {
props: {
change: {
type: Function,
required: true,
},
pageInfo: {
type: Object,
required: true,
},
},
firstText: s__('Pagination|« First'),
prevText: s__('Pagination|Prev'),
nextText: s__('Pagination|Next'),
lastText: s__('Pagination|Last »'),
};
</script>
<template>
<gl-pagination
v-bind="$attrs"
:change="change"
:page="pageInfo.page"
:per-page="pageInfo.perPage"
:total-items="pageInfo.total"
:first-text="$options.firstText"
:prev-text="$options.prevText"
:next-text="$options.nextText"
:last-text="$options.lastText"
/>
</template>
......@@ -340,6 +340,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
@source_project = @merge_request.source_project
@target_project = @merge_request.target_project
@target_branches = @merge_request.target_project.repository.branch_names
@noteable = @merge_request
end
def finder_type
......
......@@ -49,6 +49,7 @@ class ProjectsFinder < UnionFinder
collection = by_search(collection)
collection = by_archived(collection)
collection = by_custom_attributes(collection)
collection = by_deleted_status(collection)
sort(collection)
end
......@@ -131,6 +132,10 @@ class ProjectsFinder < UnionFinder
params[:search].present? ? items.search(params[:search]) : items
end
def by_deleted_status(items)
params[:without_deleted].present? ? items.without_deleted : items
end
def sort(items)
params[:sort].present? ? items.sort_by_attribute(params[:sort]) : items.order_id_desc
end
......
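For context, a minimal sketch of how the new `without_deleted` parameter is consumed; the finder's keyword arguments are assumed from its existing interface and `user` is a placeholder:

```ruby
# Illustrative sketch: exercising the new `without_deleted` finder parameter.
# The keyword arguments mirror ProjectsFinder's interface; `user` is a placeholder.
projects = ProjectsFinder.new(
  current_user: user,
  params: { without_deleted: true, sort: 'name_asc' }
).execute

# Projects created with `pending_delete: true` are filtered out by `by_deleted_status`.
```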
......@@ -44,6 +44,7 @@ module Ci
delegate :url, to: :runner_session, prefix: true, allow_nil: true
delegate :terminal_specification, to: :runner_session, allow_nil: true
delegate :gitlab_deploy_token, to: :project
delegate :trigger_short_token, to: :trigger_request, allow_nil: true
##
# The "environment" field for builds is a String, and is the unexpanded name!
......
......@@ -8,6 +8,8 @@ module Ci
belongs_to :pipeline, foreign_key: :commit_id
has_many :builds
delegate :short_token, to: :trigger, prefix: true, allow_nil: true
# We switched to Ci::PipelineVariable from Ci::TriggerRequest.variables.
# Ci::TriggerRequest doesn't save variables anymore.
validates :variables, absence: true
......
......@@ -73,10 +73,19 @@ module Clusters
"clientSecret" => oauth_application.secret,
"callbackUrl" => callback_url
}
},
"singleuser" => {
"extraEnv" => {
"GITLAB_PROJECT_ID" => project_id
}
}
}
end
def project_id
cluster&.project&.id
end
def gitlab_url
Gitlab.config.gitlab.url
end
......
......@@ -582,7 +582,6 @@ class Project < ActiveRecord::Base
end
def cleanup
@repository&.cleanup
@repository = nil
end
......
......@@ -87,10 +87,6 @@ class Repository
alias_method :raw, :raw_repository
def cleanup
@raw_repository&.cleanup
end
# Don't use this! It's going away. Use Gitaly to read or write from repos.
def path_to_repo
@path_to_repo ||=
......
......@@ -59,6 +59,12 @@ class BuildDetailsEntity < JobEntity
raw_project_job_path(project, build)
end
expose :trigger, if: -> (*) { build.trigger_request } do
expose :trigger_short_token, as: :short_token
expose :trigger_variables, as: :variables, using: TriggerVariableEntity
end
private
def build_failed_issue_options
......
# frozen_string_literal: true
class TriggerVariableEntity < Grape::Entity
include RequestAwareEntity
expose :key, :value, :public
end
......@@ -7,6 +7,10 @@ class ProjectServiceWorker
def perform(hook_id, data)
data = data.with_indifferent_access
Service.find(hook_id).execute(data)
service = Service.find(hook_id)
service.execute(data)
rescue => error
service_class = service&.class&.name || "Not Found"
logger.error class: self.class.name, service_class: service_class, message: error.message
end
end
---
title: Excludes projects marked for deletion from the projects API
merge_request: 21542
author: Jacopo Beschi @jacopo-beschi
type: changed
---
title: Add trigger information to the job API
merge_request: 21495
author:
type: other
---
title: Remove Rugged and shell code from Gitlab::Git
merge_request: 21488
author:
type: other
---
title: Fixed mention autocomplete when editing a merge request.
merge_request:
author:
type: fixed
---
title: Log project service errors when executing asynchronously
merge_request:
author:
type: other
---
title: Update GitLab Shell to v8.3.2
merge_request: 21701
author:
type: fixed
......@@ -6,20 +6,6 @@ class Gitlab::Seeder::CycleAnalytics
@project = project
@user = User.admins.first
@issue_count = perf ? 1000 : 5
stub_git_pre_receive!
end
# The GitLab API needn't be running for the fixtures to be
# created. Since we're performing a number of git actions
# here (like creating a branch or committing a file), we need
# to disable the `pre_receive` hook in order to remove this
# dependency on the GitLab API.
def stub_git_pre_receive!
Gitlab::Git::HooksService.class_eval do
def run_hook(name)
[true, '']
end
end
end
def seed_metrics!
......
......@@ -97,10 +97,10 @@ For a more fully featured dashboard, Grafana can be used and has
Sample Prometheus queries:
- **% Memory used:** `(1 - ((node_memory_MemFree + node_memory_Cached) / node_memory_MemTotal)) * 100`
- **% CPU load:** `1 - rate(node_cpu{mode="idle"}[5m])`
- **Data transmitted:** `irate(node_network_transmit_bytes[5m])`
- **Data received:** `irate(node_network_receive_bytes[5m])`
- **% Memory available:** `((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) or ((node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes) / node_memory_MemTotal_bytes)) * 100`
- **% CPU utilization:** `1 - avg without (mode,cpu) (rate(node_cpu_seconds_total{mode="idle"}[5m]))`
- **Data transmitted:** `rate(node_network_transmit_bytes_total{device!="lo"}[5m])`
- **Data received:** `rate(node_network_receive_bytes_total{device!="lo"}[5m])`
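Any of these expressions can be evaluated against the Prometheus HTTP query API; below is a minimal Ruby sketch using the memory-available query (the Prometheus host and port are placeholders):

```ruby
# Minimal sketch: evaluate the "% Memory available" expression through the
# Prometheus HTTP API. The host and port below are placeholders.
require 'json'
require 'net/http'
require 'uri'

query = '((node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes) or ' \
        '((node_memory_MemFree_bytes + node_memory_Buffers_bytes + ' \
        'node_memory_Cached_bytes) / node_memory_MemTotal_bytes)) * 100'

uri = URI('http://prometheus.example.com:9090/api/v1/query')
uri.query = URI.encode_www_form(query: query)

result = JSON.parse(Net::HTTP.get(uri))
puts result.dig('data', 'result')
```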
## Configuring Prometheus to monitor Kubernetes
......
......@@ -667,6 +667,7 @@ POST /projects
| `printing_merge_request_link_enabled` | boolean | no | Show link to create/view merge request when pushing from the command line |
| `ci_config_path` | string | no | The path to CI config file |
| `initialize_with_readme` | boolean | no | `false` by default |
| `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | How many approvers should approve merge request by default |
| `mirror` | boolean | no | Enables pull mirroring in a project |
......@@ -675,6 +676,8 @@ POST /projects
>**Note**: If your HTTP repository is not publicly accessible,
add authentication information to the URL: `https://username:password@gitlab.company.com/group/project.git`
where `password` is a public access key with the `api` scope enabled.
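For illustration, a request exercising the new `initialize_with_readme` parameter could look like the following Ruby sketch (the hostname and token are placeholders):

```ruby
# Sketch only: create a project with an initial README via the REST API.
# The GitLab hostname and the private token are placeholders.
require 'json'
require 'net/http'
require 'uri'

uri = URI('https://gitlab.example.com/api/v4/projects')
request = Net::HTTP::Post.new(uri)
request['PRIVATE-TOKEN'] = '<your_access_token>'
request.set_form_data('name' => 'readme-demo', 'initialize_with_readme' => 'true')

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) do |http|
  http.request(request)
end

puts JSON.parse(response.body)['web_url']
```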
## Create project for user
......
......@@ -27,8 +27,9 @@ There are a few rules to get your merge request accepted:
ask or assign any [reviewers][projects] for a first review.
1. If you need some guidance (e.g. it's your first merge request), feel free
to ask one of the [Merge request coaches][team].
1. The reviewer will assign the merge request to a maintainer once the
reviewer is satisfied with the state of the merge request.
1. It is recommended that you assign a maintainer who is from a different team than your own.
This ensures that all code across GitLab is consistent and can be easily understood by all contributors.
1. Keep in mind that maintainers are also going to perform a final code review.
The ideal scenario is that the reviewer has already addressed any concerns
the maintainer would have found, and the maintainer only has to perform the
......
......@@ -15,7 +15,7 @@ We're constantly moving Rugged calls to Gitaly and the progress can be followed
When refreshing a Merge Request (pushing to a source branch, force-pushing to the target branch, or if the target branch now contains any commits from the MR),
we fetch the comparison information using `Gitlab::Git::Compare`, which fetches `base` and `head` data using Gitaly and the diff between them through
`Gitlab::Git::Diff.between` (which uses _Gitaly_ if it's enabled, otherwise _Rugged_).
`Gitlab::Git::Diff.between`.
The diff-fetching process _limits_ single file diff sizes and the overall size of the whole diff through a series of constant values. Raw diff files are
then persisted in the `merge_request_diff_files` table.
......
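To make the limit options concrete, here is a rough sketch of filtering an options hash through `Gitlab::Git::Diff.filter_diff_options` (assuming it is called as the class-level helper; the numeric values are placeholders, not GitLab's defaults):

```ruby
# Rough sketch: only recognized diff options survive the filter.
# The numbers are placeholders, not GitLab's real limits.
options = Gitlab::Git::Diff.filter_diff_options(
  max_files: 100,                                        # collapse after this many file patches
  max_lines: 5_000,                                      # collapse after this many patch lines
  limits: { safe_max_files: 100, safe_max_lines: 5_000 },
  expanded: false,                                       # drop raw patch data once a limit is hit
  unknown_option: true                                   # discarded: not in the allowed list
)
```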
......@@ -106,10 +106,12 @@ module API
end
def find_project(id)
projects = Project.without_deleted
if id.is_a?(Integer) || id =~ /^\d+$/
Project.find_by(id: id)
projects.find_by(id: id)
elsif id.include?("/")
Project.find_by_full_path(id)
projects.find_by_full_path(id)
end
end
......@@ -399,7 +401,7 @@ module API
end
def project_finder_params
finder_params = {}
finder_params = { without_deleted: true }
finder_params[:owned] = true if params[:owned].present?
finder_params[:non_public] = true if params[:membership].present?
finder_params[:starred] = true if params[:starred].present?
......
......@@ -27,6 +27,7 @@ module API
optional :printing_merge_request_link_enabled, type: Boolean, desc: 'Show link to create/view merge request when pushing from the command line'
optional :merge_method, type: String, values: %w(ff rebase_merge merge), desc: 'The merge method used when merging merge requests'
optional :initialize_with_readme, type: Boolean, desc: "Initialize a project with a README.md"
end
params :optional_project_params_ee do
......@@ -35,6 +36,8 @@ module API
optional :external_authorization_classification_label, type: String, desc: 'The classification label for the project'
optional :mirror, type: Boolean, desc: 'Enables pull mirroring in a project'
optional :mirror_trigger_builds, type: Boolean, desc: 'Pull mirroring triggers builds'
end
params :optional_project_params do
......
module Gitlab
module Git
class CommitterWithHooks < Gollum::Committer
attr_reader :gl_wiki
def initialize(gl_wiki, options = {})
@gl_wiki = gl_wiki
super(gl_wiki.gollum_wiki, options)
end
def commit
# TODO: Remove after 10.8
return super unless allowed_to_run_hooks?
result = Gitlab::Git::OperationService.new(git_user, gl_wiki.repository).with_branch(
@wiki.ref,
start_branch_name: @wiki.ref
) do |start_commit|
super(false)
end
result[:newrev]
rescue Gitlab::Git::PreReceiveError => e
message = "Custom Hook failed: #{e.message}"
raise Gitlab::Git::Wiki::OperationError, message
end
private
# TODO: Remove after 10.8
def allowed_to_run_hooks?
@options[:user_id] != 0 && @options[:username].present?
end
def git_user
@git_user ||= Gitlab::Git::User.new(@options[:username],
@options[:name],
@options[:email],
gitlab_id)
end
def gitlab_id
Gitlab::GlId.gl_id_from_id_value(@options[:user_id])
end
end
end
end
# Gitaly note: JV: needs RPC for Gitlab::Git::Diff.between.
# Gitlab::Git::Diff is a wrapper around native Rugged::Diff object
module Gitlab
module Git
class Diff
......@@ -52,20 +49,31 @@ module Gitlab
repo.diff(common_commit, head, actual_options, *paths)
end
# Return a copy of the +options+ hash containing only keys that can be
# passed to Rugged. Allowed options are:
# Return a copy of the +options+ hash containing only recognized keys.
# Allowed options are:
#
# :ignore_whitespace_change ::
# If true, changes in amount of whitespace will be ignored.
#
# :disable_pathspec_match ::
# If true, the given +*paths+ will be applied as exact matches,
# instead of as fnmatch patterns.
:max_files ::
Limit how many files may have patches before collapsing
#
:max_lines ::
Limit how many patch lines (across all files) are allowed before collapsing
#
# :limits ::
# A hash with additional limits to check before collapsing patches.
# Allowed keys are: `max_bytes`, `safe_max_files`, `safe_max_lines`
# and `safe_max_bytes`
#
# :expanded ::
# If true, patch raw data will not be included in the diff after
# `max_files`, `max_lines` or any of the limits in `limits` are
# exceeded
def filter_diff_options(options, default_options = {})
allowed_options = [:ignore_whitespace_change,
:disable_pathspec_match, :paths,
:max_files, :max_lines, :limits, :expanded]
allowed_options = [:ignore_whitespace_change, :max_files, :max_lines,
:limits, :expanded]
if default_options
actual_defaults = default_options.dup
......@@ -93,7 +101,7 @@ module Gitlab
#
# "Binary files a/file/path and b/file/path differ\n"
# This is used when we detect that a diff is binary
# using CharlockHolmes when Rugged treats it as text.
# using CharlockHolmes.
def binary_message(old_path, new_path)
"Binary files #{old_path} and #{new_path} differ\n"
end
......@@ -106,8 +114,6 @@ module Gitlab
when Hash
init_from_hash(raw_diff)
prune_diff_if_eligible
when Rugged::Patch, Rugged::Diff::Delta
init_from_rugged(raw_diff)
when Gitlab::GitalyClient::Diff
init_from_gitaly(raw_diff)
prune_diff_if_eligible
......@@ -184,31 +190,6 @@ module Gitlab
private
def init_from_rugged(rugged)
if rugged.is_a?(Rugged::Patch)
init_from_rugged_patch(rugged)
d = rugged.delta
else
d = rugged
end
@new_path = encode!(d.new_file[:path])
@old_path = encode!(d.old_file[:path])
@a_mode = d.old_file[:mode].to_s(8)
@b_mode = d.new_file[:mode].to_s(8)
@new_file = d.added?
@renamed_file = d.renamed?
@deleted_file = d.deleted?
end
def init_from_rugged_patch(patch)
# Don't bother initializing diffs that are too large. If a diff is
# binary we're not going to display anything so we skip the size check.
return if !patch.delta.binary? && prune_large_patch(patch)
@diff = encode!(strip_diff_headers(patch.to_s))
end
def init_from_hash(hash)
raw_diff = hash.symbolize_keys
......@@ -262,23 +243,6 @@ module Gitlab
false
end
# Strip out the information at the beginning of the patch's text to match
# Grit's output
def strip_diff_headers(diff_text)
# Delete everything up to the first line that starts with '---' or
# 'Binary'
diff_text.sub!(/\A.*?^(---|Binary)/m, '\1')
if diff_text.start_with?('---', 'Binary')
diff_text
else
# If the diff_text did not contain a line starting with '---' or
# 'Binary', return the empty string. No idea why; we are just
# preserving behavior from before the refactor.
''
end
end
end
end
end
module Gitlab
module Git
class GitlabProjects
include Gitlab::Git::Popen
include Gitlab::Utils::StrongMemoize
# Name of shard where repositories are stored.
# Example: nfs-file06
attr_reader :shard_name
# Relative path is a directory name for repository with .git at the end.
# Example: gitlab-org/gitlab-test.git
attr_reader :repository_relative_path
# This is the path at which the gitlab-shell hooks directory can be found.
# It's essential for integration between git and GitLab proper. All new
# repositories should have their hooks directory symlinked here.
attr_reader :global_hooks_path
attr_reader :logger
def initialize(shard_name, repository_relative_path, global_hooks_path:, logger:)
@shard_name = shard_name
@repository_relative_path = repository_relative_path
@logger = logger
@global_hooks_path = global_hooks_path
@output = StringIO.new
end
def output
io = @output.dup
io.rewind
io.read
end
# Absolute path to the repository.
# Example: /home/git/repositories/gitlab-org/gitlab-test.git
# Probably will be removed when we fully migrate to Gitaly, part of
# https://gitlab.com/gitlab-org/gitaly/issues/1124.
def repository_absolute_path
strong_memoize(:repository_absolute_path) do
File.join(shard_path, repository_relative_path)
end
end
def shard_path
strong_memoize(:shard_path) do
Gitlab.config.repositories.storages.fetch(shard_name).legacy_disk_path
end
end
# Import project via git clone --bare
# URL must be publicly cloneable
def import_project(source, timeout)
git_import_repository(source, timeout)
end
def fork_repository(new_shard_name, new_repository_relative_path)
git_fork_repository(new_shard_name, new_repository_relative_path)
end
def fetch_remote(name, timeout, force:, tags:, ssh_key: nil, known_hosts: nil, prune: true)
logger.info "Fetching remote #{name} for repository #{repository_absolute_path}."
cmd = fetch_remote_command(name, tags, prune, force)
setup_ssh_auth(ssh_key, known_hosts) do |env|
run_with_timeout(cmd, timeout, repository_absolute_path, env).tap do |success|
unless success
logger.error "Fetching remote #{name} for repository #{repository_absolute_path} failed."
end
end
end
end
def push_branches(remote_name, timeout, force, branch_names)
logger.info "Pushing branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
cmd = %W(#{Gitlab.config.git.bin_path} push)
cmd << '--force' if force
cmd += %W(-- #{remote_name}).concat(branch_names)
success = run_with_timeout(cmd, timeout, repository_absolute_path)
unless success
logger.error("Pushing branches to remote #{remote_name} failed.")
end
success
end
def delete_remote_branches(remote_name, branch_names)
branches = branch_names.map { |branch_name| ":#{branch_name}" }
logger.info "Pushing deleted branches from #{repository_absolute_path} to remote #{remote_name}: #{branch_names}"
cmd = %W(#{Gitlab.config.git.bin_path} push -- #{remote_name}).concat(branches)
success = run(cmd, repository_absolute_path)
unless success
logger.error("Pushing deleted branches to remote #{remote_name} failed.")
end
success
end
protected
def run(*args)
output, exitstatus = popen(*args)
@output << output
exitstatus&.zero?
end
def run_with_timeout(*args)
output, exitstatus = popen_with_timeout(*args)
@output << output
exitstatus&.zero?
rescue Timeout::Error
@output.puts('Timed out')
false
end
def mask_password_in_url(url)
result = URI(url)
result.password = "*****" unless result.password.nil?
result.user = "*****" unless result.user.nil? # it's needed for oauth access_token
result
rescue
url
end
def remove_origin_in_repo
cmd = %W(#{Gitlab.config.git.bin_path} remote rm origin)
run(cmd, repository_absolute_path)
end
# Builds a small shell script that can be used to execute SSH with a set of
# custom options.
#
# Options are expanded as `'-oKey="Value"'`, so SSH will correctly interpret
# paths with spaces in them. We trust the user not to embed single or double
# quotes in the key or value.
def custom_ssh_script(options = {})
args = options.map { |k, v| %Q{'-o#{k}="#{v}"'} }.join(' ')
[
"#!/bin/sh",
"exec ssh #{args} \"$@\""
].join("\n")
end
# Known hosts data and private keys can be passed to gitlab-shell in the
# environment. If present, this method puts them into temporary files, writes
# a script that can substitute as `ssh`, setting the options to respect those
# files, and yields: { "GIT_SSH" => "/tmp/myScript" }
def setup_ssh_auth(key, known_hosts)
options = {}
if key
key_file = Tempfile.new('gitlab-shell-key-file')
key_file.chmod(0o400)
key_file.write(key)
key_file.close
options['IdentityFile'] = key_file.path
options['IdentitiesOnly'] = 'yes'
end
if known_hosts
known_hosts_file = Tempfile.new('gitlab-shell-known-hosts')
known_hosts_file.chmod(0o400)
known_hosts_file.write(known_hosts)
known_hosts_file.close
options['StrictHostKeyChecking'] = 'yes'
options['UserKnownHostsFile'] = known_hosts_file.path
end
return yield({}) if options.empty?
script = Tempfile.new('gitlab-shell-ssh-wrapper')
script.chmod(0o755)
script.write(custom_ssh_script(options))
script.close
yield('GIT_SSH' => script.path)
ensure
key_file&.close!
known_hosts_file&.close!
script&.close!
end
private
def fetch_remote_command(name, tags, prune, force)
%W(#{Gitlab.config.git.bin_path} fetch #{name} --quiet).tap do |cmd|
cmd << '--prune' if prune
cmd << '--force' if force
cmd << (tags ? '--tags' : '--no-tags')
end
end
def git_import_repository(source, timeout)
# Skip import if repo already exists
return false if File.exist?(repository_absolute_path)
masked_source = mask_password_in_url(source)
logger.info "Importing project from <#{masked_source}> to <#{repository_absolute_path}>."
cmd = %W(#{Gitlab.config.git.bin_path} clone --bare -- #{source} #{repository_absolute_path})
success = run_with_timeout(cmd, timeout, nil)
unless success
logger.error("Importing project from <#{masked_source}> to <#{repository_absolute_path}> failed.")
FileUtils.rm_rf(repository_absolute_path)
return false
end
Gitlab::Git::Repository.create_hooks(repository_absolute_path, global_hooks_path)
# The project was imported successfully.
# Remove the origin URL since it may contain password.
remove_origin_in_repo
true
end
def git_fork_repository(new_shard_name, new_repository_relative_path)
from_path = repository_absolute_path
new_shard_path = Gitlab.config.repositories.storages.fetch(new_shard_name).legacy_disk_path
to_path = File.join(new_shard_path, new_repository_relative_path)
# The repository cannot already exist
if File.exist?(to_path)
logger.error "fork-repository failed: destination repository <#{to_path}> already exists."
return false
end
# Ensure the namespace / hashed storage directory exists
FileUtils.mkdir_p(File.dirname(to_path), mode: 0770)
logger.info "Forking repository from <#{from_path}> to <#{to_path}>."
cmd = %W(#{Gitlab.config.git.bin_path} clone --bare --no-local -- #{from_path} #{to_path})
run(cmd, nil) && Gitlab::Git::Repository.create_hooks(to_path, global_hooks_path)
end
end
end
end
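A hedged usage sketch for the class above; the shard name, relative path, and source URL are placeholders:

```ruby
# Sketch only: import a bare repository through Gitlab::Git::GitlabProjects.
# 'default', the relative path and the source URL are placeholders.
gl_projects = Gitlab::Git::GitlabProjects.new(
  'default',
  'group/project.git',
  global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
  logger: Rails.logger
)

if gl_projects.import_project('https://gitlab.com/gitlab-org/gitlab-test.git', 600)
  puts "Imported to #{gl_projects.repository_absolute_path}"
else
  puts gl_projects.output
end
```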
# Gitaly note: JV: looks like this is only used by Gitlab::Git::HooksService in
# app/services. We shouldn't bother migrating this until we know how
# Gitlab::Git::HooksService will be migrated.
module Gitlab
module Git
class Hook
GL_PROTOCOL = 'web'.freeze
attr_reader :name, :path, :repository
def initialize(name, repository)
@name = name
@repository = repository
@path = File.join(repo_path, 'hooks', name)
end
def repo_path
repository.path
end
def exists?
File.exist?(path)
end
def trigger(gl_id, gl_username, oldrev, newrev, ref)
return [true, nil] unless exists?
Bundler.with_clean_env do
case name
when "pre-receive", "post-receive"
call_receive_hook(gl_id, gl_username, oldrev, newrev, ref)
when "update"
call_update_hook(gl_id, gl_username, oldrev, newrev, ref)
end
end
end
private
def call_receive_hook(gl_id, gl_username, oldrev, newrev, ref)
changes = [oldrev, newrev, ref].join(" ")
exit_status = false
exit_message = nil
vars = {
'GL_ID' => gl_id,
'GL_USERNAME' => gl_username,
'PWD' => repo_path,
'GL_PROTOCOL' => GL_PROTOCOL,
'GL_REPOSITORY' => repository.gl_repository
}
options = {
chdir: repo_path
}
Open3.popen3(vars, path, options) do |stdin, stdout, stderr, wait_thr|
exit_status = true
stdin.sync = true
# in git, pre- and post- receive hooks may just exit without
# reading stdin. We catch the exception to avoid a broken pipe
# warning
begin
# inject all the changes as stdin to the hook
changes.lines do |line|
stdin.puts line
end
rescue Errno::EPIPE
end
stdin.close
unless wait_thr.value == 0
exit_status = false
exit_message = retrieve_error_message(stderr, stdout)
end
end
[exit_status, exit_message]
end
def call_update_hook(gl_id, gl_username, oldrev, newrev, ref)
env = {
'GL_ID' => gl_id,
'GL_USERNAME' => gl_username,
'PWD' => repo_path
}
options = {
chdir: repo_path
}
args = [ref, oldrev, newrev]
stdout, stderr, status = Open3.capture3(env, path, *args, options)
[status.success?, stderr.presence || stdout]
end
def retrieve_error_message(stderr, stdout)
err_message = stderr.read
err_message = err_message.blank? ? stdout.read : err_message
err_message
end
end
end
end
module Gitlab
module Git
class HooksService
attr_accessor :oldrev, :newrev, :ref
def execute(pusher, repository, oldrev, newrev, ref)
@repository = repository
@gl_id = pusher.gl_id
@gl_username = pusher.username
@oldrev = oldrev
@newrev = newrev
@ref = ref
%w(pre-receive update).each do |hook_name|
status, message = run_hook(hook_name)
unless status
raise PreReceiveError, message
end
end
yield(self).tap do
run_hook('post-receive')
end
end
private
def run_hook(name)
hook = Gitlab::Git::Hook.new(name, @repository)
hook.trigger(@gl_id, @gl_username, oldrev, newrev, ref)
end
end
end
end
# Gitaly note: JV: When the time comes I think we will want to copy this
# class into Gitaly. None of its methods look like they should be RPC's.
# The RPC's will be at a higher level.
module Gitlab
module Git
class Index
IndexError = Class.new(StandardError)
DEFAULT_MODE = 0o100644
ACTIONS = %w(create create_dir update move delete).freeze
ACTION_OPTIONS = %i(file_path previous_path content encoding).freeze
attr_reader :repository, :raw_index
def initialize(repository)
@repository = repository
@raw_index = repository.rugged.index
end
delegate :read_tree, :get, to: :raw_index
def apply(action, options)
validate_action!(action)
public_send(action, options.slice(*ACTION_OPTIONS)) # rubocop:disable GitlabSecurity/PublicSend
end
def write_tree
raw_index.write_tree(repository.rugged)
end
def dir_exists?(path)
raw_index.find { |entry| entry[:path].start_with?("#{path}/") }
end
def create(options)
options = normalize_options(options)
if get(options[:file_path])
raise IndexError, "A file with this name already exists"
end
add_blob(options)
end
def create_dir(options)
options = normalize_options(options)
if get(options[:file_path])
raise IndexError, "A file with this name already exists"
end
if dir_exists?(options[:file_path])
raise IndexError, "A directory with this name already exists"
end
options = options.dup
options[:file_path] += '/.gitkeep'
options[:content] = ''
add_blob(options)
end
def update(options)
options = normalize_options(options)
file_entry = get(options[:file_path])
unless file_entry
raise IndexError, "A file with this name doesn't exist"
end
add_blob(options, mode: file_entry[:mode])
end
def move(options)
options = normalize_options(options)
file_entry = get(options[:previous_path])
unless file_entry
raise IndexError, "A file with this name doesn't exist"
end
if get(options[:file_path])
raise IndexError, "A file with this name already exists"
end
raw_index.remove(options[:previous_path])
add_blob(options, mode: file_entry[:mode])
end
def delete(options)
options = normalize_options(options)
unless get(options[:file_path])
raise IndexError, "A file with this name doesn't exist"
end
raw_index.remove(options[:file_path])
end
private
def normalize_options(options)
options = options.dup
options[:file_path] = normalize_path(options[:file_path]) if options[:file_path]
options[:previous_path] = normalize_path(options[:previous_path]) if options[:previous_path]
options
end
def normalize_path(path)
unless path
raise IndexError, "You must provide a file path"
end
pathname = Gitlab::Git::PathHelper.normalize_path(path.dup)
pathname.each_filename do |segment|
if segment == '..'
raise IndexError, 'Path cannot include directory traversal'
end
end
pathname.to_s
end
def add_blob(options, mode: nil)
content = options[:content]
unless content
raise IndexError, "You must provide content"
end
content = Base64.decode64(content) if options[:encoding] == 'base64'
detect = CharlockHolmes::EncodingDetector.new.detect(content)
unless detect && detect[:type] == :binary
# When writing to the repo directly as we are doing here,
# the `core.autocrlf` config isn't taken into account.
content.gsub!("\r\n", "\n") if repository.autocrlf
end
oid = repository.rugged.write(content, :blob)
raw_index.add(path: options[:file_path], oid: oid, mode: mode || DEFAULT_MODE)
rescue Rugged::IndexError => e
raise IndexError, e.message
end
def validate_action!(action)
unless ACTIONS.include?(action.to_s)
raise ArgumentError, "Unknown action '#{action}'"
end
end
end
end
end
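A short sketch of driving the index from outside; the `repository` variable stands in for a `Gitlab::Git::Repository` with Rugged access:

```ruby
# Sketch only: stage a new file and write the resulting tree.
# `repository` is a placeholder for a Gitlab::Git::Repository instance.
index = Gitlab::Git::Index.new(repository)
index.apply(:create, file_path: 'docs/README.md', content: "# Hello\n")
tree_id = index.write_tree # OID of the tree containing the staged blob
```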
module Gitlab
module Git
class OperationService
include Gitlab::Git::Popen
BranchUpdate = Struct.new(:newrev, :repo_created, :branch_created) do
alias_method :repo_created?, :repo_created
alias_method :branch_created?, :branch_created
......@@ -17,177 +15,6 @@ module Gitlab
)
end
end
attr_reader :user, :repository
def initialize(user, new_repository)
if user
user = Gitlab::Git::User.from_gitlab(user) unless user.respond_to?(:gl_id)
@user = user
end
# Refactoring aid
Gitlab::Git.check_namespace!(new_repository)
@repository = new_repository
end
def add_branch(branch_name, newrev)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
oldrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev)
end
def rm_branch(branch)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch.name
oldrev = branch.target
newrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev)
end
def add_tag(tag_name, newrev, options = {})
ref = Gitlab::Git::TAG_REF_PREFIX + tag_name
oldrev = Gitlab::Git::BLANK_SHA
with_hooks(ref, newrev, oldrev) do |service|
# We want to pass the OID of the tag object to the hooks. For an
# annotated tag we don't know that OID until after the tag object
# (raw_tag) is created in the repository. That is why we have to
# update the value after creating the tag object. Only the
# "post-receive" hook will receive the correct value in this case.
raw_tag = repository.rugged.tags.create(tag_name, newrev, options)
service.newrev = raw_tag.target_id
end
end
def rm_tag(tag)
ref = Gitlab::Git::TAG_REF_PREFIX + tag.name
oldrev = tag.target
newrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev) do
repository.rugged.tags.delete(tag.name)
end
end
# Whenever `start_branch_name` is passed and `branch_name` doesn't exist,
# the branch will be created from `start_branch_name`.
# If `start_repository` is passed and the branch doesn't exist,
# the commits will be looked up there instead of in the current repository.
def with_branch(
branch_name,
start_branch_name: nil,
start_repository: repository,
&block)
Gitlab::Git.check_namespace!(start_repository)
start_repository = RemoteRepository.new(start_repository) unless start_repository.is_a?(RemoteRepository)
start_branch_name = nil if start_repository.empty?
if start_branch_name && !start_repository.branch_exists?(start_branch_name)
raise ArgumentError, "Cannot find branch #{start_branch_name} in #{start_repository.relative_path}"
end
update_branch_with_hooks(branch_name) do
repository.with_repo_branch_commit(
start_repository,
start_branch_name || branch_name,
&block)
end
end
def update_branch(branch_name, newrev, oldrev)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
update_ref_in_hooks(ref, newrev, oldrev)
end
private
# Returns [newrev, should_run_after_create, should_run_after_create_branch]
def update_branch_with_hooks(branch_name)
update_autocrlf_option
was_empty = repository.empty?
# Make commit
newrev = yield
unless newrev
raise Gitlab::Git::CommitError.new('Failed to create commit')
end
branch = repository.find_branch(branch_name)
oldrev = find_oldrev_from_branch(newrev, branch)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
update_ref_in_hooks(ref, newrev, oldrev)
BranchUpdate.new(newrev, was_empty, was_empty || Gitlab::Git.blank_ref?(oldrev))
end
def find_oldrev_from_branch(newrev, branch)
return Gitlab::Git::BLANK_SHA unless branch
oldrev = branch.target
merge_base = repository.merge_base(newrev, branch.target)
raise Gitlab::Git::Repository::InvalidRef unless merge_base
if oldrev == merge_base
oldrev
else
raise Gitlab::Git::CommitError.new('Branch diverged')
end
end
def update_ref_in_hooks(ref, newrev, oldrev)
with_hooks(ref, newrev, oldrev) do
update_ref(ref, newrev, oldrev)
end
end
def with_hooks(ref, newrev, oldrev)
Gitlab::Git::HooksService.new.execute(
user,
repository,
oldrev,
newrev,
ref) do |service|
yield(service)
end
end
# Gitaly note: JV: wait with migrating #update_ref until we know how to migrate its call sites.
def update_ref(ref, newrev, oldrev)
# We use 'git update-ref' because libgit2/rugged currently does not
# offer 'compare and swap' ref updates. Without compare-and-swap we can
# (and have!) accidentally reset the ref to an earlier state, clobbering
# commits. See also https://github.com/libgit2/libgit2/issues/1534.
command = %W[#{Gitlab.config.git.bin_path} update-ref --stdin -z]
output, status = popen(
command,
repository.path) do |stdin|
stdin.write("update #{ref}\x00#{newrev}\x00#{oldrev}\x00")
end
unless status.zero?
Gitlab::GitLogger.error("'git update-ref' in #{repository.path}: #{output}")
raise Gitlab::Git::CommitError.new(
"Could not update branch #{Gitlab::Git.branch_name(ref)}." \
" Please refresh and try again.")
end
end
def update_autocrlf_option
if repository.autocrlf != :input
repository.autocrlf = :input
end
end
end
end
end
# Gitaly note: JV: no RPC's here.
require 'open3'
module Gitlab
module Git
module Popen
FAST_GIT_PROCESS_TIMEOUT = 15.seconds
def popen(cmd, path, vars = {}, lazy_block: nil)
unless cmd.is_a?(Array)
raise "System commands must be given as an array of strings"
end
path ||= Dir.pwd
vars['PWD'] = path
options = { chdir: path }
cmd_output = ""
cmd_status = 0
Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|
stdout.set_encoding(Encoding::ASCII_8BIT)
# stderr and stdout pipes can block if stderr/stdout aren't drained: https://bugs.ruby-lang.org/issues/9082
# Mimic what Ruby does with capture3: https://github.com/ruby/ruby/blob/1ec544695fa02d714180ef9c34e755027b6a2103/lib/open3.rb#L257-L273
err_reader = Thread.new { stderr.read }
yield(stdin) if block_given?
stdin.close
if lazy_block
cmd_output = lazy_block.call(stdout.lazy)
cmd_status = 0
break
else
cmd_output << stdout.read
end
cmd_output << err_reader.value
cmd_status = wait_thr.value.exitstatus
end
[cmd_output, cmd_status]
end
def popen_with_timeout(cmd, timeout, path, vars = {})
unless cmd.is_a?(Array)
raise "System commands must be given as an array of strings"
end
path ||= Dir.pwd
vars['PWD'] = path
unless File.directory?(path)
FileUtils.mkdir_p(path)
end
rout, wout = IO.pipe
rerr, werr = IO.pipe
pid = Process.spawn(vars, *cmd, out: wout, err: werr, chdir: path, pgroup: true)
# stderr and stdout pipes can block if stderr/stdout aren't drained: https://bugs.ruby-lang.org/issues/9082
# Mimic what Ruby does with capture3: https://github.com/ruby/ruby/blob/1ec544695fa02d714180ef9c34e755027b6a2103/lib/open3.rb#L257-L273
out_reader = Thread.new { rout.read }
err_reader = Thread.new { rerr.read }
begin
# close write ends so we could read them
wout.close
werr.close
status = process_wait_with_timeout(pid, timeout)
cmd_output = out_reader.value
cmd_output << err_reader.value # Copying the behaviour of `popen` which merges stderr into output
[cmd_output, status.exitstatus]
rescue Timeout::Error => e
kill_process_group_for_pid(pid)
raise e
ensure
wout.close unless wout.closed?
werr.close unless werr.closed?
rout.close
rerr.close
end
end
def process_wait_with_timeout(pid, timeout)
deadline = timeout.seconds.from_now
wait_time = 0.01
while deadline > Time.now
sleep(wait_time)
_, status = Process.wait2(pid, Process::WNOHANG)
return status unless status.nil?
end
raise Timeout::Error, "Timeout waiting for process ##{pid}"
end
def kill_process_group_for_pid(pid)
Process.kill("KILL", -pid)
Process.wait(pid)
rescue Errno::ESRCH
end
end
end
end
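For reference, a minimal sketch of a consumer of this mixin (the class name and command are arbitrary):

```ruby
# Sketch only: a minimal consumer of Gitlab::Git::Popen.
class GitVersionProbe
  include Gitlab::Git::Popen

  def version
    output, status = popen(%W[#{Gitlab.config.git.bin_path} --version], nil)
    status.zero? ? output.strip : nil
  end
end
```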
......@@ -50,51 +50,6 @@ module Gitlab
entry[:oid]
end
end
def tree_entries_from_rugged(repository, sha, path, recursive)
current_path_entries = get_tree_entries_from_rugged(repository, sha, path)
ordered_entries = []
current_path_entries.each do |entry|
ordered_entries << entry
if recursive && entry.dir?
ordered_entries.concat(tree_entries_from_rugged(repository, sha, entry.path, true))
end
end
ordered_entries
end
def get_tree_entries_from_rugged(repository, sha, path)
commit = repository.lookup(sha)
root_tree = commit.tree
tree = if path
id = find_id_by_path(repository, root_tree.oid, path)
if id
repository.lookup(id)
else
[]
end
else
root_tree
end
tree.map do |entry|
new(
id: entry[:oid],
root_id: root_tree.oid,
name: entry[:name],
type: entry[:type],
mode: entry[:filemode].to_s(8),
path: path ? File.join(path, entry[:name]) : entry[:name],
commit_id: sha
)
end
rescue Rugged::ReferenceError
[]
end
end
def initialize(options)
......
module Gitlab
module Git
module Version
extend Gitlab::Git::Popen
def self.git_version
Gitlab::VersionInfo.parse(Gitaly::Server.all.first.git_binary_version)
end
......
......@@ -163,20 +163,6 @@ module Gitlab
Gitlab::Git::WikiPage.new(wiki_page, version)
end
end
def committer_with_hooks(commit_details)
Gitlab::Git::CommitterWithHooks.new(self, commit_details.to_h)
end
def with_committer_with_hooks(commit_details, &block)
committer = committer_with_hooks(commit_details)
yield committer
committer.commit
nil
end
end
end
end
......@@ -368,15 +368,6 @@ module Gitlab
private
def gitlab_projects(shard_name, disk_path)
Gitlab::Git::GitlabProjects.new(
shard_name,
disk_path,
global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
logger: Rails.logger
)
end
def gitlab_shell_fast_execute(cmd)
output, status = gitlab_shell_fast_execute_helper(cmd)
......
......@@ -208,6 +208,51 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
context 'when requesting JSON for a triggered job' do
let!(:merge_request) { create(:merge_request, source_project: project) }
let(:trigger) { create(:ci_trigger, project: project) }
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
let(:job) { create(:ci_build, pipeline: pipeline, trigger_request: trigger_request) }
before do
project.add_developer(user)
sign_in(user)
allow_any_instance_of(Ci::Build).to receive(:merge_request).and_return(merge_request)
end
context 'with no variables' do
before do
get_show(id: job.id, format: :json)
end
it 'exposes trigger information' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['trigger']['short_token']).to eq 'toke'
expect(json_response['trigger']['variables'].length).to eq 0
end
end
context 'with variables' do
before do
create(:ci_pipeline_variable, pipeline: pipeline, key: :TRIGGER_KEY_1, value: 'TRIGGER_VALUE_1')
get_show(id: job.id, format: :json)
end
it 'exposes trigger information and variables' do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['trigger']['short_token']).to eq 'toke'
expect(json_response['trigger']['variables'].length).to eq 1
expect(json_response['trigger']['variables'].first['key']).to eq "TRIGGER_KEY_1"
expect(json_response['trigger']['variables'].first['value']).to eq "TRIGGER_VALUE_1"
expect(json_response['trigger']['variables'].first['public']).to eq false
end
end
end
def get_show(**extra_params)
params = {
namespace_id: project.namespace.to_param,
......
......@@ -885,4 +885,18 @@ describe Projects::MergeRequestsController do
end
end
end
describe 'GET edit' do
it 'responds successfully' do
get :edit, namespace_id: project.namespace, project_id: project, id: merge_request
expect(response).to have_gitlab_http_status(:success)
end
it 'assigns the noteable to make sure autocompletes work' do
get :edit, namespace_id: project.namespace, project_id: project, id: merge_request
expect(assigns(:noteable)).not_to be_nil
end
end
end
......@@ -125,7 +125,7 @@ describe 'Dashboard Groups page', :js do
end
it 'loads results for next page' do
expect(page).to have_selector('.gl-pagination .page', count: 2)
expect(page).to have_selector('.gl-pagination .page-item a[role=menuitemradio]', count: 2)
# Check first page
expect(page).to have_content(group2.full_name)
......@@ -134,7 +134,7 @@ describe 'Dashboard Groups page', :js do
expect(page).not_to have_selector("#group-#{group.id}")
# Go to next page
find(".gl-pagination .page:not(.active) a").click
find('.gl-pagination .page-item:not(.active) a[role=menuitemradio]').click
wait_for_requests
......
......@@ -2,6 +2,7 @@ require 'spec_helper'
describe 'Import/Export - project import integration test', :js do
include Select2Helper
include GitHelpers
let(:user) { create(:user) }
let(:file) { File.join(Rails.root, 'spec', 'features', 'projects', 'import_export', 'test_project_export.tar.gz') }
......@@ -94,12 +95,6 @@ describe 'Import/Export - project import integration test', :js do
wiki.repository.exists? && !wiki.repository.empty?
end
def project_hook_exists?(project)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
Gitlab::Git::Hook.new('post-receive', project.repository.raw_repository).exists?
end
end
def click_import_project_tab
find('#import-project-tab').click
end
......
......@@ -174,6 +174,13 @@ describe ProjectsFinder do
end
end
describe 'filter by without_deleted' do
let(:params) { { without_deleted: true } }
let!(:pending_delete_project) { create(:project, :public, pending_delete: true) }
it { is_expected.to match_array([public_project, internal_project]) }
end
describe 'sorting' do
let(:params) { { sort: 'name_asc' } }
......
{
"allOf": [{ "$ref": "job.json" }],
"allOf": [
{ "$ref": "job.json" }
],
"description": "An extension of job.json with more detailed information",
"properties": {
"artifact": { "$ref": "artifact.json" },
"terminal_path": { "type": "string" }
"terminal_path": { "type": "string" },
"trigger": { "$ref": "trigger.json" }
}
}
{
"type": "object",
"required": [
"short_token",
"variables"
],
"properties": {
"short_token": { "type": "string" },
"variables": {
"type": "array",
"items": {
"type": "object",
"required": [
"key",
"value",
"public"
],
"properties": {
"key": { "type": "string" },
"value": { "type": "string" },
"public": { "type": "boolean" }
},
"additionalProperties": false
}
}
},
"additionalProperties": false
}
......@@ -24,6 +24,8 @@ const createComponent = (hideProjects = false) => {
const store = new GroupsStore(false);
const service = new GroupsService(mockEndpoint);
store.state.pageInfo = mockPageInfo;
return new Component({
propsData: {
store,
......@@ -484,7 +486,6 @@ describe('AppComponent', () => {
it('should render groups tree', done => {
vm.store.state.groups = [mockParentGroupItem];
vm.isLoading = false;
vm.store.state.pageInfo = mockPageInfo;
Vue.nextTick(() => {
expect(vm.$el.querySelector('.groups-list-tree-container')).toBeDefined();
done();
......
import Vue from 'vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import { s__ } from '~/locale';
import mountComponent from '../../helpers/vue_mount_component_helper';
describe('Pagination links component', () => {
const paginationLinksComponent = Vue.extend(PaginationLinks);
const change = page => page;
const pageInfo = {
page: 3,
perPage: 5,
total: 30,
};
const translations = {
firstText: s__('Pagination|« First'),
prevText: s__('Pagination|Prev'),
nextText: s__('Pagination|Next'),
lastText: s__('Pagination|Last »'),
};
let paginationLinks;
let glPagination;
let destinationComponent;
beforeEach(() => {
paginationLinks = mountComponent(
paginationLinksComponent,
{
change,
pageInfo,
},
);
[glPagination] = paginationLinks.$children;
[destinationComponent] = glPagination.$children;
});
afterEach(() => {
paginationLinks.$destroy();
});
it('should provide translated text to GitLab UI pagination', () => {
Object.entries(translations).forEach(entry =>
expect(
destinationComponent[entry[0]],
).toBe(entry[1]),
);
});
it('should pass change to GitLab UI pagination', () => {
expect(
Object.is(glPagination.change, change),
).toBe(true);
});
it('should pass page from pageInfo to GitLab UI pagination', () => {
expect(
destinationComponent.value,
).toBe(pageInfo.page);
});
it('should pass per page from pageInfo to GitLab UI pagination', () => {
expect(
destinationComponent.perPage,
).toBe(pageInfo.perPage);
});
it('should pass total items from pageInfo to GitLab UI pagination', () => {
expect(
destinationComponent.totalRows,
).toBe(pageInfo.total);
});
});
......@@ -66,4 +66,21 @@ describe Banzai::Filter::MarkdownFilter do
end
end
end
describe 'footnotes in tables' do
it 'processes footnotes in table cells' do
text = <<-MD.strip_heredoc
| Column1 |
| --------- |
| foot [^1] |
[^1]: a footnote
MD
result = filter(text)
expect(result).to include('<td>foot <sup')
expect(result).to include('<section class="footnotes">')
end
end
end
require 'spec_helper'
describe Gitlab::Git::CommitterWithHooks, :seed_helper do
# TODO https://gitlab.com/gitlab-org/gitaly/issues/1234
skip 'needs to be moved to gitaly-ruby test suite' do
shared_examples 'calling wiki hooks' do
let(:project) { create(:project) }
let(:user) { project.owner }
let(:project_wiki) { ProjectWiki.new(project, user) }
let(:wiki) { project_wiki.wiki }
let(:options) do
{
id: user.id,
username: user.username,
name: user.name,
email: user.email,
message: 'commit message'
}
end
subject { described_class.new(wiki, options) }
before do
project_wiki.create_page('home', 'test content')
end
shared_examples 'failing pre-receive hook' do
before do
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([false, ''])
expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('update')
expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('post-receive')
end
it 'raises exception' do
expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError)
end
it 'does not create a new commit inside the repository' do
current_rev = find_current_rev
expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError)
expect(current_rev).to eq find_current_rev
end
end
shared_examples 'failing update hook' do
before do
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([true, ''])
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('update').and_return([false, ''])
expect_any_instance_of(Gitlab::Git::HooksService).not_to receive(:run_hook).with('post-receive')
end
it 'raises exception' do
expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError)
end
it 'does not create a new commit inside the repository' do
current_rev = find_current_rev
expect { subject.commit }.to raise_error(Gitlab::Git::Wiki::OperationError)
expect(current_rev).to eq find_current_rev
end
end
shared_examples 'failing post-receive hook' do
before do
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('pre-receive').and_return([true, ''])
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('update').and_return([true, ''])
expect_any_instance_of(Gitlab::Git::HooksService).to receive(:run_hook).with('post-receive').and_return([false, ''])
end
it 'does not raise exception' do
expect { subject.commit }.not_to raise_error
end
it 'creates the commit' do
current_rev = find_current_rev
subject.commit
expect(current_rev).not_to eq find_current_rev
end
end
shared_examples 'when hook calls succeed' do
let(:hook) { double(:hook) }
it 'calls the three hooks' do
expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
expect(hook).to receive(:trigger).exactly(3).times.and_return([true, nil])
subject.commit
end
it 'creates the commit' do
current_rev = find_current_rev
subject.commit
expect(current_rev).not_to eq find_current_rev
end
end
context 'when creating a page' do
before do
project_wiki.create_page('index', 'test content')
end
it_behaves_like 'failing pre-receive hook'
it_behaves_like 'failing update hook'
it_behaves_like 'failing post-receive hook'
it_behaves_like 'when hook calls succeed'
end
context 'when updating a page' do
before do
project_wiki.update_page(find_page('home'), content: 'some other content', format: :markdown)
end
it_behaves_like 'failing pre-receive hook'
it_behaves_like 'failing update hook'
it_behaves_like 'failing post-receive hook'
it_behaves_like 'when hook calls succeed'
end
context 'when deleting a page' do
before do
project_wiki.delete_page(find_page('home'))
end
it_behaves_like 'failing pre-receive hook'
it_behaves_like 'failing update hook'
it_behaves_like 'failing post-receive hook'
it_behaves_like 'when hook calls succeed'
end
def find_current_rev
wiki.gollum_wiki.repo.commits.first&.sha
end
def find_page(name)
wiki.page(title: name)
end
end
context 'when Gitaly is enabled' do
it_behaves_like 'calling wiki hooks'
end
context 'when Gitaly is disabled', :disable_gitaly do
it_behaves_like 'calling wiki hooks'
end
end
end
......@@ -2,12 +2,24 @@ require "spec_helper"
describe Gitlab::Git::Diff, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:gitaly_diff) do
Gitlab::GitalyClient::Diff.new(
from_path: '.gitmodules',
to_path: '.gitmodules',
old_mode: 0100644,
new_mode: 0100644,
from_id: '0792c58905eff3432b721f8c4a64363d8e28d9ae',
to_id: 'efd587ccb47caf5f31fc954edb21f0a713d9ecc3',
overflow_marker: false,
collapsed: false,
too_large: false,
patch: "@@ -4,3 +4,6 @@\n [submodule \"gitlab-shell\"]\n \tpath = gitlab-shell\n \turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n"
)
end
before do
@raw_diff_hash = {
diff: <<EOT.gsub(/^ {8}/, "").sub(/\n$/, ""),
--- a/.gitmodules
+++ b/.gitmodules
@@ -4,3 +4,6 @@
[submodule "gitlab-shell"]
\tpath = gitlab-shell
......@@ -26,12 +38,6 @@ EOT
deleted_file: false,
too_large: false
}
# TODO use a Gitaly diff object instead
@rugged_diff = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
repository.rugged.diff("5937ac0a7beb003549fc5fd26fc247adbce4a52e^", "5937ac0a7beb003549fc5fd26fc247adbce4a52e", paths:
[".gitmodules"]).patches.first
end
end
describe '.new' do
......@@ -60,7 +66,7 @@ EOT
context 'using a Rugged::Patch' do
context 'with a small diff' do
let(:diff) { described_class.new(@rugged_diff) }
let(:diff) { described_class.new(gitaly_diff) }
it 'initializes the diff' do
expect(diff.to_hash).to eq(@raw_diff_hash)
......@@ -73,10 +79,8 @@ EOT
context 'using a diff that is too large' do
it 'prunes the diff' do
expect_any_instance_of(String).to receive(:bytesize)
.and_return(1024 * 1024 * 1024)
diff = described_class.new(@rugged_diff)
gitaly_diff.too_large = true
diff = described_class.new(gitaly_diff)
expect(diff.diff).to be_empty
expect(diff).to be_too_large
......@@ -84,33 +88,15 @@ EOT
end
context 'using a collapsable diff that is too large' do
before do
# The patch total size is 200, with lines between 21 and 54.
# This is a quick-and-dirty way to test this. Ideally, a new patch is
# added to the test repo with a size that falls between the real limits.
stub_const("#{described_class}::SIZE_LIMIT", 150)
stub_const("#{described_class}::COLLAPSE_LIMIT", 100)
end
it 'prunes the diff as a large diff instead of as a collapsed diff' do
diff = described_class.new(@rugged_diff, expanded: false)
gitaly_diff.too_large = true
diff = described_class.new(gitaly_diff, expanded: false)
expect(diff.diff).to be_empty
expect(diff).to be_too_large
expect(diff).not_to be_collapsed
end
end
context 'using a large binary diff' do
it 'does not prune the diff' do
expect_any_instance_of(Rugged::Diff::Delta).to receive(:binary?)
.and_return(true)
diff = described_class.new(@rugged_diff)
expect(diff.diff).not_to be_empty
end
end
end
context 'using a GitalyClient::Diff' do
......@@ -259,31 +245,37 @@ EOT
end
it 'leave non-binary diffs as-is' do
diff = described_class.new(@rugged_diff)
diff = described_class.new(gitaly_diff)
expect(diff.json_safe_diff).to eq(diff.diff)
end
end
describe '#submodule?' do
before do
# TODO use a Gitaly diff object instead
rugged_commit = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
repository.rugged.rev_parse('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
end
@diffs = rugged_commit.parents[0].diff(rugged_commit).patches
let(:gitaly_submodule_diff) do
Gitlab::GitalyClient::Diff.new(
from_path: 'gitlab-grack',
to_path: 'gitlab-grack',
old_mode: 0,
new_mode: 57344,
from_id: '0000000000000000000000000000000000000000',
to_id: '645f6c4c82fd3f5e06f67134450a570b795e55a6',
overflow_marker: false,
collapsed: false,
too_large: false,
patch: "@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n"
)
end
it { expect(described_class.new(@diffs[0]).submodule?).to eq(false) }
it { expect(described_class.new(@diffs[1]).submodule?).to eq(true) }
it { expect(described_class.new(gitaly_diff).submodule?).to eq(false) }
it { expect(described_class.new(gitaly_submodule_diff).submodule?).to eq(true) }
end
describe '#line_count' do
it 'returns the correct number of lines' do
diff = described_class.new(@rugged_diff)
diff = described_class.new(gitaly_diff)
expect(diff.line_count).to eq(9)
expect(diff.line_count).to eq(7)
end
end
......
require 'spec_helper'
describe Gitlab::Git::GitlabProjects do
after do
TestEnv.clean_test_path
end
around do |example|
# TODO move this spec to gitaly-ruby. GitlabProjects is not used in gitlab-ce
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
let(:project) { create(:project, :repository) }
if $VERBOSE
let(:logger) { Logger.new(STDOUT) }
else
let(:logger) { double('logger').as_null_object }
end
let(:tmp_repos_path) { TestEnv.repos_path }
let(:repo_name) { project.disk_path + '.git' }
let(:tmp_repo_path) { File.join(tmp_repos_path, repo_name) }
let(:gl_projects) { build_gitlab_projects(TestEnv::REPOS_STORAGE, repo_name) }
describe '#initialize' do
it { expect(gl_projects.shard_path).to eq(tmp_repos_path) }
it { expect(gl_projects.repository_relative_path).to eq(repo_name) }
it { expect(gl_projects.repository_absolute_path).to eq(File.join(tmp_repos_path, repo_name)) }
it { expect(gl_projects.logger).to eq(logger) }
end
describe '#push_branches' do
let(:remote_name) { 'remote-name' }
let(:branch_name) { 'master' }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} push -- #{remote_name} #{branch_name}) }
let(:force) { false }
subject { gl_projects.push_branches(remote_name, 600, force, [branch_name]) }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, success: true)
is_expected.to be_truthy
end
it 'fails' do
stub_spawn(cmd, 600, tmp_repo_path, success: false)
is_expected.to be_falsy
end
context 'with --force' do
let(:cmd) { %W(#{Gitlab.config.git.bin_path} push --force -- #{remote_name} #{branch_name}) }
let(:force) { true }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, success: true)
is_expected.to be_truthy
end
end
end
describe '#fetch_remote' do
let(:remote_name) { 'remote-name' }
let(:branch_name) { 'master' }
let(:force) { false }
let(:prune) { true }
let(:tags) { true }
let(:args) { { force: force, tags: tags, prune: prune }.merge(extra_args) }
let(:extra_args) { {} }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} fetch #{remote_name} --quiet --prune --tags) }
subject { gl_projects.fetch_remote(remote_name, 600, args) }
def stub_tempfile(name, filename, opts = {})
chmod = opts.delete(:chmod)
file = StringIO.new
allow(file).to receive(:close!)
allow(file).to receive(:path).and_return(name)
expect(Tempfile).to receive(:new).with(filename).and_return(file)
expect(file).to receive(:chmod).with(chmod) if chmod
file
end
context 'with default args' do
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
it 'fails' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: false)
is_expected.to be_falsy
end
end
context 'with --force' do
let(:force) { true }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} fetch #{remote_name} --quiet --prune --force --tags) }
it 'executes the command with forced option' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
end
context 'with --no-tags' do
let(:tags) { false }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} fetch #{remote_name} --quiet --prune --no-tags) }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
end
context 'with no prune' do
let(:prune) { false }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} fetch #{remote_name} --quiet --tags) }
it 'executes the command' do
stub_spawn(cmd, 600, tmp_repo_path, {}, success: true)
is_expected.to be_truthy
end
end
describe 'with an SSH key' do
let(:extra_args) { { ssh_key: 'SSH KEY' } }
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/keyFile', 'gitlab-shell-key-file', chmod: 0o400)
stub_spawn(cmd, 600, tmp_repo_path, { 'GIT_SSH' => 'scriptFile' }, success: true)
is_expected.to be_truthy
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oIdentityFile=\"/tmp files/keyFile\"' '-oIdentitiesOnly=\"yes\"' \"$@\"")
expect(key.string).to eq('SSH KEY')
end
end
describe 'with known_hosts data' do
let(:extra_args) { { known_hosts: 'KNOWN HOSTS' } }
it 'sets GIT_SSH to a custom script' do
script = stub_tempfile('scriptFile', 'gitlab-shell-ssh-wrapper', chmod: 0o755)
key = stub_tempfile('/tmp files/knownHosts', 'gitlab-shell-known-hosts', chmod: 0o400)
stub_spawn(cmd, 600, tmp_repo_path, { 'GIT_SSH' => 'scriptFile' }, success: true)
is_expected.to be_truthy
expect(script.string).to eq("#!/bin/sh\nexec ssh '-oStrictHostKeyChecking=\"yes\"' '-oUserKnownHostsFile=\"/tmp files/knownHosts\"' \"$@\"")
expect(key.string).to eq('KNOWN HOSTS')
end
end
end
describe '#import_project' do
let(:project) { create(:project) }
let(:import_url) { TestEnv.factory_repo_path_bare }
let(:cmd) { %W(#{Gitlab.config.git.bin_path} clone --bare -- #{import_url} #{tmp_repo_path}) }
let(:timeout) { 600 }
subject { gl_projects.import_project(import_url, timeout) }
shared_examples 'importing repository' do
context 'successful import' do
it 'imports a repo' do
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_falsy
is_expected.to be_truthy
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_truthy
end
end
context 'already exists' do
it "doesn't import" do
FileUtils.mkdir_p(tmp_repo_path)
is_expected.to be_falsy
end
end
end
describe 'logging' do
it 'imports a repo' do
message = "Importing project from <#{import_url}> to <#{tmp_repo_path}>."
expect(logger).to receive(:info).with(message)
subject
end
end
context 'timeout' do
it 'does not import a repo' do
stub_spawn_timeout(cmd, timeout, nil)
message = "Importing project from <#{import_url}> to <#{tmp_repo_path}> failed."
expect(logger).to receive(:error).with(message)
is_expected.to be_falsy
expect(gl_projects.output).to eq("Timed out\n")
expect(File.exist?(File.join(tmp_repo_path, 'HEAD'))).to be_falsy
end
end
it_behaves_like 'importing repository'
end
describe '#fork_repository' do
let(:dest_repos) { TestEnv::REPOS_STORAGE }
let(:dest_repos_path) { tmp_repos_path }
let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') }
let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) }
subject { gl_projects.fork_repository(dest_repos, dest_repo_name) }
before do
FileUtils.mkdir_p(dest_repos_path)
end
after do
FileUtils.rm_rf(dest_repos_path)
end
shared_examples 'forking a repository' do
it 'forks the repository' do
is_expected.to be_truthy
expect(File.exist?(dest_repo)).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
end
it 'does not fork if a project of the same name already exists' do
# create a fake project at the intended destination
FileUtils.mkdir_p(dest_repo)
is_expected.to be_falsy
end
end
it_behaves_like 'forking a repository'
# We seem to be stuck with having only one working Gitaly storage in tests. Changing
# that is not very straightforward, so I'm leaving this test here for now until
# https://gitlab.com/gitlab-org/gitlab-ce/issues/41393 is fixed.
context 'different storages' do
let(:dest_repos) { 'alternative' }
let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), dest_repos) }
before do
stub_storage_settings(dest_repos => { 'path' => dest_repos_path })
end
it 'forks the repo' do
is_expected.to be_truthy
expect(File.exist?(dest_repo)).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
end
end
describe 'log messages' do
describe 'successful fork' do
it do
message = "Forking repository from <#{tmp_repo_path}> to <#{dest_repo}>."
expect(logger).to receive(:info).with(message)
subject
end
end
describe 'failed fork due to existing destination' do
it do
FileUtils.mkdir_p(dest_repo)
message = "fork-repository failed: destination repository <#{dest_repo}> already exists."
expect(logger).to receive(:error).with(message)
subject
end
end
end
end
def build_gitlab_projects(*args)
described_class.new(
*args,
global_hooks_path: Gitlab.config.gitlab_shell.hooks_path,
logger: logger
)
end
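# stub_spawn and stub_spawn_timeout replace popen_with_timeout so no real git
# process is spawned; a nil exit status stands in for a failed command, and a
# raised Timeout::Error simulates the command exceeding its timeout.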
def stub_spawn(*args, success: true)
exitstatus = success ? 0 : nil
expect(gl_projects).to receive(:popen_with_timeout).with(*args)
.and_return(["output", exitstatus])
end
def stub_spawn_timeout(*args)
expect(gl_projects).to receive(:popen_with_timeout).with(*args)
.and_raise(Timeout::Error)
end
end
require 'spec_helper'
require 'fileutils'
describe Gitlab::Git::Hook do
before do
# We need this because in spec/spec_helper.rb we define it like this:
# allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
allow_any_instance_of(described_class).to receive(:trigger).and_call_original
end
around do |example|
# TODO move hook tests to gitaly-ruby. Hook will disappear from gitlab-ce
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
describe "#trigger" do
set(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw_repository }
let(:repo_path) { repository.path }
let(:hooks_dir) { File.join(repo_path, 'hooks') }
let(:user) { create(:user) }
let(:gl_id) { Gitlab::GlId.gl_id(user) }
let(:gl_username) { user.username }
def create_hook(name)
FileUtils.mkdir_p(hooks_dir)
hook_path = File.join(hooks_dir, name)
File.open(hook_path, 'w', 0755) do |f|
f.write(<<~HOOK)
#!/bin/sh
exit 0
HOOK
end
end
def create_failing_hook(name)
FileUtils.mkdir_p(hooks_dir)
hook_path = File.join(hooks_dir, name)
File.open(hook_path, 'w', 0755) do |f|
f.write(<<~HOOK)
#!/bin/sh
echo 'regular message from the hook'
echo 'error message from the hook' 1>&2
echo 'error message from the hook line 2' 1>&2
exit 1
HOOK
end
end
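# pre-receive, post-receive and update are the server-side hooks git invokes around
# a push, so each one is exercised with the same success and failure cases below.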
['pre-receive', 'post-receive', 'update'].each do |hook_name|
context "when triggering a #{hook_name} hook" do
context "when the hook is successful" do
let(:hook_path) { File.join(hooks_dir, hook_name) }
let(:gl_repository) { Gitlab::GlRepository.gl_repository(project, false) }
let(:env) do
{
'GL_ID' => gl_id,
'GL_USERNAME' => gl_username,
'PWD' => repo_path,
'GL_PROTOCOL' => 'web',
'GL_REPOSITORY' => gl_repository
}
end
it "returns success with no errors" do
create_hook(hook_name)
hook = described_class.new(hook_name, repository)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
if hook_name != 'update'
expect(Open3).to receive(:popen3)
.with(env, hook_path, chdir: repo_path).and_call_original
end
status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be true
expect(errors).to be_blank
end
end
context "when the hook is unsuccessful" do
it "returns failure with errors" do
create_failing_hook(hook_name)
hook = described_class.new(hook_name, repository)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be false
expect(errors).to eq("error message from the hook\nerror message from the hook line 2\n")
end
end
end
end
context "when the hook doesn't exist" do
it "returns success with no errors" do
hook = described_class.new('unknown_hook', repository)
blank = Gitlab::Git::BLANK_SHA
ref = Gitlab::Git::BRANCH_REF_PREFIX + 'new_branch'
status, errors = hook.trigger(gl_id, gl_username, blank, blank, ref)
expect(status).to be true
expect(errors).to be_nil
end
end
end
end
require 'spec_helper'
describe Gitlab::Git::HooksService, :seed_helper do
let(:gl_id) { 'user-456' }
let(:gl_username) { 'janedoe' }
let(:user) { Gitlab::Git::User.new(gl_username, 'Jane Doe', 'janedoe@example.com', gl_id) }
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, 'project-123') }
let(:service) { described_class.new }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { SeedRepo::Commit::PARENT_ID }
let(:newrev) { SeedRepo::Commit::ID }
let(:ref) { 'refs/heads/feature' }
describe '#execute' do
context 'when receive hooks were successful' do
let(:hook) { double(:hook) }
it 'calls all three hooks' do
expect(Gitlab::Git::Hook).to receive(:new).exactly(3).times.and_return(hook)
expect(hook).to receive(:trigger).with(gl_id, gl_username, blankrev, newrev, ref)
.exactly(3).times.and_return([true, nil])
service.execute(user, repository, blankrev, newrev, ref) { }
end
end
context 'when pre-receive hook failed' do
it 'does not call post-receive hook' do
expect(service).to receive(:run_hook).with('pre-receive').and_return([false, 'hello world'])
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
service.execute(user, repository, blankrev, newrev, ref)
end.to raise_error(Gitlab::Git::PreReceiveError, 'hello world')
end
end
context 'when update hook failed' do
it 'does not call post-receive hook' do
expect(service).to receive(:run_hook).with('pre-receive').and_return([true, nil])
expect(service).to receive(:run_hook).with('update').and_return([false, 'hello world'])
expect(service).not_to receive(:run_hook).with('post-receive')
expect do
service.execute(user, repository, blankrev, newrev, ref)
end.to raise_error(Gitlab::Git::PreReceiveError, 'hello world')
end
end
end
end
require 'spec_helper'
describe Gitlab::Git::Index, :seed_helper do
let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '') }
let(:index) { described_class.new(repository) }
before do
index.read_tree(lookup('master').tree)
end
around do |example|
# TODO move these specs to gitaly-ruby. The Index class will disappear from gitlab-ce
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
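# The index under test is seeded from master's tree in the before block above; the
# lookup helper at the bottom of this file resolves the resulting blob OIDs through
# rugged so their content can be asserted on.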
describe '#create' do
let(:options) do
{
content: 'Lorem ipsum...',
file_path: 'documents/story.txt'
}
end
context 'when no file at that path exists' do
it 'creates the file in the index' do
index.create(options)
entry = index.get(options[:file_path])
expect(entry).not_to be_nil
expect(lookup(entry[:oid]).content).to eq(options[:content])
end
end
context 'when a file at that path exists' do
before do
options[:file_path] = 'files/executables/ls'
end
it 'raises an error' do
expect { index.create(options) }.to raise_error('A file with this name already exists')
end
end
context 'when content is in base64' do
before do
options[:content] = Base64.encode64(options[:content])
options[:encoding] = 'base64'
end
it 'decodes base64' do
index.create(options)
entry = index.get(options[:file_path])
expect(lookup(entry[:oid]).content).to eq(Base64.decode64(options[:content]))
end
end
context 'when content contains CRLF' do
before do
repository.autocrlf = :input
options[:content] = "Hello,\r\nWorld"
end
it 'converts to LF' do
index.create(options)
entry = index.get(options[:file_path])
expect(lookup(entry[:oid]).content).to eq("Hello,\nWorld")
end
end
end
describe '#create_dir' do
let(:options) do
{
file_path: 'newdir'
}
end
context 'when no file or dir at that path exists' do
it 'creates the dir in the index' do
index.create_dir(options)
entry = index.get(options[:file_path] + '/.gitkeep')
expect(entry).not_to be_nil
end
end
context 'when a file at that path exists' do
before do
options[:file_path] = 'files/executables/ls'
end
it 'raises an error' do
expect { index.create_dir(options) }.to raise_error('A file with this name already exists')
end
end
context 'when a directory at that path exists' do
before do
options[:file_path] = 'files/executables'
end
it 'raises an error' do
expect { index.create_dir(options) }.to raise_error('A directory with this name already exists')
end
end
end
describe '#update' do
let(:options) do
{
content: 'Lorem ipsum...',
file_path: 'README.md'
}
end
context 'when no file at that path exists' do
before do
options[:file_path] = 'documents/story.txt'
end
it 'raises an error' do
expect { index.update(options) }.to raise_error("A file with this name doesn't exist")
end
end
context 'when a file at that path exists' do
it 'updates the file in the index' do
index.update(options)
entry = index.get(options[:file_path])
expect(lookup(entry[:oid]).content).to eq(options[:content])
end
it 'preserves file mode' do
options[:file_path] = 'files/executables/ls'
index.update(options)
entry = index.get(options[:file_path])
expect(entry[:mode]).to eq(0100755)
end
end
end
describe '#move' do
let(:options) do
{
content: 'Lorem ipsum...',
previous_path: 'README.md',
file_path: 'NEWREADME.md'
}
end
context 'when no file at that path exists' do
it 'raises an error' do
options[:previous_path] = 'documents/story.txt'
expect { index.move(options) }.to raise_error("A file with this name doesn't exist")
end
end
context 'when a file at the new path already exists' do
it 'raises an error' do
options[:file_path] = 'CHANGELOG'
expect { index.move(options) }.to raise_error("A file with this name already exists")
end
end
context 'when a file at that path exists' do
it 'removes the old file in the index' do
index.move(options)
entry = index.get(options[:previous_path])
expect(entry).to be_nil
end
it 'creates the new file in the index' do
index.move(options)
entry = index.get(options[:file_path])
expect(entry).not_to be_nil
expect(lookup(entry[:oid]).content).to eq(options[:content])
end
it 'preserves file mode' do
options[:previous_path] = 'files/executables/ls'
index.move(options)
entry = index.get(options[:file_path])
expect(entry[:mode]).to eq(0100755)
end
end
end
describe '#delete' do
let(:options) do
{
file_path: 'README.md'
}
end
context 'when no file at that path exists' do
before do
options[:file_path] = 'documents/story.txt'
end
it 'raises an error' do
expect { index.delete(options) }.to raise_error("A file with this name doesn't exist")
end
end
context 'when a file at that path exists' do
it 'removes the file in the index' do
index.delete(options)
entry = index.get(options[:file_path])
expect(entry).to be_nil
end
end
end
def lookup(revision)
repository.rugged.rev_parse(revision)
end
end
require 'spec_helper'
describe 'Gitlab::Git::Popen' do
let(:path) { Rails.root.join('tmp').to_s }
let(:test_string) { 'The quick brown fox jumped over the lazy dog' }
# The pipe buffer is typically 64K. This string is about 440K.
let(:spew_command) { ['bash', '-c', "for i in {1..10000}; do echo '#{test_string}' 1>&2; done"] }
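# A child process that writes more than a pipe buffer's worth of data to stderr will
# block unless the parent drains the pipe concurrently; the examples below use
# spew_command to check that popen and popen_with_timeout handle this.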
let(:klass) do
Class.new(Object) do
include Gitlab::Git::Popen
end
end
context 'popen' do
context 'zero status' do
let(:result) { klass.new.popen(%w(ls), path) }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to be_zero }
it { expect(output).to include('tests') }
end
context 'non-zero status' do
let(:result) { klass.new.popen(%w(cat NOTHING), path) }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to eq(1) }
it { expect(output).to include('No such file or directory') }
end
context 'unsafe string command' do
it 'raises an error when it gets called with a string argument' do
expect { klass.new.popen('ls', path) }.to raise_error(RuntimeError)
end
end
context 'with custom options' do
let(:vars) { { 'foobar' => 123, 'PWD' => path } }
let(:options) { { chdir: path } }
it 'calls popen3 with the provided environment variables' do
expect(Open3).to receive(:popen3).with(vars, 'ls', options)
klass.new.popen(%w(ls), path, { 'foobar' => 123 })
end
end
context 'use stdin' do
let(:result) { klass.new.popen(%w[cat], path) { |stdin| stdin.write 'hello' } }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to be_zero }
it { expect(output).to eq('hello') }
end
context 'with lazy block' do
it 'yields a lazy io' do
expect_lazy_io = lambda do |io|
expect(io).to be_a Enumerator::Lazy
expect(io.inspect).to include('#<IO:fd')
end
klass.new.popen(%w[ls], path, lazy_block: expect_lazy_io)
end
it "doesn't wait for process exit" do
Timeout.timeout(2) do
klass.new.popen(%w[yes], path, lazy_block: ->(io) {})
end
end
end
context 'with a process that writes a lot of data to stderr' do
it 'returns zero' do
output, status = klass.new.popen(spew_command, path)
expect(output).to include(test_string)
expect(status).to eq(0)
end
end
end
context 'popen_with_timeout' do
let(:timeout) { 1.second }
context 'no timeout' do
context 'zero status' do
let(:result) { klass.new.popen_with_timeout(%w(ls), timeout, path) }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to be_zero }
it { expect(output).to include('tests') }
end
context 'multi-line string' do
let(:test_string) { "this is 1 line\n2nd line\n3rd line\n" }
let(:result) { klass.new.popen_with_timeout(['echo', test_string], timeout, path) }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to be_zero }
# echo appends its own trailing newline
it { expect(output).to eq(test_string + "\n") }
end
context 'non-zero status' do
let(:result) { klass.new.popen_with_timeout(%w(cat NOTHING), timeout, path) }
let(:output) { result.first }
let(:status) { result.last }
it { expect(status).to eq(1) }
it { expect(output).to include('No such file or directory') }
end
context 'unsafe string command' do
it 'raises an error when it gets called with a string argument' do
expect { klass.new.popen_with_timeout('ls', timeout, path) }.to raise_error(RuntimeError)
end
end
end
context 'timeout' do
context 'timeout' do
it "raises a Timeout::Error" do
expect { klass.new.popen_with_timeout(%w(sleep 1000), timeout, path) }.to raise_error(Timeout::Error)
end
it "handles processes that do not shutdown correctly" do
expect { klass.new.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
end
it 'handles a process that writes a lot of data to stderr' do
output, status = klass.new.popen_with_timeout(spew_command, timeout, path)
expect(output).to include(test_string)
expect(status).to eq(0)
end
end
context 'timeout period' do
let(:time_taken) do
begin
start = Time.now
klass.new.popen_with_timeout(%w(sleep 1000), timeout, path)
rescue
Time.now - start
end
end
it { expect(time_taken).to be >= timeout }
end
context 'clean up' do
let(:instance) { klass.new }
it 'kills the child process' do
expect(instance).to receive(:kill_process_group_for_pid).and_wrap_original do |m, *args|
# args.first is the PID; once the original method has been called below, that process should no longer be running
m.call(*args)
pid = args.first
begin
Process.getpgid(pid)
raise "The child process should have been killed"
rescue Errno::ESRCH
end
end
expect { instance.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
end
end
end
end
end
......
require 'spec_helper'
describe Gitlab::ImportExport::RepoRestorer do
include GitHelpers
describe 'bundle a project Git repo' do
let(:user) { create(:user) }
let!(:project_with_repo) { create(:project, :repository, name: 'test-repo-restorer', path: 'test-repo-restorer') }
......@@ -36,9 +38,7 @@ describe Gitlab::ImportExport::RepoRestorer do
it 'has the webhooks' do
restorer.restore
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
expect(Gitlab::Git::Hook.new('post-receive', project.repository.raw_repository)).to exist
end
expect(project_hook_exists?(project)).to be true
end
end
end
......@@ -7,15 +7,10 @@ describe Gitlab::Shell do
let(:repository) { project.repository }
let(:gitlab_shell) { described_class.new }
let(:popen_vars) { { 'GIT_TERMINAL_PROMPT' => ENV['GIT_TERMINAL_PROMPT'] } }
let(:gitlab_projects) { double('gitlab_projects') }
let(:timeout) { Gitlab.config.gitlab_shell.git_timeout }
before do
allow(Project).to receive(:find).and_return(project)
allow(gitlab_shell).to receive(:gitlab_projects)
.with(project.repository_storage, project.disk_path + '.git')
.and_return(gitlab_projects)
end
it { is_expected.to respond_to :add_key }
......
......@@ -108,8 +108,21 @@ describe Clusters::Applications::Jupyter do
expect(values).to include('rbac')
expect(values).to include('proxy')
expect(values).to include('auth')
expect(values).to include('singleuser')
expect(values).to match(/clientId: '?#{application.oauth_application.uid}/)
expect(values).to match(/callbackUrl: '?#{application.callback_url}/)
end
context 'when cluster belongs to a project' do
let(:project) { create(:project) }
before do
application.cluster.projects << project
end
it 'sets GitLab project id' do
expect(values).to match(/GITLAB_PROJECT_ID: '?#{project.id}/)
end
end
end
end
......@@ -1028,194 +1028,6 @@ describe Repository do
end
end
describe '#update_branch_with_hooks' do
let(:old_rev) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' } # git rev-parse feature
let(:new_rev) { 'a74ae73c1ccde9b974a70e82b901588071dc142a' } # commit whose parent is old_rev
let(:updating_ref) { 'refs/heads/feature' }
let(:target_project) { project }
let(:target_repository) { target_project.repository }
around do |example|
# TODO Gitlab::Git::OperationService will be moved to gitaly-ruby and disappear from this repo
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
context 'when pre hooks were successful' do
before do
service = Gitlab::Git::HooksService.new
expect(Gitlab::Git::HooksService).to receive(:new).and_return(service)
expect(service).to receive(:execute)
.with(git_user, target_repository.raw_repository, old_rev, new_rev, updating_ref)
.and_yield(service).and_return(true)
end
it 'runs without errors' do
expect do
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
end.not_to raise_error
end
it 'ensures the autocrlf Git option is set to :input' do
service = Gitlab::Git::OperationService.new(git_user, repository.raw_repository)
expect(service).to receive(:update_autocrlf_option)
service.with_branch('feature') { new_rev }
end
context "when the branch wasn't empty" do
it 'updates the head' do
expect(repository.find_branch('feature').dereferenced_target.id).to eq(old_rev)
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
expect(repository.find_branch('feature').dereferenced_target.id).to eq(new_rev)
end
end
context 'when target project does not have the commit' do
let(:target_project) { create(:project, :empty_repo) }
let(:old_rev) { Gitlab::Git::BLANK_SHA }
let(:new_rev) { project.commit('feature').sha }
let(:updating_ref) { 'refs/heads/master' }
it 'calls fetch_ref and creates the branch' do
expect(target_project.repository.raw_repository).to receive(:fetch_ref)
.and_call_original
Gitlab::Git::OperationService.new(git_user, target_repository.raw_repository)
.with_branch(
'master',
start_repository: project.repository.raw_repository,
start_branch_name: 'feature') { new_rev }
expect(target_repository.branch_names).to contain_exactly('master')
end
end
context 'when target project already has the commit' do
let(:target_project) { create(:project, :repository) }
it 'does not call fetch_ref and just passes the commit' do
expect(target_repository).not_to receive(:fetch_ref)
Gitlab::Git::OperationService.new(git_user, target_repository.raw_repository)
.with_branch('feature', start_repository: project.repository.raw_repository) { new_rev }
end
end
end
context 'when temporary ref failed to be created from other project' do
let(:target_project) { create(:project, :empty_repo) }
before do
expect(target_project.repository.raw_repository).to receive(:run_git)
end
it 'raises Gitlab::Git::CommandError' do
expect do
Gitlab::Git::OperationService.new(git_user, target_project.repository.raw_repository)
.with_branch('feature',
start_repository: project.repository.raw_repository,
&:itself)
end.to raise_error(Gitlab::Git::CommandError)
end
end
context 'when the update adds more than one commit' do
let(:old_rev) { '33f3729a45c02fc67d00adb1b8bca394b0e761d9' }
it 'runs without errors' do
# old_rev is an ancestor of new_rev
expect(repository.merge_base(old_rev, new_rev)).to eq(old_rev)
# old_rev is not a direct ancestor (parent) of new_rev
expect(repository.rugged.lookup(new_rev).parent_ids).not_to include(old_rev)
branch = 'feature-ff-target'
repository.add_branch(user, branch, old_rev)
expect do
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch(branch) do
new_rev
end
end.not_to raise_error
end
end
context 'when the update would remove commits from the target branch' do
let(:branch) { 'master' }
let(:old_rev) { repository.find_branch(branch).dereferenced_target.sha }
it 'raises an exception' do
# The 'master' branch is NOT an ancestor of new_rev.
expect(repository.merge_base(old_rev, new_rev)).not_to eq(old_rev)
# Updating 'master' to new_rev would lose the commits on 'master' that
# are not contained in new_rev. This should not be allowed.
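# (In git terms the update is not a fast-forward, which is why OperationService is
# expected to raise below rather than move the branch.)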
expect do
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch(branch) do
new_rev
end
end.to raise_error(Gitlab::Git::CommitError)
end
end
context 'when pre hooks failed' do
it 'gets an error' do
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([false, ''])
expect do
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('feature') do
new_rev
end
end.to raise_error(Gitlab::Git::PreReceiveError)
end
end
context 'when target branch is different from source branch' do
before do
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, ''])
end
subject do
Gitlab::Git::OperationService.new(git_user, repository.raw_repository).with_branch('new-feature') do
new_rev
end
end
it 'returns branch_created as true' do
expect(subject).not_to be_repo_created
expect(subject).to be_branch_created
end
end
context 'when repository is empty' do
before do
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, ''])
end
it 'expires creation and branch cache' do
empty_repository = create(:project, :empty_repo).repository
expect(empty_repository).to receive(:expire_exists_cache)
expect(empty_repository).to receive(:expire_root_ref_cache)
expect(empty_repository).to receive(:expire_emptiness_caches)
expect(empty_repository).to receive(:expire_branches_cache)
empty_repository.create_file(user, 'CHANGELOG', 'Changelog!',
message: 'Updates file content',
branch_name: 'master')
end
end
end
describe '#exists?' do
it 'returns true when a repository exists' do
expect(repository.exists?).to be(true)
......@@ -1298,40 +1110,6 @@ describe Repository do
end
end
describe '#update_autocrlf_option' do
around do |example|
# TODO Gitlab::Git::OperationService will be moved to gitaly-ruby and disappear from this repo
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
describe 'when autocrlf is not already set to :input' do
before do
repository.raw_repository.autocrlf = true
end
it 'sets autocrlf to :input' do
Gitlab::Git::OperationService.new(nil, repository.raw_repository).send(:update_autocrlf_option)
expect(repository.raw_repository.autocrlf).to eq(:input)
end
end
describe 'when autocrlf is already set to :input' do
before do
repository.raw_repository.autocrlf = :input
end
it 'does nothing' do
expect(repository.raw_repository).not_to receive(:autocrlf=)
.with(:input)
Gitlab::Git::OperationService.new(nil, repository.raw_repository).send(:update_autocrlf_option)
end
end
end
describe '#empty?' do
let(:empty_repository) { create(:project_empty_repo).repository }
......@@ -2025,27 +1803,6 @@ describe Repository do
end
end
describe '#update_ref' do
around do |example|
# TODO Gitlab::Git::OperationService will be moved to gitaly-ruby and disappear from this repo
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
example.run
end
end
it 'can create a ref' do
Gitlab::Git::OperationService.new(nil, repository.raw_repository).send(:update_ref, 'refs/heads/foobar', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
expect(repository.find_branch('foobar')).not_to be_nil
end
it 'raises CommitError when the ref update fails' do
expect do
Gitlab::Git::OperationService.new(nil, repository.raw_repository).send(:update_ref, 'refs/heads/master', 'refs/heads/master', Gitlab::Git::BLANK_SHA)
end.to raise_error(Gitlab::Git::CommitError)
end
end
describe '#contribution_guide', :use_clean_rails_memory_store_caching do
it 'returns and caches the output' do
expect(repository).to receive(:file_on_head)
......
......@@ -148,6 +148,16 @@ describe API::Projects do
expect(json_response.first.keys).to include('open_issues_count')
end
it 'does not include projects marked for deletion' do
project.update(pending_delete: true)
get api('/projects', user)
expect(response).to have_gitlab_http_status(200)
expect(json_response).to be_an Array
expect(json_response.map { |p| p['id'] }).not_to include(project.id)
end
it 'does not include open_issues_count if issues are disabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
......@@ -1062,6 +1072,15 @@ describe API::Projects do
expect(json_response).not_to include("import_error")
end
it 'returns 404 when project is marked for deletion' do
project.update(pending_delete: true)
get api("/projects/#{project.id}", user)
expect(response).to have_gitlab_http_status(404)
expect(json_response['message']).to eq('404 Project Not Found')
end
context 'links exposure' do
it 'exposes related resources full URIs' do
get api("/projects/#{project.id}", user)
......
# frozen_string_literal: true
module GitHelpers
def project_hook_exists?(project)
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
project_path = project.repository.raw_repository.path
File.exist?(File.join(project_path, 'hooks', 'post-receive'))
end
end
end
......@@ -107,10 +107,6 @@ module TestEnv
.and_call_original
end
def disable_pre_receive
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
end
# Clean /tmp/tests
#
# Keeps gitlab-shell and gitlab-test
......
......@@ -73,9 +73,13 @@ RSpec.shared_examples 'an editable merge request' do
it 'description has autocomplete', :js do
find('#merge_request_description').native.send_keys('')
fill_in 'merge_request_description', with: '@'
fill_in 'merge_request_description', with: user.to_reference[0..4]
expect(page).to have_selector('.atwho-view')
wait_for_requests
page.within('.atwho-view') do
expect(page).to have_content(user2.name)
end
end
it 'has class js-quick-submit in form' do
......
# frozen_string_literal: true
require 'spec_helper'
describe ProjectServiceWorker, '#perform' do
let(:worker) { described_class.new }
let(:service) { JiraService.new }
before do
allow(Service).to receive(:find).and_return(service)
end
it 'executes service with given data' do
data = { test: 'test' }
expect(service).to receive(:execute).with(data)
worker.perform(1, data)
end
it 'logs error messages' do
allow(service).to receive(:execute).and_raise(StandardError, 'invalid URL')
expect(Sidekiq.logger).to receive(:error).with({ class: described_class.name, service_class: service.class.name, message: "invalid URL" })
worker.perform(1, {})
end
end