Commit 1fa79760 authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 82fa8a3d
......@@ -102,7 +102,7 @@ export default {
return this.tagsPagination.page;
},
set(page) {
this.requestTagsList({ pagination: { page }, id: this.$route.params.id });
this.requestTagsList({ pagination: { page }, params: this.$route.params.id });
},
},
},
......
<script>
import { GlIcon, GlFormGroup, GlFormRadio, GlFormRadioGroup, GlLink } from '@gitlab/ui';
import { SNIPPET_VISIBILITY } from '~/snippets/constants';
export default {
components: {
GlIcon,
GlFormGroup,
GlFormRadio,
GlFormRadioGroup,
GlLink,
},
props: {
helpLink: {
type: String,
default: '',
required: false,
},
isProjectSnippet: {
type: Boolean,
required: false,
default: false,
},
visibilityLevel: {
type: String,
default: '0',
required: false,
},
},
data() {
return {
selected: this.visibilityLevel,
};
},
computed: {
visibilityOptions() {
return [
{
value: '0',
icon: 'lock',
text: SNIPPET_VISIBILITY.private.label,
description: this.isProjectSnippet
? SNIPPET_VISIBILITY.private.description_project
: SNIPPET_VISIBILITY.private.description,
},
{
value: '1',
icon: 'shield',
text: SNIPPET_VISIBILITY.internal.label,
description: SNIPPET_VISIBILITY.internal.description,
},
{
value: '2',
icon: 'earth',
text: SNIPPET_VISIBILITY.public.label,
description: SNIPPET_VISIBILITY.public.description,
},
];
},
},
methods: {
updateSelectedOption(newVal) {
if (newVal !== this.selected) {
this.selected = newVal;
}
},
},
};
</script>
<template>
<div class="form-group">
<label>
{{ __('Visibility level') }}
<gl-link v-if="helpLink" :href="helpLink" target="_blank"
><gl-icon :size="12" name="question"
/></gl-link>
</label>
<gl-form-group id="visibility-level-setting">
<gl-form-radio-group :checked="selected" stacked @change="updateSelectedOption">
<gl-form-radio
v-for="option in visibilityOptions"
:key="option.icon"
:value="option.value"
class="mb-3"
>
<div class="d-flex align-items-center">
<gl-icon :size="16" :name="option.icon" />
<span class="font-weight-bold ml-1">{{ option.text }}</span>
</div>
<template #help>{{ option.description }}</template>
</gl-form-radio>
</gl-form-radio-group>
</gl-form-group>
</div>
</template>
import { __ } from '~/locale';
export const SNIPPET_VISIBILITY_PRIVATE = 'private';
export const SNIPPET_VISIBILITY_INTERNAL = 'internal';
export const SNIPPET_VISIBILITY_PUBLIC = 'public';
export const SNIPPET_VISIBILITY = {
private: {
label: __('Private'),
description: __('The snippet is visible only to me.'),
description_project: __('The snippet is visible only to project members.'),
},
internal: {
label: __('Internal'),
description: __('The snippet is visible to any logged in user.'),
},
public: {
label: __('Public'),
description: __('The snippet can be accessed without any authentication.'),
},
};
......@@ -52,8 +52,15 @@ class Projects::SnippetsController < Projects::ApplicationController
create_params = snippet_params.merge(spammable_params)
service_response = Snippets::CreateService.new(project, current_user, create_params).execute
@snippet = service_response.payload[:snippet]
repository_operation_error = service_response.error? && !@snippet.persisted? && @snippet.valid?
recaptcha_check_with_fallback { render :new }
if repository_operation_error
flash.now[:alert] = service_response.message
render :new
else
recaptcha_check_with_fallback { render :new }
end
end
def update
......
......@@ -52,10 +52,17 @@ class SnippetsController < ApplicationController
create_params = snippet_params.merge(spammable_params)
service_response = Snippets::CreateService.new(nil, current_user, create_params).execute
@snippet = service_response.payload[:snippet]
repository_operation_error = service_response.error? && !@snippet.persisted? && @snippet.valid?
move_temporary_files if @snippet.valid? && params[:files]
if repository_operation_error
flash.now[:alert] = service_response.message
recaptcha_check_with_fallback { render :new }
render :new
else
move_temporary_files if @snippet.valid? && params[:files]
recaptcha_check_with_fallback { render :new }
end
end
def update
......
......@@ -219,22 +219,15 @@ module ApplicationSettingImplementation
self.outbound_local_requests_whitelist.uniq!
end
# This method separates the strings stored in the
# application_setting.outbound_local_requests_whitelist array into two arrays:
# an array of IPAddr objects (`[IPAddr.new('127.0.0.1')]`), and an array of
# domain strings (`['www.example.com']`).
def outbound_local_requests_whitelist_arrays
strong_memoize(:outbound_local_requests_whitelist_arrays) do
next [[], []] unless self.outbound_local_requests_whitelist
ip_whitelist = []
domain_whitelist = []
self.outbound_local_requests_whitelist.each do |str|
ip_obj = Gitlab::Utils.string_to_ip_object(str)
if ip_obj
ip_whitelist << ip_obj
else
domain_whitelist << str
end
end
ip_whitelist, domain_whitelist = separate_whitelists(self.outbound_local_requests_whitelist)
[ip_whitelist, domain_whitelist]
end
......@@ -360,6 +353,20 @@ module ApplicationSettingImplementation
private
def separate_whitelists(string_array)
string_array.reduce([[], []]) do |(ip_whitelist, domain_whitelist), string|
ip_obj = Gitlab::Utils.string_to_ip_object(string)
if ip_obj
ip_whitelist << ip_obj
else
domain_whitelist << string
end
[ip_whitelist, domain_whitelist]
end
end
def array_to_string(arr)
arr&.join("\n")
end
......
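For reference, a minimal standalone sketch of the reduce-based separation above, using `IPAddr.new` in place of `Gitlab::Utils.string_to_ip_object` (the sample values are illustrative only):

```ruby
require 'ipaddr'

# Minimal sketch of the reduce above; IPAddr.new stands in for
# Gitlab::Utils.string_to_ip_object here.
def separate_whitelists(string_array)
  string_array.reduce([[], []]) do |(ip_whitelist, domain_whitelist), string|
    ip_obj =
      begin
        IPAddr.new(string)
      rescue IPAddr::InvalidAddressError
        nil
      end

    ip_obj ? ip_whitelist.push(ip_obj) : domain_whitelist.push(string)
    [ip_whitelist, domain_whitelist]
  end
end

ips, domains = separate_whitelists(['127.0.0.1', 'www.example.com'])
# ips     => [#<IPAddr: 127.0.0.1>]
# domains => ["www.example.com"]
```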
......@@ -374,7 +374,7 @@ class Member < ApplicationRecord
# always notify when there isn't a user yet
return true if user.blank?
NotificationRecipientService.notifiable?(user, type, notifiable_options.merge(opts))
NotificationRecipients::BuildService.notifiable?(user, type, notifiable_options.merge(opts))
end
# rubocop: enable CodeReuse/ServiceClass
......
......@@ -4,7 +4,7 @@ class SnippetRepository < ApplicationRecord
include Shardable
DEFAULT_EMPTY_FILE_NAME = 'snippetfile'
EMPTY_FILE_PATTERN = /^#{DEFAULT_EMPTY_FILE_NAME}(\d)\.txt$/.freeze
EMPTY_FILE_PATTERN = /^#{DEFAULT_EMPTY_FILE_NAME}(\d+)\.txt$/.freeze
CommitError = Class.new(StandardError)
......@@ -51,14 +51,14 @@ class SnippetRepository < ApplicationRecord
end
def transform_file_entries(files)
last_index = get_last_empty_file_index
next_index = get_last_empty_file_index + 1
files.each do |file_entry|
file_entry[:action] = infer_action(file_entry) unless file_entry[:action]
if file_entry[:file_path].blank?
file_entry[:file_path] = build_empty_file_name(last_index)
last_index += 1
file_entry[:file_path] = build_empty_file_name(next_index)
next_index += 1
end
end
end
......@@ -70,12 +70,10 @@ class SnippetRepository < ApplicationRecord
end
def get_last_empty_file_index
last_file = repository.ls_files(nil)
.map! { |file| file.match(EMPTY_FILE_PATTERN) }
.compact
.max_by { |element| element[1] }
last_file ? (last_file[1].to_i + 1) : 1
repository.ls_files(nil).inject(0) do |max, file|
idx = file[EMPTY_FILE_PATTERN, 1].to_i
[idx, max].max
end
end
def build_empty_file_name(index)
......
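To illustrate why the pattern now captures `(\d+)` and how the `inject`-based scan picks the next index, here is a small sketch with assumed file names:

```ruby
# Sketch of the inject-based index scan; the file names are assumed examples.
EMPTY_FILE_PATTERN = /^snippetfile(\d+)\.txt$/.freeze

files = ['README.md', 'snippetfile1.txt', 'snippetfile12.txt']

last_index = files.inject(0) do |max, file|
  idx = file[EMPTY_FILE_PATTERN, 1].to_i # non-matching names yield nil => 0
  [idx, max].max
end

next_index = last_index + 1
# => 13, used for the next generated name ("snippetfile13.txt");
# with the old single-digit capture (\d), "snippetfile12.txt" would not match at all.
```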
# frozen_string_literal: true
#
# Used by NotificationService to determine who should receive notification
#
module NotificationRecipients
module BuildService
def self.notifiable_users(users, *args)
users.compact.map { |u| NotificationRecipient.new(u, *args) }.select(&:notifiable?).map(&:user)
end
def self.notifiable?(user, *args)
NotificationRecipient.new(user, *args).notifiable?
end
def self.build_recipients(*args)
Builder::Default.new(*args).notification_recipients
end
def self.build_new_note_recipients(*args)
Builder::NewNote.new(*args).notification_recipients
end
def self.build_merge_request_unmergeable_recipients(*args)
Builder::MergeRequestUnmergeable.new(*args).notification_recipients
end
def self.build_project_maintainers_recipients(*args)
Builder::ProjectMaintainers.new(*args).notification_recipients
end
def self.build_new_release_recipients(*args)
Builder::NewRelease.new(*args).notification_recipients
end
end
end
NotificationRecipients::BuildService.prepend_if_ee('EE::NotificationRecipients::BuildService')
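A brief usage sketch of the renamed module (with `issue` and `current_user` assumed to be in scope), mirroring how `NotificationService` calls it later in this commit:

```ruby
# Usage sketch: resolve recipients through the namespaced service and
# inspect the user and reason attached to each one.
recipients = NotificationRecipients::BuildService.build_recipients(
  issue,
  current_user,
  action: 'reassign'
)

recipients.each do |recipient|
  Rails.logger.info("notify user #{recipient.user.id} (reason: #{recipient.reason})")
end
```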
# frozen_string_literal: true
#
# Used by NotificationService to determine who should receive notification
#
module NotificationRecipientService
def self.notifiable_users(users, *args)
users.compact.map { |u| NotificationRecipient.new(u, *args) }.select(&:notifiable?).map(&:user)
end
def self.notifiable?(user, *args)
NotificationRecipient.new(user, *args).notifiable?
end
def self.build_recipients(*args)
Builder::Default.new(*args).notification_recipients
end
def self.build_new_note_recipients(*args)
Builder::NewNote.new(*args).notification_recipients
end
def self.build_merge_request_unmergeable_recipients(*args)
Builder::MergeRequestUnmergeable.new(*args).notification_recipients
end
def self.build_project_maintainers_recipients(*args)
Builder::ProjectMaintainers.new(*args).notification_recipients
end
def self.build_new_release_recipients(*args)
Builder::NewRelease.new(*args).notification_recipients
end
module NotificationRecipients
module Builder
class Base
def initialize(*)
......@@ -244,186 +213,5 @@ module NotificationRecipientService
end
end
end
class Default < Base
MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze
attr_reader :target
attr_reader :current_user
attr_reader :action
attr_reader :previous_assignees
attr_reader :skip_current_user
def initialize(target, current_user, action:, custom_action: nil, previous_assignees: nil, skip_current_user: true)
@target = target
@current_user = current_user
@action = action
@custom_action = custom_action
@previous_assignees = previous_assignees
@skip_current_user = skip_current_user
end
def add_watchers
add_project_watchers
end
def build!
add_participants(current_user)
add_watchers
add_custom_notifications
# Re-assign is considered as a mention of the new assignee
case custom_action
when :reassign_merge_request, :reassign_issue
add_recipients(previous_assignees, :mention, nil)
add_recipients(target.assignees, :mention, NotificationReason::ASSIGNED)
end
add_subscribed_users
if self.class.mention_type_actions.include?(custom_action)
# These will all be participants as well, but adding with the :mention
# type ensures that users with the mention notification level will
# receive them, too.
add_mentions(current_user, target: target)
# We use the `:participating` notification level in order to match existing legacy behavior as captured
# in existing specs (notification_service_spec.rb ~ line 507)
if target.is_a?(Issuable)
add_recipients(target.assignees, :participating, NotificationReason::ASSIGNED)
end
add_labels_subscribers
end
end
def acting_user
current_user if skip_current_user
end
# Build event key to search on custom notification level
# Check NotificationSetting.email_events
def custom_action
@custom_action ||= "#{action}_#{target.class.model_name.name.underscore}".to_sym
end
def self.mention_type_actions
MENTION_TYPE_ACTIONS.dup
end
end
class NewNote < Base
attr_reader :note
def initialize(note)
@note = note
end
def target
note.noteable
end
# NOTE: may be nil, in the case of a PersonalSnippet
#
# (this is okay because NotificationRecipient is written
# to handle nil projects)
def project
note.project
end
def group
if note.for_project_noteable?
project.group
else
target.try(:group)
end
end
def build!
# Add all users participating in the thread (author, assignee, comment authors)
add_participants(note.author)
add_mentions(note.author, target: note)
if note.for_project_noteable?
# Merge project watchers
add_project_watchers
else
add_group_watchers
end
add_custom_notifications
add_subscribed_users
end
def custom_action
:new_note
end
def acting_user
note.author
end
end
class NewRelease < Base
attr_reader :target
def initialize(target)
@target = target
end
def build!
add_recipients(target.project.authorized_users, :custom, nil)
end
def custom_action
:new_release
end
def acting_user
target.author
end
end
class MergeRequestUnmergeable < Base
attr_reader :target
def initialize(merge_request)
@target = merge_request
end
def build!
target.merge_participants.each do |user|
add_recipients(user, :participating, nil)
end
end
def custom_action
:unmergeable_merge_request
end
def acting_user
nil
end
end
class ProjectMaintainers < Base
attr_reader :target
def initialize(target, action:)
@target = target
@action = action
end
def build!
return [] unless project
add_recipients(project.team.maintainers, :mention, nil)
end
def acting_user
nil
end
end
end
end
NotificationRecipientService::Builder::Default.prepend_if_ee('EE::NotificationRecipientBuilders::Default') # rubocop: disable Cop/InjectEnterpriseEditionModule
NotificationRecipientService.prepend_if_ee('EE::NotificationRecipientService')
# frozen_string_literal: true
module NotificationRecipients
module Builder
class Default < Base
MENTION_TYPE_ACTIONS = [:new_issue, :new_merge_request].freeze
attr_reader :target
attr_reader :current_user
attr_reader :action
attr_reader :previous_assignees
attr_reader :skip_current_user
def initialize(target, current_user, action:, custom_action: nil, previous_assignees: nil, skip_current_user: true)
@target = target
@current_user = current_user
@action = action
@custom_action = custom_action
@previous_assignees = previous_assignees
@skip_current_user = skip_current_user
end
def add_watchers
add_project_watchers
end
def build!
add_participants(current_user)
add_watchers
add_custom_notifications
# Re-assign is considered as a mention of the new assignee
case custom_action
when :reassign_merge_request, :reassign_issue
add_recipients(previous_assignees, :mention, nil)
add_recipients(target.assignees, :mention, NotificationReason::ASSIGNED)
end
add_subscribed_users
if self.class.mention_type_actions.include?(custom_action)
# These will all be participants as well, but adding with the :mention
# type ensures that users with the mention notification level will
# receive them, too.
add_mentions(current_user, target: target)
# We use the `:participating` notification level in order to match existing legacy behavior as captured
# in existing specs (notification_service_spec.rb ~ line 507)
if target.is_a?(Issuable)
add_recipients(target.assignees, :participating, NotificationReason::ASSIGNED)
end
add_labels_subscribers
end
end
def acting_user
current_user if skip_current_user
end
# Build event key to search on custom notification level
# Check NotificationSetting.email_events
def custom_action
@custom_action ||= "#{action}_#{target.class.model_name.name.underscore}".to_sym
end
def self.mention_type_actions
MENTION_TYPE_ACTIONS.dup
end
end
end
end
NotificationRecipients::Builder::Default.prepend_if_ee('EE::NotificationRecipients::Builder::Default')
# frozen_string_literal: true
module NotificationRecipients
module Builder
class MergeRequestUnmergeable < Base
attr_reader :target
def initialize(merge_request)
@target = merge_request
end
def build!
target.merge_participants.each do |user|
add_recipients(user, :participating, nil)
end
end
def custom_action
:unmergeable_merge_request
end
def acting_user
nil
end
end
end
end
# frozen_string_literal: true
module NotificationRecipients
module Builder
class NewNote < Base
attr_reader :note
def initialize(note)
@note = note
end
def target
note.noteable
end
# NOTE: may be nil, in the case of a PersonalSnippet
#
# (this is okay because NotificationRecipient is written
# to handle nil projects)
def project
note.project
end
def group
if note.for_project_noteable?
project.group
else
target.try(:group)
end
end
def build!
# Add all users participating in the thread (author, assignee, comment authors)
add_participants(note.author)
add_mentions(note.author, target: note)
if note.for_project_noteable?
# Merge project watchers
add_project_watchers
else
add_group_watchers
end
add_custom_notifications
add_subscribed_users
end
def custom_action
:new_note
end
def acting_user
note.author
end
end
end
end
# frozen_string_literal: true
module NotificationRecipients
module Builder
class NewRelease < Base
attr_reader :target
def initialize(target)
@target = target
end
def build!
add_recipients(target.project.authorized_users, :custom, nil)
end
def custom_action
:new_release
end
def acting_user
target.author
end
end
end
end
# frozen_string_literal: true
module NotificationRecipients
module Builder
class ProjectMaintainers < Base
attr_reader :target
def initialize(target, action:)
@target = target
@action = action
end
def build!
return [] unless project
add_recipients(project.team.maintainers, :mention, nil)
end
def acting_user
nil
end
end
end
end
......@@ -108,7 +108,7 @@ class NotificationService
# * users with custom level checked with "reassign issue"
#
def reassigned_issue(issue, current_user, previous_assignees = [])
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
issue,
current_user,
action: "reassign",
......@@ -161,7 +161,7 @@ class NotificationService
def push_to_merge_request(merge_request, current_user, new_commits: [], existing_commits: [])
new_commits = new_commits.map { |c| { short_id: c.short_id, title: c.title } }
existing_commits = existing_commits.map { |c| { short_id: c.short_id, title: c.title } }
recipients = NotificationRecipientService.build_recipients(merge_request, current_user, action: "push_to")
recipients = NotificationRecipients::BuildService.build_recipients(merge_request, current_user, action: "push_to")
recipients.each do |recipient|
mailer.send(:push_to_merge_request_email, recipient.user.id, merge_request.id, current_user.id, recipient.reason, new_commits: new_commits, existing_commits: existing_commits).deliver_later
......@@ -197,7 +197,7 @@ class NotificationService
# * users with custom level checked with "reassign merge request"
#
def reassigned_merge_request(merge_request, current_user, previous_assignees = [])
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "reassign",
......@@ -260,7 +260,7 @@ class NotificationService
end
def resolve_all_discussions(merge_request, current_user)
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
merge_request,
current_user,
action: "resolve_all_discussions")
......@@ -291,7 +291,7 @@ class NotificationService
def send_new_note_notifications(note)
notify_method = "note_#{note.noteable_ability_name}_email".to_sym
recipients = NotificationRecipientService.build_new_note_recipients(note)
recipients = NotificationRecipients::BuildService.build_new_note_recipients(note)
recipients.each do |recipient|
mailer.send(notify_method, recipient.user.id, note.id, recipient.reason).deliver_later
end
......@@ -299,7 +299,7 @@ class NotificationService
# Notify users when a new release is created
def send_new_release_notifications(release)
recipients = NotificationRecipientService.build_new_release_recipients(release)
recipients = NotificationRecipients::BuildService.build_new_release_recipients(release)
recipients.each do |recipient|
mailer.new_release_email(recipient.user.id, release, recipient.reason).deliver_later
......@@ -413,7 +413,7 @@ class NotificationService
end
def issue_moved(issue, new_issue, current_user)
recipients = NotificationRecipientService.build_recipients(issue, current_user, action: 'moved')
recipients = NotificationRecipients::BuildService.build_recipients(issue, current_user, action: 'moved')
recipients.map do |recipient|
email = mailer.issue_moved_email(recipient.user, issue, new_issue, current_user, recipient.reason)
......@@ -490,7 +490,7 @@ class NotificationService
end
def issue_due(issue)
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
issue,
issue.author,
action: 'due',
......@@ -526,7 +526,7 @@ class NotificationService
protected
def new_resource_email(target, method)
recipients = NotificationRecipientService.build_recipients(target, target.author, action: "new")
recipients = NotificationRecipients::BuildService.build_recipients(target, target.author, action: "new")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, recipient.reason).deliver_later
......@@ -534,7 +534,7 @@ class NotificationService
end
def new_mentions_in_resource_email(target, new_mentioned_users, current_user, method)
recipients = NotificationRecipientService.build_recipients(target, current_user, action: "new")
recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "new")
recipients = recipients.select {|r| new_mentioned_users.include?(r.user) }
recipients.each do |recipient|
......@@ -545,7 +545,7 @@ class NotificationService
def close_resource_email(target, current_user, method, skip_current_user: true, closed_via: nil)
action = method == :merged_merge_request_email ? "merge" : "close"
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: action,
......@@ -573,7 +573,7 @@ class NotificationService
end
def removed_milestone_resource_email(target, current_user, method)
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'removed_milestone'
......@@ -585,7 +585,7 @@ class NotificationService
end
def changed_milestone_resource_email(target, milestone, current_user, method)
recipients = NotificationRecipientService.build_recipients(
recipients = NotificationRecipients::BuildService.build_recipients(
target,
current_user,
action: 'changed_milestone'
......@@ -597,7 +597,7 @@ class NotificationService
end
def reopen_resource_email(target, current_user, method, status)
recipients = NotificationRecipientService.build_recipients(target, current_user, action: "reopen")
recipients = NotificationRecipients::BuildService.build_recipients(target, current_user, action: "reopen")
recipients.each do |recipient|
mailer.send(method, recipient.user.id, target.id, status, current_user.id, recipient.reason).deliver_later
......@@ -605,7 +605,7 @@ class NotificationService
end
def merge_request_unmergeable_email(merge_request)
recipients = NotificationRecipientService.build_merge_request_unmergeable_recipients(merge_request)
recipients = NotificationRecipients::BuildService.build_merge_request_unmergeable_recipients(merge_request)
recipients.each do |recipient|
mailer.merge_request_unmergeable_email(recipient.user.id, merge_request.id).deliver_later
......@@ -619,15 +619,15 @@ class NotificationService
private
def project_maintainers_recipients(target, action:)
NotificationRecipientService.build_project_maintainers_recipients(target, action: action)
NotificationRecipients::BuildService.build_project_maintainers_recipients(target, action: action)
end
def notifiable?(*args)
NotificationRecipientService.notifiable?(*args)
NotificationRecipients::BuildService.notifiable?(*args)
end
def notifiable_users(*args)
NotificationRecipientService.notifiable_users(*args)
NotificationRecipients::BuildService.notifiable_users(*args)
end
def deliver_access_request_email(recipient, member)
......
......@@ -38,25 +38,30 @@ module Snippets
private
def save_and_commit(snippet)
snippet.with_transaction_returning_status do
result = snippet.with_transaction_returning_status do
(snippet.save && snippet.store_mentions!).tap do |saved|
break false unless saved
if Feature.enabled?(:version_snippets, current_user)
create_repository_for(snippet)
create_commit(snippet)
end
end
rescue => e # Rescuing all because we can receive Creation exceptions, GRPC exceptions, Git exceptions, ...
snippet.errors.add(:base, e.message)
end
# If the commit action failed we need to remove the repository if exists
if snippet.repository_exists?
Repositories::DestroyService.new(snippet.repository).execute
end
create_commit(snippet) if result && snippet.repository_exists?
false
end
result
rescue => e # Rescuing all because we can receive Creation exceptions, GRPC exceptions, Git exceptions, ...
snippet.errors.add(:base, e.message)
# If the commit action failed, we need to remove the repository if it exists
snippet.repository.remove if snippet.repository_exists?
# If the snippet was created, we need to remove it, just as we
# would if it had failed validation
snippet.delete if snippet.persisted?
false
end
def create_repository_for(snippet)
......
......@@ -7,7 +7,7 @@
%section.issuable-discussion.js-vue-notes-event
#js-vue-notes{ data: { notes_data: notes_data(@issue).to_json,
noteable_data: serialize_issuable(@issue),
noteable_data: serialize_issuable(@issue, with_blocking_issues: Feature.enabled?(:prevent_closing_blocked_issues, @issue.project)),
noteable_type: 'Issue',
target_type: 'issue',
current_user_data: UserSerializer.new.represent(current_user, {only_path: true}, CurrentUserEntity).to_json } }
---
title: Added Edit Visibility Vue component for Snippet
merge_request: 26799
author:
type: added
---
title: Add Prometheus metrics for Gitaly and database time in background jobs
merge_request: 26384
author:
type: changed
---
title: Fix bug committing snippet content when creating the snippet
merge_request: 26287
author:
type: fixed
---
title: Display GitLab issues created via Sentry global integration
merge_request: 26418
author:
type: fixed
......@@ -179,6 +179,8 @@ Settings.gitlab['email_smime'] = SmimeSignatureSettings.parse(Settings.gitlab['e
Settings.gitlab['base_url'] ||= Settings.__send__(:build_base_gitlab_url)
Settings.gitlab['url'] ||= Settings.__send__(:build_gitlab_url)
Settings.gitlab['user'] ||= 'git'
# External configuration may cause the ssh user to differ from the GitLab user
Settings.gitlab['ssh_user'] ||= Settings.gitlab.user
Settings.gitlab['user_home'] ||= begin
Etc.getpwnam(Settings.gitlab['user']).dir
rescue ArgumentError # no user configured
......@@ -560,7 +562,7 @@ Settings.gitlab_shell['receive_pack'] = true if Settings.gitlab_shell['receive
Settings.gitlab_shell['upload_pack'] = true if Settings.gitlab_shell['upload_pack'].nil?
Settings.gitlab_shell['ssh_host'] ||= Settings.gitlab.ssh_host
Settings.gitlab_shell['ssh_port'] ||= 22
Settings.gitlab_shell['ssh_user'] ||= Settings.gitlab.user
Settings.gitlab_shell['ssh_user'] = Settings.gitlab.ssh_user
Settings.gitlab_shell['owner_group'] ||= Settings.gitlab.user
Settings.gitlab_shell['ssh_path_prefix'] ||= Settings.__send__(:build_gitlab_shell_ssh_path_prefix)
Settings.gitlab_shell['git_timeout'] ||= 10800
......
......@@ -44,7 +44,8 @@ class Settings < Settingslogic
end
def build_gitlab_shell_ssh_path_prefix
user_host = "#{gitlab_shell.ssh_user}@#{gitlab_shell.ssh_host}"
user = "#{gitlab_shell.ssh_user}@" unless gitlab_shell.ssh_user.empty?
user_host = "#{user}#{gitlab_shell.ssh_host}"
if gitlab_shell.ssh_port != 22
"ssh://#{user_host}:#{gitlab_shell.ssh_port}/"
......
......@@ -278,7 +278,7 @@ application server, or a Gitaly node.
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure Praefect](../restart_gitlab.md#omnibus-gitlab-reconfigure):
```shell
sudo gitlab-ctl reconfigure
gitlab-ctl reconfigure
```
1. Verify that Praefect can reach PostgreSQL:
......@@ -420,7 +420,7 @@ documentation](index.md#3-gitaly-server-configuration).
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure Gitaly](../restart_gitlab.md#omnibus-gitlab-reconfigure):
```shell
sudo gitlab-ctl reconfigure
gitlab-ctl reconfigure
```
**Complete these steps for each Gitaly node!**
......@@ -488,6 +488,16 @@ Particular attention should be shown to:
gitlab_shell['secret_token'] = 'GITLAB_SHELL_SECRET_TOKEN'
```
1. Configure the `external_url` so that GitLab can serve files from the proper
endpoint by editing `/etc/gitlab/gitlab.rb`.
You will need to replace `GITLAB_SERVER_URL` with the real URL on which
the current GitLab instance is served:
```ruby
external_url 'GITLAB_SERVER_URL'
```
1. Add Prometheus monitoring settings by editing `/etc/gitlab/gitlab.rb`.
You will need to replace:
......@@ -523,19 +533,19 @@ Particular attention should be shown to:
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure GitLab](../restart_gitlab.md#omnibus-gitlab-reconfigure):
```shell
sudo gitlab-ctl reconfigure
gitlab-ctl reconfigure
```
1. Verify that GitLab can reach Praefect:
```shell
sudo gitlab-rake gitlab:gitaly:check
gitlab-rake gitlab:gitaly:check
```
1. Set the Grafana admin password. This command will prompt you to enter a new password:
```shell
sudo gitlab-ctl set-grafana-password
gitlab-ctl set-grafana-password
```
1. Update the **Repository storage** settings from **Admin Area > Settings >
......
......@@ -63,3 +63,29 @@ You can add custom metrics in the self monitoring project by:
1. [Duplicating](../../../user/project/integrations/prometheus.md#duplicating-a-gitlab-defined-dashboard) the default dashboard.
1. [Editing](../../../user/project/integrations/prometheus.md#view-and-edit-the-source-file-of-a-custom-dashboard) the newly created dashboard file and configuring it with [dashboard YAML properties](../../../user/project/integrations/prometheus.md#dashboard-yaml-properties).
## Troubleshooting
### Getting error message in logs: `Could not create instance administrators group. Errors: ["You don’t have permission to create groups."]`
There is [a bug](https://gitlab.com/gitlab-org/gitlab/issues/208676) which causes
project creation to fail with the following error (which appears in the log file)
when the first admin user is an
[external user](../../../user/permissions.md#external-users-core-only):
```text
Could not create instance administrators group. Errors: ["You don’t have permission to create groups."]
```
Run the following in a Rails console to check if the first admin user is an external user:
```ruby
User.admins.active.first.external?
```
If this returns true, the first admin user is an external user.
If you face this issue, you can temporarily
[make the admin user a non-external user](../../../user/permissions.md#external-users-core-only)
and then try to create the project.
Once the project is created, the admin user can be changed back to an external user.
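A possible Rails console sketch of that temporary workaround (toggling the `external` flag directly is an assumption here; the linked permissions page describes the supported way to change it):

```ruby
# Hypothetical console sketch: temporarily clear the external flag on the
# first admin, retry the project creation, then restore the flag.
admin = User.admins.active.first

admin.update!(external: false)
# ... retry creating the self monitoring project ...
admin.update!(external: true)
```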
......@@ -86,13 +86,15 @@ The following metrics are available:
| `failed_login_captcha_total` | Gauge | 11.0 | Counter of failed CAPTCHA attempts during login | |
| `successful_login_captcha_total` | Gauge | 11.0 | Counter of successful CAPTCHA attempts during login | |
| `auto_devops_pipelines_completed_total` | Counter | 12.7 | Counter of completed Auto DevOps pipelines, labeled by status | |
| `sidekiq_jobs_cpu_seconds` | Histogram | 12.4 | Seconds of cpu time to run Sidekiq job | |
| `sidekiq_jobs_completion_seconds` | Histogram | 12.2 | Seconds to complete Sidekiq job | |
| `sidekiq_jobs_queue_duration_seconds` | Histogram | 12.5 | Duration in seconds that a Sidekiq job was queued before being executed | |
| `sidekiq_jobs_failed_total` | Counter | 12.2 | Sidekiq jobs failed | |
| `sidekiq_jobs_retried_total` | Counter | 12.2 | Sidekiq jobs retried | |
| `sidekiq_running_jobs` | Gauge | 12.2 | Number of Sidekiq jobs running | |
| `sidekiq_concurrency` | Gauge | 12.5 | Maximum number of Sidekiq jobs | |
| `sidekiq_jobs_cpu_seconds` | Histogram | 12.4 | Seconds of cpu time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
| `sidekiq_jobs_completion_seconds` | Histogram | 12.2 | Seconds to complete Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
| `sidekiq_jobs_db_seconds` | Histogram | 12.9 | Seconds of DB time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
| `sidekiq_jobs_gitaly_seconds` | Histogram | 12.9 | Seconds of Gitaly time to run Sidekiq job | queue, boundary, external_dependencies, feature_category, job_status, urgency |
| `sidekiq_jobs_queue_duration_seconds` | Histogram | 12.5 | Duration in seconds that a Sidekiq job was queued before being executed | queue, boundary, external_dependencies, feature_category, urgency |
| `sidekiq_jobs_failed_total` | Counter | 12.2 | Sidekiq jobs failed | queue, boundary, external_dependencies, feature_category, urgency |
| `sidekiq_jobs_retried_total` | Counter | 12.2 | Sidekiq jobs retried | queue, boundary, external_dependencies, feature_category, urgency |
| `sidekiq_running_jobs` | Gauge | 12.2 | Number of Sidekiq jobs running | queue, boundary, external_dependencies, feature_category, urgency |
| `sidekiq_concurrency` | Gauge | 12.5 | Maximum number of Sidekiq jobs | |
## Metrics controlled by a feature flag
......
......@@ -33,7 +33,7 @@ future GitLab releases.**
| `CI_COMMIT_DESCRIPTION` | 10.8 | all | The description of the commit: the message without the first line, if the title is shorter than 100 characters; otherwise the full message. |
| `CI_COMMIT_MESSAGE` | 10.8 | all | The full commit message. |
| `CI_COMMIT_REF_NAME` | 9.0 | all | The branch or tag name for which project is built |
| `CI_COMMIT_REF_PROTECTED` | 11.11 | all | `true` if the job is running on a protected branch, `false` if not |
| `CI_COMMIT_REF_PROTECTED` | 11.11 | all | `true` if the job is running on a protected reference, `false` if not |
| `CI_COMMIT_REF_SLUG` | 9.0 | all | `$CI_COMMIT_REF_NAME` lowercased, shortened to 63 bytes, and with everything except `0-9` and `a-z` replaced with `-`. No leading / trailing `-`. Use in URLs, host names and domain names. |
| `CI_COMMIT_SHA` | 9.0 | all | The commit revision for which project is built |
| `CI_COMMIT_SHORT_SHA` | 11.7 | all | The first eight characters of `CI_COMMIT_SHA` |
......
......@@ -165,33 +165,79 @@ rspec 2.6:
You can disable inheritance of globally defined defaults
and variables with the `inherit:` parameter.
To enable or disable the inheritance of all `variables:` or `default:` parameters, use the following format:
- `default: true` or `default: false`
- `variables: true` or `variables: false`
To inherit only a subset of `default:` parameters or `variables:`, specify what
you wish to inherit, and any not listed will **not** be inherited. Use
one of the following formats:
```yaml
inherit:
default: [parameter1, parameter2]
variables: [VARIABLE1, VARIABLE2]
```
Or:
```yaml
inherit:
default:
- parameter1
- parameter2
variables:
- VARIABLE1
- VARIABLE2
```
In the example below:
- `rubocop` **will** inherit both the `before_script` and the variable `DOMAIN`.
- `rspec` **will not** inherit the `before_script` or the variable `DOMAIN`.
- `capybara` **will** inherit the `before_script`, but **will not** inherit the variable `DOMAIN`.
- `rubocop`:
- **will** inherit: nothing.
- `rspec`:
- **will** inherit: the default `image` and the `WEBHOOK_URL` variable.
- **will not** inherit: the default `before_script` and the `DOMAIN` variable.
- `capybara`:
- **will** inherit: the default `before_script` and `image`.
- **will not** inherit: the `DOMAIN` and `WEBHOOK_URL` variables.
- `karma`:
- **will** inherit: the default `image` and `before_script`, and the `DOMAIN` variable.
- **will not** inherit: the `WEBHOOK_URL` variable.
```yaml
default:
image: 'ruby:2.4'
before_script:
- echo Hello World
variables:
DOMAIN: example.com
WEBHOOK_URL: https://my-webhook.example.com
rubocop:
inherit:
default: false
variables: false
script: bundle exec rubocop
rspec:
inherit:
default: false
variables: false
default: [image]
variables: [WEBHOOK_URL]
script: bundle exec rspec
capybara:
inherit:
variables: false
script: bundle exec capybara
karma:
inherit:
default: true
variables: [DOMAIN]
script: karma
```
## Parameter details
......
......@@ -191,6 +191,15 @@ then `artifacts:reports:dependency_scanning` must be set to `depscan.json`.
Following the POSIX exit code standard, the scanner will exit with 0 for success and any number from 1 to 255 for anything else.
Success also includes the case when vulnerabilities are found.
When executing a scanning job using the [Docker-in-Docker privileged mode](../../user/application_security/sast/index.md#requirements),
we reserve the following standard exit codes.
| Orchestrator Exit Code | Description |
|------------------------|----------------------------------|
| 3 | No match, no compatible analyzer |
| 4 | Project directory empty |
| 5 | No compatible Docker image |
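For illustration, a hedged sketch of how a wrapper might surface these reserved codes (the `orchestrator` command name is hypothetical):

```ruby
# Hypothetical wrapper: run the scanner and report the reserved exit codes.
RESERVED_EXIT_CODES = {
  3 => 'No match, no compatible analyzer',
  4 => 'Project directory empty',
  5 => 'No compatible Docker image'
}.freeze

system('orchestrator', 'scan') # hypothetical scanner invocation
status = $?.exitstatus

if status.zero?
  puts 'Scan succeeded (vulnerabilities may still have been reported)'
else
  puts RESERVED_EXIT_CODES.fetch(status) { "Scanner failed with exit code #{status}" }
end
```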
### Logging
The scanner should log error messages and warnings so that users can easily investigate
......
......@@ -159,7 +159,7 @@ rescues `StandardError` which can make it harder to debug issues in an
development environment. The current workaround is to temporarily
comment out the `rescue` in your local development source.
You can also follow the installation pod logs to debug issues related to
You can also follow the installation logs to debug issues related to
installation. Once the installation/upgrade is underway, wait for the
pod to be created. Then run the following to obtain the pods logs as
they are written:
......
......@@ -327,6 +327,46 @@ On the Route 53 dashboard, click **Hosted zones** in the left navigation bar:
1. Click **Create**.
1. Update your DNS records with your domain registrar. The steps for doing this vary depending on which registrar you use and is beyond the scope of this guide.
## Setting up Bastion Hosts
Since our GitLab instances will be in private subnets, we need a way to connect to these instances via SSH to make configuration changes, perform upgrades, etc. One way of doing this is via a [bastion host](https://en.wikipedia.org/wiki/Bastion_host), sometimes also referred to as a jump box.
TIP: **Tip:** If you do not want to maintain bastion hosts, you can set up [AWS Systems Manager Session Manager](https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager.html) for access to instances. This is beyond the scope of this document.
### Create Bastion Host A
1. Navigate to the EC2 Dashboard and click on **Launch instance**.
1. Select the **Ubuntu Server 18.04 LTS (HVM)** AMI.
1. Choose an instance type. We'll use a `t2.micro` as we'll only use the bastion host to SSH into our other instances.
1. Click **Configure Instance Details**.
1. Under **Network**, select the `gitlab-vpc` from the dropdown menu.
1. Under **Subnet**, select the public subnet we created earlier (`gitlab-public-10.0.0.0`).
1. Double check that under **Auto-assign Public IP** you have **Use subnet setting (Enable)** selected.
1. Leave everything else as default and click **Add Storage**.
1. For storage, we'll leave everything as default and only add an 8GB root volume. We won't store anything on this instance.
1. Click **Add Tags** and on the next screen click **Add Tag**.
1. We’ll only set `Key: Name` and `Value: Bastion Host A`.
1. Click **Configure Security Group**.
1. Select **Create a new security group**, enter a **Security group name** (we'll use `bastion-sec-group`), and add a description.
1. We'll enable SSH access from anywhere (`0.0.0.0/0`). If you want stricter security, specify a single IP address or an IP address range in CIDR notation.
1. Click **Review and Launch**
1. Review all your settings and, if you're happy, click **Launch**.
1. Acknowledge that you have access to an existing key pair or create a new one. Click **Launch Instance**.
Confirm that you can SSH into the instance:
1. On the EC2 Dashboard, click on **Instances** in the left menu.
1. Select **Bastion Host A** from your list of instances.
1. Click **Connect** and follow the connection instructions.
1. If you are able to connect successfully, let's move on to setting up our second bastion host for redundancy.
### Create Bastion Host B
1. Create an EC2 instance following the same steps as above with the following changes:
1. For the **Subnet**, select the second public subnet we created earlier (`gitlab-public-10.0.2.0`).
1. Under the **Add Tags** section, we’ll set `Key: Name` and `Value: Bastion Host B` so that we can easily identify our two instances.
1. For the security group, select the existing `bastion-sec-group` we created above.
## Deploying GitLab inside an auto scaling group
We'll use AWS's wizard to deploy GitLab and then SSH into the instance to
......
......@@ -59,4 +59,4 @@ responsibility. The Application Development Platform integrates key performance
into GitLab, automatically. The following features are included:
- [Auto Monitoring](../autodevops/index.md#auto-monitoring)
- [In-app Kubernetes Pod Logs](../../user/project/clusters/kubernetes_pod_logs.md)
- [In-app Kubernetes Logs](../../user/project/clusters/kubernetes_pod_logs.md)
......@@ -825,7 +825,7 @@ management project. Refer to the
available configuration options.
NOTE: **Note:**
In this alpha implementation of installing Elastic Stack through CI, reading the environment pod logs through Elasticsearch is unsupported. This is supported if [installed via the UI](#elastic-stack).
In this alpha implementation of installing Elastic Stack through CI, reading the environment logs through Elasticsearch is unsupported. This is supported if [installed via the UI](#elastic-stack).
## Upgrading applications
......
......@@ -256,7 +256,7 @@ and give all group members access to the project at once.
Alternatively, you can [lock the sharing with group feature](#share-with-group-lock).
## Sharing a group with another group **(CORE ONLY)**
## Sharing a group with another group
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/18328) in GitLab 12.7.
......
......@@ -96,7 +96,7 @@ The options are:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/201846) in GitLab Ultimate 12.8.
This can be useful if you are triaging an application incident and need to
[explore logs](../project/integrations/prometheus.md#view-pod-logs-ultimate)
[explore logs](../project/integrations/prometheus.md#view-logs-ultimate)
from across your application. It also helps you to understand
what is affecting your application's performance and quickly resolve any problems.
......
......@@ -27,7 +27,7 @@ Using the GitLab project Kubernetes integration, you can:
- Use [Web terminals](#web-terminals).
- Use [Deploy Boards](#deploy-boards-premium). **(PREMIUM)**
- Use [Canary Deployments](#canary-deployments-premium). **(PREMIUM)**
- View [Pod logs](#pod-logs-ultimate). **(ULTIMATE)**
- View [Logs](#logs-ultimate). **(ULTIMATE)**
- Run serverless workloads on [Kubernetes with Knative](serverless/index.md).
### Deploy Boards **(PREMIUM)**
......@@ -48,11 +48,11 @@ the need to leave GitLab.
[Read more about Canary Deployments](../canary_deployments.md)
### Pod logs **(ULTIMATE)**
### Logs **(ULTIMATE)**
GitLab makes it easy to view the logs of running pods in connected Kubernetes clusters. By displaying the logs directly in GitLab, developers can avoid having to manage console tools or jump to a different interface.
[Read more about Kubernetes pod logs](kubernetes_pod_logs.md)
[Read more about Kubernetes logs](kubernetes_pod_logs.md)
### Kubernetes monitoring
......
# Kubernetes Pod Logs **(ULTIMATE)**
# Kubernetes Logs **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/4752) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 11.0.
......@@ -11,17 +11,17 @@ Everything you need to build, test, deploy, and run your app at scale.
## Overview
[Kubernetes](https://kubernetes.io) pod logs can be viewed directly within GitLab.
[Kubernetes](https://kubernetes.io) logs can be viewed directly within GitLab.
![Pod logs](img/kubernetes_pod_logs_v12_8.png)
![Pod logs](img/kubernetes_pod_logs_v12_9.png)
## Requirements
[Deploying to a Kubernetes environment](../deploy_boards.md#enabling-deploy-boards) is required in order to be able to use Pod Logs.
[Deploying to a Kubernetes environment](../deploy_boards.md#enabling-deploy-boards) is required in order to be able to use Logs.
## Usage
To access pod logs, you must have the right [permissions](../../permissions.md#project-members-permissions).
To access logs, you must have the right [permissions](../../permissions.md#project-members-permissions).
You can access them in two ways.
......@@ -29,7 +29,7 @@ You can access them in two ways.
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/22011) in GitLab 12.5.
Go to **{cloud-gear}** **Operations > Pod logs** on the sidebar menu.
Go to **{cloud-gear}** **Operations > Logs** on the sidebar menu.
![Sidebar menu](img/sidebar_menu_pod_logs_v12_5.png)
......
......@@ -585,17 +585,17 @@ From each of the panels in the dashboard, you can access the context menu by cli
The options are:
- [View logs](#view-pod-logs-ultimate)
- [View logs](#view-logs-ultimate)
- [Download CSV](#downloading-data-as-csv)
- [Generate link to chart](#embedding-gitlab-managed-kubernetes-metrics)
- [Alerts](#setting-up-alerts-for-prometheus-metrics-ultimate)
### View Pod Logs **(ULTIMATE)**
### View Logs **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/122013) in GitLab 12.8.
If you have [Pod Logs](../clusters/kubernetes_pod_logs.md) enabled,
you can navigate from the charts in the dashboard to view Pod Logs by
If you have [Logs](../clusters/kubernetes_pod_logs.md) enabled,
you can navigate from the charts in the dashboard to view Logs by
clicking on the context menu in the upper-right corner.
If you use the **Timeline zoom** function at the bottom of the chart, logs will narrow down to the time range you selected.
......@@ -710,7 +710,7 @@ Prometheus server.
> [Introduced][ce-29691] in GitLab 12.2.
It is possible to display metrics charts within [GitLab Flavored Markdown](../../markdown.md#gitlab-flavored-markdown-gfm). The maximum number of embeds allowed in a GitLab Flavored Markdown field is 100.
It is possible to display metrics charts within [GitLab Flavored Markdown](../../markdown.md#gitlab-flavored-markdown-gfm) fields such as issue or merge request descriptions. The maximum number of embedded charts allowed in a GitLab Flavored Markdown field is 100.
This can be useful if you are sharing an application incident or performance
metrics to others and want to have relevant information directly available.
......@@ -748,6 +748,25 @@ It is also possible to embed either the default dashboard metrics or individual
![Embedded Metrics in issue templates](img/embed_metrics_issue_template.png)
### Embedding Cluster Health Charts **(ULTIMATE)**
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/40997) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
[Cluster Health Metrics](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate) can also be embedded in [GitLab-flavored Markdown](../../markdown.md).
To embed a metric chart, include a link to that chart in the form `https://<root_url>/<project>/-/cluster/<cluster_id>?<query_params>` anywhere that GitLab-flavored Markdown is supported. To generate and copy a link to the chart, follow the instructions in the [Cluster Health Metric documentation](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate).
The following requirements must be met for the metric to unfurl:
- The `<cluster_id>` must correspond to a real cluster.
- Prometheus must be monitoring the cluster.
- The user must be allowed access to the project cluster metrics.
- The dashboards must be reporting data on the [Cluster Health Page](../clusters/index.md#monitoring-your-kubernetes-cluster-ultimate).
If the above requirements are met, then the metric will unfurl as seen below.
![Embedded Cluster Metric in issue descriptions](img/prometheus_cluster_health_embed_v12_9.png)
### Embedding Grafana charts
Grafana metrics can be embedded in [GitLab Flavored Markdown](../../markdown.md).
......
......@@ -82,7 +82,7 @@ module.exports = {
'^.+\\.js$': 'babel-jest',
'^.+\\.vue$': 'vue-jest',
},
transformIgnorePatterns: ['node_modules/(?!(@gitlab/ui|bootstrap-vue)/)'],
transformIgnorePatterns: ['node_modules/(?!(@gitlab/ui|bootstrap-vue|three)/)'],
timers: 'fake',
testEnvironment: '<rootDir>/spec/frontend/environment.js',
testEnvironmentOptions: {
......
......@@ -16,11 +16,11 @@ module Gitlab
validates :config, allowed_keys: ALLOWED_KEYS
end
entry :default, ::Gitlab::Config::Entry::Boolean,
entry :default, ::Gitlab::Ci::Config::Entry::Inherit::Default,
description: 'Indicates whether to inherit `default:`.',
default: true
entry :variables, ::Gitlab::Config::Entry::Boolean,
entry :variables, ::Gitlab::Ci::Config::Entry::Inherit::Variables,
description: 'Indicates whether to inherit `variables:`.',
default: true
end
......
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# This class represents a default inherit entry
#
class Inherit
class Default < ::Gitlab::Config::Entry::Simplifiable
strategy :BooleanStrategy, if: -> (config) { [true, false].include?(config) }
strategy :ArrayStrategy, if: -> (config) { config.is_a?(Array) }
class BooleanStrategy < ::Gitlab::Config::Entry::Boolean
def inherit?(_key)
value
end
end
class ArrayStrategy < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
ALLOWED_VALUES = ::Gitlab::Ci::Config::Entry::Default::ALLOWED_KEYS.map(&:to_s).freeze
validations do
validates :config, type: Array
validates :config, array_of_strings: true
validates :config, allowed_array_values: { in: ALLOWED_VALUES }
end
def inherit?(key)
value.include?(key.to_s)
end
end
class UnknownStrategy < ::Gitlab::Config::Entry::Node
def errors
["#{location} should be a bool or array of strings"]
end
def inherit?(key)
false
end
end
end
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# This class represents a variables inherit entry
#
class Inherit
class Variables < ::Gitlab::Config::Entry::Simplifiable
strategy :BooleanStrategy, if: -> (config) { [true, false].include?(config) }
strategy :ArrayStrategy, if: -> (config) { config.is_a?(Array) }
class BooleanStrategy < ::Gitlab::Config::Entry::Boolean
def inherit?(_key)
value
end
end
class ArrayStrategy < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
validations do
validates :config, type: Array
validates :config, array_of_strings: true
end
def inherit?(key)
value.include?(key.to_s)
end
end
class UnknownStrategy < ::Gitlab::Config::Entry::Node
def errors
["#{location} should be a bool or array of strings"]
end
def inherit?(key)
false
end
end
end
end
end
end
end
end
......@@ -94,7 +94,7 @@ module Gitlab
end
def overwrite_entry(deps, key, current_entry)
return unless inherit_entry&.default_value
return unless inherit_entry&.default_entry&.inherit?(key)
return unless deps.default_entry
deps.default_entry[key] unless current_entry.specified?
......@@ -111,11 +111,12 @@ module Gitlab
end
def root_and_job_variables_value
if inherit_entry&.variables_value
@root_variables_value.to_h.merge(variables_value.to_h) # rubocop:disable Gitlab/ModuleWithInstanceVariables
else
variables_value.to_h
root_variables = @root_variables_value.to_h # rubocop:disable Gitlab/ModuleWithInstanceVariables
root_variables = root_variables.select do |key, _|
inherit_entry&.variables_entry&.inherit?(key)
end
root_variables.merge(variables_value.to_h)
end
end
end
......
......@@ -29,9 +29,11 @@ module Gitlab
# Used by embedded dashboards.
# @param options - y_label [String] Y-Axis label of
# a panel. Used by embedded dashboards.
# @param options - cluster [Cluster]
# @param options - cluster [Cluster]. Used by
# embedded and un-embedded dashboards.
# @param options - cluster_type [Symbol] The level of
# cluster, one of [:admin, :project, :group]
# cluster, one of [:admin, :project, :group]. Used by
# embedded and un-embedded dashboards.
# @param options - grafana_url [String] URL pointing
# to a grafana dashboard panel
# @param options - prometheus_alert_id [Integer] ID of
......
......@@ -3,7 +3,8 @@
# Responsible for determining which dashboard service should
# be used to fetch or generate a dashboard hash.
# The services can be considered in two categories - embeds
# and dashboards. Embeds are all portions of dashboards.
# and dashboards. Embed hashes are identical to dashboard hashes except
# that they contain a subset of panels.
module Gitlab
module Metrics
module Dashboard
......
......@@ -53,8 +53,9 @@ module Gitlab
repository_url = if Gitlab::CurrentSettings.enabled_git_access_protocol == 'ssh'
shell = config.gitlab_shell
user = "#{shell.ssh_user}@" unless shell.ssh_user.empty?
port = ":#{shell.ssh_port}" unless shell.ssh_port == 22
"ssh://#{shell.ssh_user}@#{shell.ssh_host}#{port}/#{path}.git"
"ssh://#{user}#{shell.ssh_host}#{port}/#{path}.git"
else
"#{project_url}.git"
end
......
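A standalone sketch of the prefix handling introduced above (host, port, and path values are assumed):

```ruby
# Minimal sketch of the "user@" handling: the prefix is dropped when
# ssh_user is blank, instead of producing "ssh://@host/...".
def clone_url(ssh_user, ssh_host, ssh_port, path)
  user = "#{ssh_user}@" unless ssh_user.empty?
  port = ":#{ssh_port}" unless ssh_port == 22

  "ssh://#{user}#{ssh_host}#{port}/#{path}.git"
end

clone_url('git', 'gitlab.example.com', 22, 'group/project')
# => "ssh://git@gitlab.example.com/group/project.git"
clone_url('', 'gitlab.example.com', 2222, 'group/project')
# => "ssh://gitlab.example.com:2222/group/project.git"
```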
......@@ -45,6 +45,8 @@ module Gitlab
labels[:job_status] = job_succeeded ? "done" : "fail"
@metrics[:sidekiq_jobs_cpu_seconds].observe(labels, job_thread_cputime)
@metrics[:sidekiq_jobs_completion_seconds].observe(labels, monotonic_time)
@metrics[:sidekiq_jobs_db_seconds].observe(labels, ActiveRecord::LogSubscriber.runtime / 1000)
@metrics[:sidekiq_jobs_gitaly_seconds].observe(labels, Gitlab::GitalyClient.query_time)
end
end
......@@ -54,6 +56,8 @@ module Gitlab
{
sidekiq_jobs_cpu_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_cpu_seconds, 'Seconds of cpu time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_completion_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_completion_seconds, 'Seconds to complete Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_db_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_db_seconds, 'Seconds of database time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_gitaly_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_gitaly_seconds, 'Seconds of Gitaly time to run Sidekiq job', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_queue_duration_seconds: ::Gitlab::Metrics.histogram(:sidekiq_jobs_queue_duration_seconds, 'Duration in seconds that a Sidekiq job was queued before being executed', {}, SIDEKIQ_LATENCY_BUCKETS),
sidekiq_jobs_failed_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_failed_total, 'Sidekiq jobs failed'),
sidekiq_jobs_retried_total: ::Gitlab::Metrics.counter(:sidekiq_jobs_retried_total, 'Sidekiq jobs retried'),
......
......@@ -75,7 +75,21 @@ module Sentry
http_get(api_urls.issue_url(issue_id))[:body]
end
def parse_gitlab_issue(plugin_issues)
def parse_gitlab_issue(issue)
parse_issue_annotations(issue) || parse_plugin_issue(issue)
end
def parse_issue_annotations(issue)
issue
.fetch('annotations', [])
.reject(&:blank?)
.map { |annotation| Nokogiri.make(annotation) }
.find { |html| html['href']&.starts_with?(Gitlab.config.gitlab.url) }
.try(:[], 'href')
end
def parse_plugin_issue(issue)
plugin_issues = issue.fetch('pluginIssues', nil)
return unless plugin_issues
gitlab_plugin = plugin_issues.detect { |item| item['id'] == 'gitlab' }
......@@ -145,7 +159,7 @@ module Sentry
short_id: issue.fetch('shortId', nil),
status: issue.fetch('status', nil),
frequency: issue.dig('stats', '24h'),
gitlab_issue: parse_gitlab_issue(issue.fetch('pluginIssues', nil)),
gitlab_issue: parse_gitlab_issue(issue),
project_id: issue.dig('project', 'id'),
project_name: issue.dig('project', 'name'),
project_slug: issue.dig('project', 'slug'),
......
......@@ -6,7 +6,7 @@ module QA
attr_accessor :title, :key
attribute :md5_fingerprint do
Page::Project::Settings::Repository.perform do |setting|
Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |key|
key.find_md5_fingerprint(title)
end
......@@ -25,7 +25,7 @@ module QA
Page::Project::Menu.perform(&:go_to_repository_settings)
Page::Project::Settings::Repository.perform do |setting|
Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |page|
page.fill_key_title(title)
page.fill_key_value(key)
......
......@@ -17,7 +17,7 @@ module QA
expect(deploy_key.md5_fingerprint).to eq key.md5_fingerprint
Page::Project::Settings::Repository.perform do |setting|
Page::Project::Settings::CICD.perform do |setting|
setting.expand_deploy_keys do |keys|
expect(keys).to have_key(deploy_key_title, key.md5_fingerprint)
end
......
......@@ -23,7 +23,6 @@ describe Projects::ClustersController do
describe 'functionality' do
context 'when project has one or more clusters' do
let(:project) { create(:project) }
let!(:enabled_cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
let!(:disabled_cluster) { create(:cluster, :disabled, :provided_by_gcp, :production_environment, projects: [project]) }
......@@ -53,8 +52,6 @@ describe Projects::ClustersController do
end
context 'when project does not have a cluster' do
let(:project) { create(:project) }
it 'returns an empty state page' do
go
......
......@@ -126,6 +126,7 @@ describe 'Container Registry', :js do
describe 'image repo details' do
before do
stub_container_registry_tags(repository: %r{my/image}, tags: ('1'..'20').to_a, with_manifest: true)
visit_container_registry_details 'my/image'
end
......@@ -140,12 +141,18 @@ describe 'Container Registry', :js do
it 'user removes a specific tag from container repository' do
service = double('service')
expect(service).to receive(:execute).with(container_repository) { { status: :success } }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['latest']) { service }
expect(Projects::ContainerRepository::DeleteTagsService).to receive(:new).with(container_repository.project, user, tags: ['1']) { service }
click_on(class: 'js-delete-registry')
first('.js-delete-registry').click
expect(find('.modal .modal-title')).to have_content _('Remove tag')
find('.modal .modal-footer .btn-danger').click
end
it('pagination navigates to the second page') do
pagination = find('.gl-pagination')
pagination.click_link('2')
expect(page).to have_content '20'
end
end
end
end
......
......@@ -95,6 +95,29 @@ shared_examples_for 'snippet editor' do
link = find('a.no-attachment-icon img[alt="banana_sample"]')['src']
expect(link).to match(%r{/#{Regexp.escape(project.full_path)}/uploads/\h{32}/banana_sample\.gif\z})
end
context 'when the git operation fails' do
let(:error) { 'This is a git error' }
before do
allow_next_instance_of(Snippets::CreateService) do |instance|
allow(instance).to receive(:create_commit).and_raise(StandardError, error)
end
fill_form
click_button('Create snippet')
wait_for_requests
end
it 'displays the error' do
expect(page).to have_content(error)
end
it 'renders new page' do
expect(page).to have_content('New Snippet')
end
end
end
context 'when a user is not authenticated' do
......
......@@ -78,6 +78,29 @@ shared_examples_for 'snippet editor' do
expect(reqs.first.status_code).to eq(200)
end
context 'when the git operation fails' do
let(:error) { 'This is a git error' }
before do
allow_next_instance_of(Snippets::CreateService) do |instance|
allow(instance).to receive(:create_commit).and_raise(StandardError, error)
end
fill_form
click_button('Create snippet')
wait_for_requests
end
it 'displays the error' do
expect(page).to have_content(error)
end
it 'renders new page' do
expect(page).to have_content('New Snippet')
end
end
it 'validation fails for the first time' do
fill_in 'personal_snippet_title', with: 'My Snippet Title'
click_button('Create snippet')
......
......@@ -9,9 +9,14 @@ describe('Blob viewer', () => {
let blob;
let mock;
const jQueryMock = {
tooltip: jest.fn(),
};
preloadFixtures('snippets/show.html');
beforeEach(() => {
$.fn.extend(jQueryMock);
mock = new MockAdapter(axios);
loadFixtures('snippets/show.html');
......@@ -27,7 +32,7 @@ describe('Blob viewer', () => {
html: '<div>testing</div>',
});
spyOn(axios, 'get').and.callThrough();
jest.spyOn(axios, 'get');
});
afterEach(() => {
......@@ -38,7 +43,7 @@ describe('Blob viewer', () => {
it('loads source file after switching views', done => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
setImmediate(() => {
expect(
document
.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
......@@ -54,7 +59,7 @@ describe('Blob viewer', () => {
new BlobViewer();
setTimeout(() => {
setImmediate(() => {
expect(
document
.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]')
......@@ -65,26 +70,20 @@ describe('Blob viewer', () => {
});
});
it('doesnt reload file if already loaded', done => {
it('doesnt reload file if already loaded', () => {
const asyncClick = () =>
new Promise(resolve => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(resolve);
setImmediate(resolve);
});
asyncClick()
return asyncClick()
.then(() => asyncClick())
.then(() => {
expect(
document.querySelector('.blob-viewer[data-type="simple"]').getAttribute('data-loaded'),
).toBe('true');
done();
})
.catch(() => {
fail();
done();
});
});
......@@ -100,13 +99,13 @@ describe('Blob viewer', () => {
});
it('has tooltip when disabled', () => {
expect(copyButton.getAttribute('data-original-title')).toBe(
expect(copyButton.getAttribute('title')).toBe(
'Switch to the source to copy the file contents',
);
});
it('is blurred when clicked and disabled', () => {
spyOn(copyButton, 'blur');
jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
copyButton.click();
......@@ -114,7 +113,7 @@ describe('Blob viewer', () => {
});
it('is not blurred when clicked and not disabled', () => {
spyOn(copyButton, 'blur');
jest.spyOn(copyButton, 'blur').mockImplementation(() => {});
copyButton.classList.remove('disabled');
copyButton.click();
......@@ -125,7 +124,7 @@ describe('Blob viewer', () => {
it('enables after switching to simple view', done => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
setImmediate(() => {
expect(copyButton.classList.contains('disabled')).toBeFalsy();
done();
......@@ -135,8 +134,8 @@ describe('Blob viewer', () => {
it('updates tooltip after switching to simple view', done => {
document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]').click();
setTimeout(() => {
expect(copyButton.getAttribute('data-original-title')).toBe('Copy file contents');
setImmediate(() => {
expect(copyButton.getAttribute('title')).toBe('Copy file contents');
done();
});
......@@ -155,7 +154,7 @@ describe('Blob viewer', () => {
it('adds active class to new viewer button', () => {
const simpleBtn = document.querySelector('.js-blob-viewer-switch-btn[data-viewer="simple"]');
spyOn(simpleBtn, 'blur');
jest.spyOn(simpleBtn, 'blur').mockImplementation(() => {});
blob.switchToViewer('simple');
......@@ -174,7 +173,7 @@ describe('Blob viewer', () => {
blob.switchToViewer('simple');
blob.switchToViewer('rich');
expect(axios.get.calls.count()).toBe(1);
expect(axios.get.mock.calls.length).toBe(1);
});
});
});
......@@ -219,7 +219,7 @@ describe('Details Page', () => {
dispatchSpy.mockResolvedValue();
wrapper.setData({ currentPage: 2 });
expect(store.dispatch).toHaveBeenCalledWith('requestTagsList', {
id: wrapper.vm.$route.params.id,
params: wrapper.vm.$route.params.id,
pagination: { page: 2 },
});
});
......
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Snippet Visibility Edit component rendering matches the snapshot 1`] = `
<div
class="form-group"
>
<label>
Visibility level
<gl-link-stub
href="/foo/bar"
target="_blank"
>
<gl-icon-stub
name="question"
size="12"
/>
</gl-link-stub>
</label>
<gl-form-group-stub
id="visibility-level-setting"
>
<gl-form-radio-group-stub
checked="0"
disabledfield="disabled"
htmlfield="html"
options=""
stacked=""
textfield="text"
valuefield="value"
>
<gl-form-radio-stub
class="mb-3"
value="0"
>
<div
class="d-flex align-items-center"
>
<gl-icon-stub
name="lock"
size="16"
/>
<span
class="font-weight-bold ml-1"
>
Private
</span>
</div>
</gl-form-radio-stub>
<gl-form-radio-stub
class="mb-3"
value="1"
>
<div
class="d-flex align-items-center"
>
<gl-icon-stub
name="shield"
size="16"
/>
<span
class="font-weight-bold ml-1"
>
Internal
</span>
</div>
</gl-form-radio-stub>
<gl-form-radio-stub
class="mb-3"
value="2"
>
<div
class="d-flex align-items-center"
>
<gl-icon-stub
name="earth"
size="16"
/>
<span
class="font-weight-bold ml-1"
>
Public
</span>
</div>
</gl-form-radio-stub>
</gl-form-radio-group-stub>
</gl-form-group-stub>
</div>
`;
import SnippetVisibilityEdit from '~/snippets/components/snippet_visibility_edit.vue';
import { GlFormRadio } from '@gitlab/ui';
import { SNIPPET_VISIBILITY } from '~/snippets/constants';
import { mount, shallowMount } from '@vue/test-utils';
describe('Snippet Visibility Edit component', () => {
let wrapper;
let radios;
const defaultHelpLink = '/foo/bar';
const defaultVisibilityLevel = '0';
function findElements(sel) {
return wrapper.findAll(sel);
}
function createComponent(
{
helpLink = defaultHelpLink,
isProjectSnippet = false,
visibilityLevel = defaultVisibilityLevel,
} = {},
deep = false,
) {
const method = deep ? mount : shallowMount;
wrapper = method.call(this, SnippetVisibilityEdit, {
propsData: {
helpLink,
isProjectSnippet,
visibilityLevel,
},
});
radios = findElements(GlFormRadio);
}
afterEach(() => {
wrapper.destroy();
});
describe('rendering', () => {
it('matches the snapshot', () => {
createComponent();
expect(wrapper.element).toMatchSnapshot();
});
it.each`
label | value
${SNIPPET_VISIBILITY.private.label} | ${`0`}
${SNIPPET_VISIBILITY.internal.label} | ${`1`}
${SNIPPET_VISIBILITY.public.label} | ${`2`}
`('should render correct $label label', ({ label, value }) => {
createComponent();
const radio = radios.at(parseInt(value, 10));
expect(radio.attributes('value')).toBe(value);
expect(radio.text()).toContain(label);
});
describe('rendered help-text', () => {
it.each`
description | value | label
${SNIPPET_VISIBILITY.private.description} | ${`0`} | ${SNIPPET_VISIBILITY.private.label}
${SNIPPET_VISIBILITY.internal.description} | ${`1`} | ${SNIPPET_VISIBILITY.internal.label}
${SNIPPET_VISIBILITY.public.description} | ${`2`} | ${SNIPPET_VISIBILITY.public.label}
`('should render correct $label description', ({ description, value }) => {
createComponent({}, true);
const help = findElements('.help-text').at(parseInt(value, 10));
expect(help.text()).toBe(description);
});
it('renders correct Private description for a project snippet', () => {
createComponent({ isProjectSnippet: true }, true);
const helpText = findElements('.help-text')
.at(0)
.text();
expect(helpText).not.toContain(SNIPPET_VISIBILITY.private.description);
expect(helpText).toBe(SNIPPET_VISIBILITY.private.description_project);
});
});
});
describe('functionality', () => {
it('pre-selects correct option in the list', () => {
const pos = 1;
createComponent({ visibilityLevel: `${pos}` }, true);
const radio = radios.at(pos);
expect(radio.find('input[type="radio"]').element.checked).toBe(true);
});
});
});
......@@ -23,14 +23,30 @@ describe SubmoduleHelper do
it 'detects ssh on standard port' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(22) # set this just to be sure
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
stub_url([config.user, '@', config.host, ':gitlab-org/gitlab-foss.git'].join(''))
stub_url([config.ssh_user, '@', config.host, ':gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
it 'detects ssh on standard port without a username' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(22) # set this just to be sure
allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
stub_url([config.host, ':gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
it 'detects ssh on non-standard port' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(2222)
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
stub_url(['ssh://', config.user, '@', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
stub_url(['ssh://', config.ssh_user, '@', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
it 'detects ssh on non-standard port without a username' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_port).and_return(2222)
allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
allow(Gitlab.config.gitlab_shell).to receive(:ssh_path_prefix).and_return(Settings.send(:build_gitlab_shell_ssh_path_prefix))
stub_url(['ssh://', config.host, ':2222/gitlab-org/gitlab-foss.git'].join(''))
expect(subject).to eq([namespace_project_path('gitlab-org', 'gitlab-foss'), namespace_project_tree_path('gitlab-org', 'gitlab-foss', 'hash')])
end
......
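As a quick aside, the submodule URL shapes that the new "without a username" examples stub above look like the following; host and port here are illustrative placeholders, not values from the change:
# Submodule URL forms exercised by these examples
urls = [
  'git@gitlab.example.com:gitlab-org/gitlab-foss.git',             # standard port, with SSH user
  'gitlab.example.com:gitlab-org/gitlab-foss.git',                 # standard port, no SSH user
  'ssh://git@gitlab.example.com:2222/gitlab-org/gitlab-foss.git',  # non-standard port, with SSH user
  'ssh://gitlab.example.com:2222/gitlab-org/gitlab-foss.git'       # non-standard port, no SSH user
]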
# frozen_string_literal: true
require 'spec_helper'
describe ::Gitlab::Ci::Config::Entry::Inherit::Default do
using RSpec::Parameterized::TableSyntax
subject { described_class.new(config) }
context 'validations' do
where(:config, :valid) do
true | true
false | true
%w[image] | true
%w[unknown] | false
%i[image] | false
[true] | false
"string" | false
end
with_them do
it do
expect(subject.valid?).to eq(valid)
end
end
end
describe '#inherit?' do
where(:config, :inherit) do
true | true
false | false
%w[image] | true
%w[before_script] | false
end
with_them do
it do
expect(subject.inherit?('image')).to eq(inherit)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe ::Gitlab::Ci::Config::Entry::Inherit::Variables do
using RSpec::Parameterized::TableSyntax
subject { described_class.new(config) }
context 'validations' do
where(:config, :valid) do
true | true
false | true
%w[A] | true
%w[A B] | true
%i[image] | true
[true] | false
"string" | false
end
with_them do
it do
expect(subject.valid?).to eq(valid)
end
end
end
describe '#inherit?' do
where(:config, :inherit) do
true | true
false | false
%w[A] | true
%w[B] | false
end
with_them do
it do
expect(subject.inherit?('A')).to eq(inherit)
end
end
end
end
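For context, a minimal sketch of a job configuration in the same Ruby-hash form these entry specs use, exercising both inherit:default and inherit:variables; the job, script, and variable names are illustrative only:
config = {
  variables: { GLOBAL_VAR: 'global' },
  default: { before_script: ['global script'], image: 'ruby:2.6' },
  test: {
    script: ['echo test'],
    # inherit only the listed default key and the listed global variable
    inherit: { default: %w[before_script], variables: %w[GLOBAL_VAR] }
  }
}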
......@@ -18,7 +18,7 @@ describe Gitlab::Ci::Config::Entry::Job do
end
before do
allow(entry).to receive_message_chain(:inherit_entry, :default_value).and_return(true)
allow(entry).to receive_message_chain(:inherit_entry, :default_entry, :inherit?).and_return(true)
end
end
......
......@@ -269,13 +269,13 @@ describe Gitlab::Ci::Config::Entry::Processable do
context 'when root yaml variables are used' do
let(:variables) do
Gitlab::Ci::Config::Entry::Variables.new(
A: 'root', C: 'root'
A: 'root', C: 'root', D: 'root'
).value
end
it 'does return all variables and overwrite them' do
expect(entry.value).to include(
variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root' }
variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root', 'D' => 'root' }
)
end
......@@ -293,32 +293,61 @@ describe Gitlab::Ci::Config::Entry::Processable do
)
end
end
context 'when only specific variables are inherited' do
let(:config) do
{
variables: { A: 'job', B: 'job' },
inherit: { variables: ['D'] }
}
end
it 'returns the job variables plus the explicitly inherited root variable' do
expect(entry.value).to include(
variables: { 'A' => 'job', 'B' => 'job', 'D' => 'root' }
)
end
end
end
end
context 'of default:tags' do
using RSpec::Parameterized::TableSyntax
where(:default_tags, :tags, :inherit_default, :result) do
nil | %w[a b] | nil | %w[a b]
nil | %w[a b] | true | %w[a b]
nil | %w[a b] | false | %w[a b]
%w[b c] | %w[a b] | nil | %w[a b]
%w[b c] | %w[a b] | true | %w[a b]
%w[b c] | %w[a b] | false | %w[a b]
%w[b c] | nil | nil | %w[b c]
%w[b c] | nil | true | %w[b c]
%w[b c] | nil | false | nil
where(:name, :default_tags, :tags, :inherit_default, :result) do
"only local tags" | nil | %w[a b] | nil | %w[a b]
"only local tags" | nil | %w[a b] | true | %w[a b]
"only local tags" | nil | %w[a b] | false | %w[a b]
"global and local tags" | %w[b c] | %w[a b] | nil | %w[a b]
"global and local tags" | %w[b c] | %w[a b] | true | %w[a b]
"global and local tags" | %w[b c] | %w[a b] | false | %w[a b]
"only global tags" | %w[b c] | nil | nil | %w[b c]
"only global tags" | %w[b c] | nil | true | %w[b c]
"only global tags" | %w[b c] | nil | false | nil
"only global tags" | %w[b c] | nil | %w[image] | nil
"only global tags" | %w[b c] | nil | %w[tags] | %w[b c]
end
with_them do
let(:config) { { tags: tags, inherit: { default: inherit_default } } }
let(:default_specified_tags) { double('tags', 'specified?' => true, 'valid?' => true, 'value' => default_tags) }
let(:config) do
{ tags: tags,
inherit: { default: inherit_default } }
end
let(:default_specified_tags) do
double('tags',
'specified?' => true,
'valid?' => true,
'value' => default_tags,
'errors' => [])
end
before do
allow(default).to receive('[]').with(:tags).and_return(default_specified_tags)
entry.compose!(deps)
expect(entry).to be_valid
end
it { expect(entry.tags_value).to eq(result) }
......
......@@ -515,6 +515,8 @@ module Gitlab
nil | ["global script"]
{ default: false } | nil
{ default: true } | ["global script"]
{ default: %w[before_script] } | ["global script"]
{ default: %w[image] } | nil
end
with_them do
......@@ -527,26 +529,28 @@ module Gitlab
it { expect(subject[:options][:before_script]).to eq(result) }
end
end
context "in default context" do
using RSpec::Parameterized::TableSyntax
context "in default context" do
using RSpec::Parameterized::TableSyntax
where(:inherit, :result) do
nil | ["global script"]
{ default: false } | nil
{ default: true } | ["global script"]
end
with_them do
let(:config) do
{
default: { before_script: ["global script"] },
test: { script: ["script"], inherit: inherit }
}
where(:inherit, :result) do
nil | ["global script"]
{ default: false } | nil
{ default: true } | ["global script"]
{ default: %w[before_script] } | ["global script"]
{ default: %w[image] } | nil
end
it { expect(subject[:options][:before_script]).to eq(result) }
with_them do
let(:config) do
{
default: { before_script: ["global script"] },
test: { script: ["script"], inherit: inherit }
}
end
it { expect(subject[:options][:before_script]).to eq(result) }
end
end
end
......@@ -845,6 +849,18 @@ module Gitlab
)
end
end
context 'when specific variables are to be inherited' do
let(:inherit) { { variables: %w[VAR1 VAR4] } }
it 'returns all unique variables and inherits only specified variables' do
expect(subject).to contain_exactly(
{ key: 'VAR4', value: 'global4', public: true },
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
end
end
end
context 'when job variables are defined' do
......
......@@ -89,6 +89,13 @@ describe Gitlab::Middleware::Go do
it 'returns the full project path' do
expect_response_with_path(go, enabled_protocol, project.full_path, project.default_branch)
end
context 'with an empty ssh_user' do
it 'returns the full project path' do
allow(Gitlab.config.gitlab_shell).to receive(:ssh_user).and_return('')
expect_response_with_path(go, enabled_protocol, project.full_path, project.default_branch)
end
end
end
context 'without access to the project' do
......@@ -234,7 +241,9 @@ describe Gitlab::Middleware::Go do
def expect_response_with_path(response, protocol, path, branch)
repository_url = case protocol
when :ssh
"ssh://#{Gitlab.config.gitlab.user}@#{Gitlab.config.gitlab.host}/#{path}.git"
shell = Gitlab.config.gitlab_shell
user = "#{shell.ssh_user}@" unless shell.ssh_user.empty?
"ssh://#{user}#{shell.ssh_host}/#{path}.git"
when :http, nil
"http://#{Gitlab.config.gitlab.host}/#{path}.git"
end
......
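A hedged illustration of the clone-URL construction the updated helper above performs; the helper name and host below are made up for the example:
def ssh_clone_url(ssh_user, host, path)
  user = "#{ssh_user}@" unless ssh_user.empty?
  "ssh://#{user}#{host}/#{path}.git"
end
ssh_clone_url('git', 'gitlab.example.com', 'group/project') # => "ssh://git@gitlab.example.com/group/project.git"
ssh_clone_url('', 'gitlab.example.com', 'group/project')    # => "ssh://gitlab.example.com/group/project.git"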
......@@ -20,6 +20,8 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:queue_duration_seconds) { double('queue duration seconds metric') }
let(:completion_seconds_metric) { double('completion seconds metric') }
let(:user_execution_seconds_metric) { double('user execution seconds metric') }
let(:db_seconds_metric) { double('db seconds metric') }
let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
let(:failed_total_metric) { double('failed total metric') }
let(:retried_total_metric) { double('retried total metric') }
let(:running_jobs_metric) { double('running jobs metric') }
......@@ -28,6 +30,8 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_db_seconds, anything, anything, anything).and_return(db_seconds_metric)
allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything).and_return(gitaly_seconds_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
......@@ -55,16 +59,23 @@ describe Gitlab::SidekiqMiddleware::ServerMetrics do
let(:queue_duration_for_job) { 0.01 }
let(:db_duration) { 3 }
let(:gitaly_duration) { 4 }
before do
allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
allow(Gitlab::GitalyClient).to receive(:query_time).and_return(gitaly_duration)
expect(running_jobs_metric).to receive(:increment).with(labels, 1)
expect(running_jobs_metric).to receive(:increment).with(labels, -1)
expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
end
......
......@@ -254,6 +254,34 @@ describe Sentry::Client::Issue do
expect(subject.gitlab_issue).to eq('https://gitlab.com/gitlab-org/gitlab/issues/1')
end
context 'when issue annotations exist' do
before do
issue_sample_response['annotations'] = [
nil,
'',
"<a href=\"http://github.com/issues/6\">github-issue-6</a>",
"<div>annotation</a>",
"<a href=\"http://localhost/gitlab-org/gitlab/issues/2\">gitlab-org/gitlab#2</a>"
]
stub_sentry_request(sentry_request_url, body: issue_sample_response)
end
it 'has a correct GitLab issue url' do
expect(subject.gitlab_issue).to eq('http://localhost/gitlab-org/gitlab/issues/2')
end
end
context 'when no GitLab issue is linked' do
before do
issue_sample_response['pluginIssues'] = []
stub_sentry_request(sentry_request_url, body: issue_sample_response)
end
it 'does not find a GitLab issue' do
expect(subject.gitlab_issue).to be_nil
end
end
it 'has the correct tags' do
expect(subject.tags).to eq({ level: issue_sample_response['level'], logger: issue_sample_response['logger'] })
end
......
......@@ -168,34 +168,42 @@ describe SnippetRepository do
end
end
context 'when files are not named' do
let(:data) do
[
{
file_path: '',
content: 'foo',
action: :create
},
{
file_path: '',
content: 'bar',
action: :create
},
{
file_path: 'foo.txt',
content: 'bar',
action: :create
}
]
shared_examples 'snippet repository with file names' do |*filenames|
it 'sets a name for unnamed files' do
ls_files = snippet.repository.ls_files(nil)
expect(ls_files).to include(*filenames)
end
end
let_it_be(:named_snippet) { { file_path: 'fee.txt', content: 'bar', action: :create } }
let_it_be(:unnamed_snippet) { { file_path: '', content: 'dummy', action: :create } }
it 'sets a name for non named files' do
context 'when some files are not named' do
let(:data) { [named_snippet] + Array.new(2) { unnamed_snippet.clone } }
before do
expect do
snippet_repository.multi_files_action(user, data, commit_opts)
end.not_to raise_error
end
it_behaves_like 'snippet repository with file names', 'snippetfile1.txt', 'snippetfile2.txt'
end
expect(snippet.repository.ls_files(nil)).to include('snippetfile1.txt', 'snippetfile2.txt', 'foo.txt')
context 'repository already has 10 unnamed snippets' do
let(:pre_populate_data) { Array.new(10) { unnamed_snippet.clone } }
let(:data) { [named_snippet] + Array.new(2) { unnamed_snippet.clone } }
before do
# Pre-populate the repository with 10 unnamed snippets.
snippet_repository.multi_files_action(user, pre_populate_data, commit_opts)
expect do
snippet_repository.multi_files_action(user, data, commit_opts)
end.not_to raise_error
end
it_behaves_like 'snippet repository with file names', 'snippetfile10.txt', 'snippetfile11.txt'
end
end
......
......@@ -2,7 +2,7 @@
require 'spec_helper'
describe NotificationRecipientService do
describe NotificationRecipients::BuildService do
let(:service) { described_class }
let(:assignee) { create(:user) }
let(:project) { create(:project, :public) }
......
# frozen_string_literal: true
require 'spec_helper'
describe NotificationRecipients::Builder::Default do
describe '#build!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group).tap { |p| p.add_developer(project_watcher) } }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:other_user) { create(:user) }
let_it_be(:participant) { create(:user) }
let_it_be(:group_watcher) { create(:user) }
let_it_be(:project_watcher) { create(:user) }
let_it_be(:notification_setting_project_w) { create(:notification_setting, source: project, user: project_watcher, level: 2) }
let_it_be(:notification_setting_group_w) { create(:notification_setting, source: group, user: group_watcher, level: 2) }
subject { described_class.new(issue, current_user, action: :new).tap { |s| s.build! } }
context 'participants and project watchers' do
before do
expect(issue).to receive(:participants).and_return([participant, current_user])
end
it 'adds all participants and watchers' do
expect(subject.recipients.map(&:user)).to include(participant, project_watcher, group_watcher)
expect(subject.recipients.map(&:user)).not_to include(other_user)
end
end
context 'subscribers' do
it 'adds all subscribers' do
subscriber = create(:user)
non_subscriber = create(:user)
create(:subscription, project: project, user: subscriber, subscribable: issue, subscribed: true)
create(:subscription, project: project, user: non_subscriber, subscribable: issue, subscribed: false)
expect(subject.recipients.map(&:user)).to include(subscriber)
end
end
end
end
......@@ -710,7 +710,7 @@ describe NotificationService, :mailer do
user_3 = create(:user)
recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
recipient_2 = NotificationRecipient.new(user_2, :custom, custom_action: :new_release)
allow(NotificationRecipientService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1, recipient_2])
release
......
......@@ -185,12 +185,10 @@ describe Snippets::CreateService do
expect { subject }.not_to change { Snippet.count }
end
it 'does not create the repository' do
expect(snippet.repository_exists?).to be_falsey
end
it 'destroys the existing repository' do
expect(Repositories::DestroyService).to receive(:new).and_call_original
it 'destroys the created repository' do
expect_next_instance_of(Repository) do |instance|
expect(instance).to receive(:remove).and_call_original
end
subject
end
......