Commit 2e25ea20 authored by Clement Ho

Merge branch 'master' into 4627-fix-epic-issue-reordering

parents 823dedad 1f7b7c81
5.11.0
6.0.2
3.5.0
3.5.1
......@@ -422,7 +422,7 @@ group :ed25519 do
end
# Gitaly GRPC client
gem 'gitaly-proto', '~> 0.73.0', require: 'gitaly'
gem 'gitaly-proto', '~> 0.74.0', require: 'gitaly'
gem 'toml-rb', '~> 0.3.15', require: false
......
......@@ -309,7 +309,7 @@ GEM
po_to_json (>= 1.0.0)
rails (>= 3.2.0)
gherkin-ruby (0.3.2)
gitaly-proto (0.73.0)
gitaly-proto (0.74.0)
google-protobuf (~> 3.1)
grpc (~> 1.0)
github-linguist (4.7.6)
......@@ -1091,7 +1091,7 @@ DEPENDENCIES
gettext (~> 3.2.2)
gettext_i18n_rails (~> 1.8.0)
gettext_i18n_rails_js (~> 1.2.0)
gitaly-proto (~> 0.73.0)
gitaly-proto (~> 0.74.0)
github-linguist (~> 4.7.0)
gitlab-flowdock-git-hook (~> 1.0.1)
gitlab-license (~> 1.0)
......
......@@ -85,7 +85,8 @@ These types of merge requests for the upcoming release need special consideratio
and a dedicated team with front-end, back-end, and UX.
* **Small features**: any other feature request.
**Large features** must be with a maintainer **by the 1st**. This means that:
It is strongly recommended that **large features** be with a maintainer **by the
1st**. This means that:
* There is a merge request (even if it's WIP).
* The person (or people, if it needs a frontend and backend maintainer) who will
......@@ -100,14 +101,37 @@ The maintainer can also choose to assign a reviewer to perform an initial
review, but this way the maintainer is unlikely to be surprised by receiving an
MR later in the cycle.
**Small features** must be with a reviewer (not necessarily maintainer) **by the
3rd**.
It is strongly recommended that **small features** be with a reviewer (not
necessarily a maintainer) **by the 3rd**.
Most merge requests from the community do not have a specific release
target. However, if one does and falls into either of the above categories, it's
the reviewer's responsibility to manage the above communication and assignment
on behalf of the community member.
#### What happens if these deadlines are missed?
If a small or large feature is _not_ with a maintainer or reviewer by the
recommended date, this does _not_ mean that maintainers or reviewers will refuse
to review or merge it, or that the feature will definitely not make it in before
the feature freeze.
However, with every day that passes without review, it will become more likely
that the feature will slip, because maintainers and reviewers may not have
enough time to do a thorough review, and developers may not have enough time to
adequately address any feedback that may come back.
A maintainer or reviewer may also determine that it will not be possible to
finish the current scope of the feature in time, but that it is possible to
reduce the scope so that something can still ship this month, with the remaining
scope moving to the next release. The sooner this decision is made, in
conversation with the Product Manager and developer, the more time there is to
extract that which is now out of scope, and to finish that which remains in scope.
For these reasons, it is strongly recommended to follow the guidelines above,
to maximize the chances of your feature making it in before the feature freeze,
and to prevent any last-minute surprises.
### On the 7th
Merge requests should still be complete, following the
......
10.4.0-pre
10.5.0-pre
/* eslint-disable func-names, space-before-function-paren, no-var, prefer-arrow-callback, wrap-iife, no-shadow, consistent-return, one-var, one-var-declaration-per-line, camelcase, default-case, no-new, quotes, no-duplicate-case, no-case-declarations, no-fallthrough, max-len */
import Milestone from './milestone';
import notificationsDropdown from './notifications_dropdown';
import LineHighlighter from './line_highlighter';
import MergeRequest from './merge_request';
import initCompareAutocomplete from './compare_autocomplete';
import Sidebar from './right_sidebar';
import Flash from './flash';
import BlobViewer from './blob/viewer/index';
import GfmAutoComplete from './gfm_auto_complete';
......@@ -17,13 +14,13 @@ import { convertPermissionToBoolean } from './lib/utils/common_utils';
import GlFieldErrors from './gl_field_errors';
import Shortcuts from './shortcuts';
import ShortcutsIssuable from './shortcuts_issuable';
import U2FAuthenticate from './u2f/authenticate';
import Diff from './diff';
import SearchAutocomplete from './search_autocomplete';
// EE-only
import UsersSelect from './users_select';
import UserCallout from './user_callout';
import initCompareAutocomplete from './compare_autocomplete';
import initGeoInfoModal from 'ee/init_geo_info_modal'; // eslint-disable-line import/first
import initGroupAnalytics from 'ee/init_group_analytics'; // eslint-disable-line import/first
import initPathLocks from 'ee/path_locks'; // eslint-disable-line import/first
......@@ -92,6 +89,11 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
.catch(fail);
shortcut_handler = true;
break;
case 'projects:environments:metrics':
import('./pages/projects/environments/metrics')
.then(callDefault)
.catch(fail);
break;
case 'projects:merge_requests:index':
import('./pages/projects/merge_requests/index')
.then(callDefault)
......@@ -116,10 +118,15 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
.catch(fail);
break;
case 'projects:milestones:show':
import('./pages/projects/milestones/show')
.then(callDefault)
.catch(fail);
new UserCallout();
break;
case 'groups:milestones:show':
new Milestone();
new Sidebar();
import('./pages/groups/milestones/show')
.then(callDefault)
.catch(fail);
break;
case 'dashboard:milestones:show':
import('./pages/dashboard/milestones/show')
......@@ -590,6 +597,10 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
.then(callDefault)
.catch(fail);
break;
case 'dashboard:groups:index':
import('./pages/dashboard/groups/index')
.then(callDefault)
.catch(fail);
case 'admin:licenses:new':
import(/* webpackChunkName: "admin_licenses" */ 'ee/pages/admin/licenses/new').then(m => m.default()).catch(fail);
break;
......@@ -602,18 +613,15 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
}
switch (path[0]) {
case 'sessions':
import('./pages/sessions')
.then(callDefault)
.catch(fail);
break;
case 'omniauth_callbacks':
if (!gon.u2f) break;
const u2fAuthenticate = new U2FAuthenticate(
$('#js-authenticate-u2f'),
'#js-login-u2f-form',
gon.u2f,
document.querySelector('#js-login-2fa-device'),
document.querySelector('.js-2fa-form'),
);
u2fAuthenticate.start();
// needed in rspec
gl.u2fAuthenticate = u2fAuthenticate;
import('./pages/omniauth_callbacks')
.then(callDefault)
.catch(fail);
break;
case 'admin':
import('./pages/admin')
.then(callDefault)
......@@ -672,10 +680,6 @@ import initLDAPGroupsSelect from 'ee/ldap_groups_select'; // eslint-disable-line
break;
}
break;
case 'dashboard':
case 'root':
new UserCallout();
break;
case 'profiles':
import('./pages/profiles/index/')
.then(callDefault)
......
......@@ -118,14 +118,14 @@ export const showSubLevelItems = (el) => {
moveSubItemsToPosition(el, subItems);
};
export const mouseEnterTopItems = (el) => {
export const mouseEnterTopItems = (el, timeout = getHideSubItemsInterval()) => {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => {
if (currentOpenMenu) hideMenu(currentOpenMenu);
showSubLevelItems(el);
}, getHideSubItemsInterval());
}, timeout);
};
export const mouseLeaveTopItem = (el) => {
......
......@@ -10,7 +10,7 @@ import groupItemComponent from './components/group_item.vue';
Vue.use(Translate);
document.addEventListener('DOMContentLoaded', () => {
export default () => {
const el = document.getElementById('js-groups-tree');
// Don't do anything if element doesn't exist (No groups)
......@@ -71,4 +71,4 @@ document.addEventListener('DOMContentLoaded', () => {
});
},
});
});
};
import Vue from 'vue';
import VueResource from 'vue-resource';
Vue.use(VueResource);
import '../../vue_shared/vue_resource_interceptor';
export default class GroupsService {
constructor(endpoint) {
......
import Vue from 'vue';
import Dashboard from './components/dashboard.vue';
document.addEventListener('DOMContentLoaded', () => new Vue({
export default () => new Vue({
el: '#prometheus-graphs',
render: createElement => createElement(Dashboard),
}));
});
import initGroupsList from '../../../../groups';
export default () => {
initGroupsList();
};
import GroupsList from '~/groups_list';
import Landing from '~/landing';
import initGroupsList from '../../../groups';
export default function () {
new GroupsList(); // eslint-disable-line no-new
initGroupsList();
const landingElement = document.querySelector('.js-explore-groups-landing');
if (!landingElement) return;
const exploreGroupsLanding = new Landing(
......
import Activities from '~/activities';
export default new Activities();
export default () => new Activities();
import Labels from '~/labels';
export default new Labels();
export default () => new Labels();
import Labels from '~/labels';
export default new Labels();
export default () => new Labels();
import initMilestonesShow from '~/pages/init_milestones_show';
export default initMilestonesShow;
......@@ -5,6 +5,7 @@ import notificationsDropdown from '~/notifications_dropdown';
import NotificationsForm from '~/notifications_form';
import ProjectsList from '~/projects_list';
import ShortcutsNavigation from '~/shortcuts_navigation';
import initGroupsList from '../../../groups';
export default () => {
const newGroupChildWrapper = document.querySelector('.js-new-project-subgroup');
......@@ -16,4 +17,6 @@ export default () => {
if (newGroupChildWrapper) {
new NewGroupChild(newGroupChildWrapper);
}
initGroupsList();
};
/* eslint-disable no-new */
import Milestone from '~/milestone';
import Sidebar from '~/right_sidebar';
export default () => {
new Milestone();
new Sidebar();
};
import initU2F from '../../shared/sessions/u2f';
export default () => {
initU2F();
};
import monitoringBundle from '~/monitoring/monitoring_bundle';
export default monitoringBundle;
import initMilestonesShow from '~/pages/init_milestones_show';
export default initMilestonesShow;
......@@ -210,7 +210,7 @@
</div>
<span class="help-block">{{ visibilityLevelDescription }}</span>
<label
v-if="visibilityLevel !== visibilityOptions.PUBLIC"
v-if="visibilityLevel !== visibilityOptions.PRIVATE"
class="request-access"
>
<input
......
import initU2F from '../../shared/sessions/u2f';
export default () => {
initU2F();
};
import U2FAuthenticate from '../../u2f/authenticate';
export default () => {
if (!gon.u2f) return;
const u2fAuthenticate = new U2FAuthenticate(
$('#js-authenticate-u2f'),
'#js-login-u2f-form',
gon.u2f,
document.querySelector('#js-login-2fa-device'),
document.querySelector('.js-2fa-form'),
);
u2fAuthenticate.start();
// needed in rspec
gl.u2fAuthenticate = u2fAuthenticate;
};
......@@ -32,8 +32,8 @@ export default class IssuableTemplateSelector extends TemplateSelector {
this.startLoadingSpinner();
Api.issueTemplate(this.namespacePath, this.projectPath, query.name, this.issuableType, (err, currentTemplate) => {
this.currentTemplate = currentTemplate;
if (err) return; // Error handled by global AJAX error handler
this.stopLoadingSpinner();
if (err) return; // Error handled by global AJAX error handler
this.setInputValueToTemplateContent();
});
return;
......
......@@ -2,7 +2,11 @@ module GroupTree
# rubocop:disable Gitlab/ModuleWithInstanceVariables
def render_group_tree(groups)
@groups = if params[:filter].present?
Gitlab::GroupHierarchy.new(groups.search(params[:filter]))
# We find the ancestors of the search results by ID here.
# Otherwise the ancestors would also have filters applied,
# which would cause them not to be preloaded.
group_ids = groups.search(params[:filter]).select(:id)
Gitlab::GroupHierarchy.new(Group.where(id: group_ids))
.base_and_ancestors
else
# Only show root groups if no parent-id is given
......
......@@ -27,12 +27,16 @@ class GroupDescendantsFinder
end
def execute
# The children array might be extended with the ancestors of projects when
# filtering. In that case, take the maximum so the array does not get limited
# Otherwise, allow paginating through all results
# The children array might be extended with the ancestors of projects and
# subgroups when filtering. In that case, take the maximum so the array does
# not get limited. Otherwise, allow paginating through all results.
#
all_required_elements = children
all_required_elements |= ancestors_for_projects if params[:filter]
if params[:filter]
all_required_elements |= ancestors_of_filtered_subgroups
all_required_elements |= ancestors_of_filtered_projects
end
total_count = [all_required_elements.size, paginator.total_count].max
Kaminari.paginate_array(all_required_elements, total_count: total_count)
......@@ -49,8 +53,11 @@ class GroupDescendantsFinder
end
def paginator
@paginator ||= Gitlab::MultiCollectionPaginator.new(subgroups, projects,
per_page: params[:per_page])
@paginator ||= Gitlab::MultiCollectionPaginator.new(
subgroups,
projects.with_route,
per_page: params[:per_page]
)
end
def direct_child_groups
......@@ -94,15 +101,21 @@ class GroupDescendantsFinder
#
# So when searching 'project', on the 'subgroup' page we want to preload
# 'nested-group' but not 'subgroup' or 'root'
def ancestors_for_groups(base_for_ancestors)
Gitlab::GroupHierarchy.new(base_for_ancestors)
def ancestors_of_groups(base_for_ancestors)
group_ids = base_for_ancestors.except(:select, :sort).select(:id)
Gitlab::GroupHierarchy.new(Group.where(id: group_ids))
.base_and_ancestors(upto: parent_group.id)
end
def ancestors_for_projects
def ancestors_of_filtered_projects
projects_to_load_ancestors_of = projects.where.not(namespace: parent_group)
groups_to_load_ancestors_of = Group.where(id: projects_to_load_ancestors_of.select(:namespace_id))
ancestors_for_groups(groups_to_load_ancestors_of)
ancestors_of_groups(groups_to_load_ancestors_of)
.with_selects_for_list(archived: params[:archived])
end
def ancestors_of_filtered_subgroups
ancestors_of_groups(subgroups)
.with_selects_for_list(archived: params[:archived])
end
......@@ -112,7 +125,7 @@ class GroupDescendantsFinder
# When filtering subgroups, we want to find all matches within the tree of
# descendants to show to the user
groups = if params[:filter]
ancestors_for_groups(subgroups_matching_filter)
subgroups_matching_filter
else
direct_child_groups
end
......@@ -121,8 +134,10 @@ class GroupDescendantsFinder
end
def direct_child_projects
GroupProjectsFinder.new(group: parent_group, current_user: current_user, params: params)
.execute
GroupProjectsFinder.new(group: parent_group,
current_user: current_user,
options: { only_owned: true },
params: params).execute
end
# Finds all projects nested under `parent_group` or any of its descendant
......
......@@ -1042,6 +1042,8 @@ class Project < ActiveRecord::Base
end
def fork_source
return nil unless forked?
forked_from_project || fork_network&.root_project
end
......
......@@ -266,15 +266,7 @@ class Repository
return if kept_around?(sha)
# This will still fail if the file is corrupted (e.g. 0 bytes)
begin
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
rescue Rugged::ReferenceError => ex
Rails.logger.error "Unable to create #{REF_KEEP_AROUND} reference for repository #{path}: #{ex}"
rescue Rugged::OSError => ex
raise unless ex.message =~ /Failed to create locked file/ && ex.message =~ /File exists/
Rails.logger.error "Unable to create #{REF_KEEP_AROUND} reference for repository #{path}: #{ex}"
end
raw_repository.write_ref(keep_around_ref_name(sha), sha, shell: false)
end
def kept_around?(sha)
......
......@@ -331,6 +331,8 @@ class User < ActiveRecord::Base
#
# Returns an ActiveRecord::Relation.
def search(query)
return none if query.blank?
query = query.downcase
order = <<~SQL
......@@ -354,6 +356,8 @@ class User < ActiveRecord::Base
# This method uses ILIKE on PostgreSQL and LIKE on MySQL.
def search_with_secondary_emails(query)
return none if query.blank?
query = query.downcase
email_table = Email.arel_table
......
......@@ -56,6 +56,9 @@ module MergeRequests
end
true
rescue PushRule::MatchError => e
handle_merge_error(log_message: e.message, save_message_on_model: true)
false
end
private
......
......@@ -7,10 +7,8 @@
- page_title "Activity"
- header_title "Activity", activity_dashboard_path
.hidden-xs
= render "projects/last_push"
%div{ class: container_class }
= render "projects/last_push"
= render 'dashboard/activity_head'
%section.activities
......
.js-groups-list-holder
#js-groups-tree{ data: { hide_projects: 'true', endpoint: dashboard_groups_path(format: :json), path: dashboard_groups_path, form_sel: 'form#group-filter-form', filter_sel: '.js-groups-list-filter', holder_sel: '.js-groups-list-holder', dropdown_sel: '.js-group-filter-dropdown-wrap' } }
.loading-container.text-center
= icon('spinner spin 2x', class: 'loading-animation prepend-top-20')
......@@ -3,9 +3,6 @@
- header_title "Groups", dashboard_groups_path
= render 'dashboard/groups_head'
= webpack_bundle_tag 'common_vue'
= webpack_bundle_tag 'groups'
- if params[:filter].blank? && @groups.empty?
= render 'shared/groups/empty_state'
- else
......
......@@ -7,9 +7,8 @@
- page_title "Projects"
- header_title "Projects", dashboard_projects_path
= render "projects/last_push"
%div{ class: container_class }
= render "projects/last_push"
- if show_projects?(@projects, params)
= render 'dashboard/projects_head'
= render 'nav'
......
......@@ -4,9 +4,8 @@
- page_title "Starred Projects"
- header_title "Projects", dashboard_projects_path
= render "projects/last_push"
%div{ class: container_class }
= render "projects/last_push"
= render 'dashboard/projects_head'
- if params[:filter_projects] || any_projects?(@projects)
......
.js-groups-list-holder
#js-groups-tree{ data: { hide_projects: 'true', endpoint: explore_groups_path(format: :json), path: explore_groups_path, form_sel: 'form#group-filter-form', filter_sel: '.js-groups-list-filter', holder_sel: '.js-groups-list-holder', dropdown_sel: '.js-group-filter-dropdown-wrap' } }
.loading-container.text-center
= icon('spinner spin 2x', class: 'loading-animation prepend-top-20')
......@@ -2,9 +2,6 @@
- page_title "Groups"
- header_title "Groups", dashboard_groups_path
= webpack_bundle_tag 'common_vue'
= webpack_bundle_tag 'groups'
- if current_user
= render 'dashboard/groups_head'
- else
......
= webpack_bundle_tag 'common_vue'
= webpack_bundle_tag 'groups'
.js-groups-list-holder
#js-groups-tree{ data: { hide_projects: 'false', group_id: group.id, endpoint: group_children_path(group, format: :json), path: group_path(group), form_sel: 'form#group-filter-form', filter_sel: '.js-groups-list-filter', holder_sel: '.js-groups-list-holder', dropdown_sel: '.js-group-filter-dropdown-wrap' } }
.loading-container.text-center
= icon('spinner spin 2x', class: 'loading-animation prepend-top-20')
- event = last_push_event
- if event && show_last_push_widget?(event)
%div{ class: container_class }
.row-content-block.top-block.hidden-xs.white
.event-last-push
.event-last-push-text
%span= s_("LastPushEvent|You pushed to")
%strong
= link_to event.ref_name, project_commits_path(event.project, event.ref_name), class: 'ref-name'
.row-content-block.top-block.hidden-xs.white
.event-last-push
.event-last-push-text
%span= s_("LastPushEvent|You pushed to")
%strong
= link_to event.ref_name, project_commits_path(event.project, event.ref_name), class: 'ref-name'
- if event.project != @project
%span= s_("LastPushEvent|at")
%strong= link_to_project event.project
- if event.project != @project
%span= s_("LastPushEvent|at")
%strong= link_to_project event.project
#{time_ago_with_tooltip(event.created_at)}
#{time_ago_with_tooltip(event.created_at)}
.pull-right
= link_to new_mr_path_from_push_event(event), title: _("New merge request"), class: "btn btn-info btn-sm" do
#{ _('Create merge request') }
.pull-right
= link_to new_mr_path_from_push_event(event), title: _("New merge request"), class: "btn btn-info btn-sm" do
#{ _('Create merge request') }
......@@ -2,6 +2,7 @@
- page_title _("Activity")
= render 'projects/last_push'
%div{ class: container_class }
= render 'projects/last_push'
= render 'projects/activity'
......@@ -6,9 +6,10 @@
- content_for :page_specific_javascripts do
= webpack_bundle_tag 'blob'
= render 'projects/last_push'
%div{ class: container_class }
= render 'projects/last_push'
#tree-holder.tree-holder
= render 'blob', blob: @blob
......
xml.entry do
xml.id project_commit_url(@project, id: commit.id)
xml.link href: project_commit_url(@project, id: commit.id)
xml.title truncate(commit.title, length: 80)
xml.title truncate(commit.title, length: 80, escape: false)
xml.updated commit.committed_date.xmlschema
xml.media :thumbnail, width: "40", height: "40", url: image_url(avatar_icon(commit.author_email))
......@@ -10,5 +10,5 @@ xml.entry do
xml.email commit.author_email
end
xml.summary markdown(commit.description, pipeline: :single_line)
xml.summary markdown(commit.description, pipeline: :single_line), type: 'html'
end
......@@ -3,7 +3,6 @@
- content_for :page_specific_javascripts do
= webpack_bundle_tag 'common_vue'
= webpack_bundle_tag 'common_d3'
= webpack_bundle_tag 'monitoring'
.prometheus-container{ class: container_class }
.top-area
......
- illustration = local_assigns.fetch(:illustration)
- illustration_size = local_assigns.fetch(:illustration_size)
- title = local_assigns.fetch(:title)
- content = local_assigns.fetch(:content, nil)
- content = local_assigns.fetch(:content)
- action = local_assigns.fetch(:action, nil)
.row.empty-state
......@@ -11,8 +11,7 @@
.col-xs-12
.text-content
%h4.text-center= title
- if content
%p= content
%p= content
- if action
.text-center
= action
......@@ -97,12 +97,18 @@
title: _('This job requires a manual action'),
content: _('This job depends on a user to trigger its process. Often they are used to deploy code to production environments'),
action: ( link_to _('Trigger this manual action'), play_project_job_path(@project, @build), method: :post, class: 'btn btn-primary', title: _('Trigger this manual action') )
- elsif @build.created?
= render 'empty_state',
illustration: 'illustrations/job_not_triggered.svg',
illustration_size: 'svg-306',
title: _('This job has not been triggered yet'),
content: _('This job depends on upstream jobs that need to succeed in order for this job to be triggered')
- else
= render 'empty_state',
illustration: 'illustrations/job_not_triggered.svg',
illustration_size: 'svg-306',
title: _('This job has not been triggered yet')
title: _('This job has not started yet'),
content: _('This job is in pending state and is waiting to be picked by a runner')
= render "sidebar"
.js-build-options{ data: javascript_build_options }
......
......@@ -10,7 +10,8 @@
= webpack_bundle_tag 'common_vue'
= webpack_bundle_tag 'filtered_search'
= render 'projects/last_push'
%div{ class: container_class }
= render 'projects/last_push'
- if @project.merge_requests.exists?
%div{ class: container_class }
......
......@@ -7,7 +7,9 @@
= render partial: 'flash_messages', locals: { project: @project }
= render "projects/last_push"
%div{ class: [container_class, ("limit-container-width" unless fluid_layout)] }
= render "projects/last_push"
= render "home_panel"
- if can?(current_user, :download_code, @project)
......
......@@ -24,6 +24,8 @@
.add-to-tree-dropdown
%ul.dropdown-menu
- if can_edit_tree?
%li.dropdown-header
#{ _('This directory') }
%li
= link_to project_new_blob_path(@project, @id) do
#{ _('New file') }
......@@ -60,6 +62,8 @@
#{ _('New directory') }
%li.divider
%li.dropdown-header
#{ _('This repository') }
%li
= link_to new_project_branch_path(@project) do
#{ _('New branch') }
......
......@@ -6,7 +6,6 @@
= content_for :meta_tags do
= auto_discovery_link_tag(:atom, project_commits_url(@project, @ref, rss_url_options), title: "#{@project.name}:#{@ref} commits")
= render 'projects/last_push'
%div{ class: [(container_class), ("limit-container-width" unless fluid_layout)] }
= render 'projects/last_push'
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
- content_for :page_specific_javascripts do
= page_specific_javascript_bundle_tag('group')
- parent = @group.parent
- group_path = root_url
- group_path << parent.full_path + '/' if parent
......
#!/usr/bin/env ruby
require 'optparse'
options = {}
opt_parser = OptionParser.new do |opt|
opt.banner = <<DOCSTRING
Profile a URL on this GitLab instance.
Usage:
#{__FILE__} url --output=<profile-html> --sql=<sql-log> [--user=<user>] [--post=<post-data>]
Example:
#{__FILE__} /dashboard/issues --output=dashboard-profile.html --sql=dashboard.log --user=root
DOCSTRING
opt.separator ''
opt.separator 'Options:'
opt.on('-o', '--output=/tmp/profile.html', 'profile output filename') do |output|
options[:profile_output] = output
end
opt.on('-s', '--sql=/tmp/profile_sql.txt', 'SQL output filename') do |sql|
options[:sql_output] = sql
end
opt.on('-u', '--user=root', 'User to authenticate as') do |username|
options[:username] = username
end
opt.on('-p', "--post='user=john&pass=test'", 'Send HTTP POST data') do |post_data|
options[:post_data] = post_data
end
end
opt_parser.parse!
options[:url] = ARGV[0]
if options[:url].nil? ||
options[:profile_output].nil? ||
options[:sql_output].nil?
puts opt_parser
exit
end
require File.expand_path('../config/environment', File.dirname(__FILE__))
result = Gitlab::Profiler.profile(options[:url],
logger: Logger.new(options[:sql_output]),
post_data: options[:post_data],
user: User.find_by_username(options[:username]),
private_token: ENV['PRIVATE_TOKEN'])
printer = RubyProf::CallStackPrinter.new(result)
file = File.open(options[:profile_output], 'w')
printer.print(file)
file.close
---
title: 'Geo: sync .gitattributes to info/attributes in secondary nodes'
merge_request: 4159
author:
type: changed
---
title: Update the Geo documentation to replicate all secrets to the secondary
merge_request: 4188
author:
type: fixed
---
title: Update Geo documentation to reuse the primary node SSH host key on secondary
node
merge_request: 4198
author:
type: fixed
---
title: Geo - Remove duplicated message on on geo:update_primary_node_url rake task
merge_request:
author:
type: fixed
---
title: Capture push rule regex errors and present them to user
merge_request: 4102
author:
type: fixed
---
title: Fix failed LDAP logins when sync_ssh_keys is included in config
merge_request:
author:
type: fixed
---
title: Stop loading spinner on error of issuable templates
merge_request: 16600
author: Takuya Noguchi
type: fixed
---
title: Fix bug in which projects with forks could not change visibility settings from
Private to Public
merge_request: 16595
author:
type: fixed
---
title: Allows html text in commits atom feed
merge_request: 16603
author: Jacopo Beschi @jacopo-beschi
type: fixed
---
title: Fix error on empty query for Members API
merge_request: 16235
author:
type: fixed
---
title: Fix missing "allow users to request access" option in public project permissions
merge_request: 16485
author:
type: fixed
---
title: Set timezone for karma to UTC
merge_request: 16602
author: Takuya Noguchi
type: other
---
title: Fix issues when rendering groups and their children
merge_request: 16584
author:
type: fixed
---
title: Add section headers to plus button dropdown
merge_request: 16394
author: George Tsiolis
type: added
---
title: Adjust layout width for fixed layout
merge_request: 16337
author: George Tsiolis
type: fixed
---
title: Use has_table_privilege for TRIGGER on PostgreSQL
merge_request:
author:
type: fixed
---
title: Default to Gitaly for 'git push' HTTP/SSH, and make Gitaly mandatory for SSH
pull
merge_request: 16586
author:
type: other
......@@ -18,6 +18,8 @@ webpackConfig.devtool = 'cheap-inline-source-map';
// Karma configuration
module.exports = function(config) {
process.env.TZ = 'Etc/UTC';
var progressReporter = process.env.CI ? 'mocha' : 'progress';
var karmaConfig = {
......
worker_processes 2
timeout 60
before_fork do |server, worker|
if /darwin/ =~ RUBY_PLATFORM
require 'fiddle'
# Dynamically load Foundation.framework, ~implicitly~ initialising
# the Objective-C runtime before any forking happens in Unicorn
#
# From https://bugs.ruby-lang.org/issues/14009
Fiddle.dlopen '/System/Library/Frameworks/Foundation.framework/Foundation'
end
end
......@@ -49,9 +49,6 @@ var config = {
graphs: './graphs/graphs_bundle.js',
graphs_charts: './graphs/graphs_charts.js',
graphs_show: './graphs/graphs_show.js',
group: './group.js',
groups: './groups/index.js',
groups_list: './groups_list.js',
help: './help/help.js',
issuable: './issuable/issuable_bundle.js',
issues: './issues/issues_bundle.js',
......@@ -133,9 +130,9 @@ var config = {
{
test: /\_worker\.js$/,
use: [
{
{
loader: 'worker-loader',
options: {
options: {
inline: true
}
},
......
......@@ -1293,7 +1293,7 @@ to the CI pipeline:
```yaml
variables:
GIT_STRATEGY: clone
GIT_CHECKOUT: false
GIT_CHECKOUT: "false"
script:
- git checkout master
- git merge $CI_BUILD_REF_NAME
......
......@@ -27,10 +27,23 @@ This exported module should be used instead of directly using `axios` to ensure
});
```
## Mock axios response on tests
## Mock axios response in tests
To help us mock the responses we need we use [axios-mock-adapter][axios-mock-adapter]
To help us mock the responses we are using [axios-mock-adapter][axios-mock-adapter].
Advantages over [`spyOn()`]:
- no need to create response objects
- does not allow call through (which we want to avoid)
- simple API to test error cases
- provides `replyOnce()` to allow for different responses
We have also decided against using [axios interceptors] because they are not suitable for mocking.
[axios interceptors]: https://github.com/axios/axios#interceptors
[`spyOn()`]: https://jasmine.github.io/api/edge/global.html#spyOn
### Example
```javascript
import axios from '~/lib/utils/axios_utils';
......@@ -50,11 +63,11 @@ To help us mock the responses we need we use [axios-mock-adapter][axios-mock-ada
});
afterEach(() => {
mock.reset();
mock.restore();
});
```
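For reference, a minimal self-contained spec in this style (the endpoint, payload, and spec names are hypothetical; `MockAdapter`, `onGet`, `replyOnce`, and `restore` are the axios-mock-adapter API described above):

```javascript
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';

describe('fetching users', () => {
  let mock;

  beforeEach(() => {
    // Intercept every request made through the shared axios instance
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    // Remove the adapter so other specs are not affected
    mock.restore();
  });

  it('recovers from a failed first request', (done) => {
    // replyOnce() consumes one matching request at a time, so consecutive
    // calls can receive different responses: first an error, then data.
    mock.onGet('/api/v4/users').replyOnce(500);
    mock.onGet('/api/v4/users').replyOnce(200, [{ id: 1 }]);

    axios
      .get('/api/v4/users')
      .catch(() => axios.get('/api/v4/users'))
      .then(({ data }) => {
        expect(data.length).toBe(1);
        done();
      })
      .catch(done.fail);
  });
});
```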
### Mock poll requests on tests with axios
### Mock poll requests in tests with axios
Because the polling function requires a header object, we need to always include an object as the third argument:
......
......@@ -36,7 +36,8 @@ graphs/dashboards.
GitLab provides built-in tools to aid the process of improving performance:
* [Sherlock](profiling.md#sherlock)
* [Profiling](profiling.md)
* [Sherlock](profiling.md#sherlock)
* [GitLab Performance Monitoring](../administration/monitoring/performance/index.md)
* [Request Profiling](../administration/monitoring/performance/request_profiling.md)
* [QueryRecorder](query_recorder.md) for preventing `N+1` regressions
......
......@@ -4,6 +4,41 @@ To make it easier to track down performance problems GitLab comes with a set of
profiling tools; some of these are available by default while others need to be
explicitly enabled.
## Profiling a URL
There is a `Gitlab::Profiler.profile` method, and corresponding
`bin/profile-url` script, that enable profiling a GET or POST request to a
specific URL, either as an anonymous user (the default) or as a specific user.
When using the script, command-line documentation is available by passing no
arguments.
When using the method in an interactive console session, any changes to the
application code within that console session will be reflected in the profiler
output.
For example:
```ruby
Gitlab::Profiler.profile('/my-user')
# Returns a RubyProf::Profile for the regular operation of this request
class UsersController; def show; sleep 100; end; end
Gitlab::Profiler.profile('/my-user')
# Returns a RubyProf::Profile where 100 seconds is spent in UsersController#show
```
Passing a `logger:` keyword argument to `Gitlab::Profiler.profile` will send
ActiveRecord and ActionController log output to that logger. Further options are
documented with the method source.
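For example, a minimal sketch combining these pieces (the file paths are arbitrary; the `profile` call with `logger:` and the `RubyProf::CallStackPrinter` usage mirror those in `bin/profile-url`):

```ruby
require 'logger'

# Profile a GET request, sending ActiveRecord and ActionController log
# output to a dedicated file instead of the Rails log.
result = Gitlab::Profiler.profile('/dashboard/issues',
                                  logger: Logger.new('/tmp/dashboard_profile_sql.log'))

# Render the profile as a call stack, as bin/profile-url does.
printer = RubyProf::CallStackPrinter.new(result)
File.open('/tmp/dashboard_profile.html', 'w') { |file| printer.print(file) }
```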
[GitLab-Profiler](https://gitlab.com/gitlab-com/gitlab-profiler) is a project
that builds on this to add some additional niceties, such as allowing
configuration with a single YAML file for multiple URLs, and uploading of the
profile and log output to S3.
For GitLab.com, you can find the latest results here:
<http://redash.gitlab.com/dashboard/gitlab-profiler-statistics>
## Sherlock
Sherlock is a custom profiling tool built into GitLab. Sherlock is _only_
......@@ -27,13 +62,3 @@ Bullet will log query problems to both the Rails log as well as the Chrome
console.
As a follow-up to finding `N+1` queries with Bullet, consider writing a [QueryRecorder test](query_recorder.md) to prevent a regression.
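For instance, a sketch of such a spec (`visit_project_list` is a hypothetical stand-in for the code path under test; `ActiveRecord::QueryRecorder` and the `exceed_query_limit` matcher come from GitLab's spec support):

```ruby
it 'avoids N+1 queries when listing projects' do
  # Record the queries issued for a baseline data set.
  control = ActiveRecord::QueryRecorder.new { visit_project_list }

  # Adding records should not add a query per record.
  create_list(:project, 5)

  expect { visit_project_list }.not_to exceed_query_limit(control)
end
```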
## GitLab Profiler
[Gitlab-Profiler](https://gitlab.com/gitlab-com/gitlab-profiler) was built to
help developers understand why specific URLs of their application may be slow
and to provide hard data that can help reduce load times.
For GitLab.com, you can find the latest results here:
<http://redash.gitlab.com/dashboard/gitlab-profiler-statistics>
......@@ -88,6 +88,8 @@ Finished in 34.51 seconds (files took 0.76702 seconds to load)
1 example, 0 failures
```
Note: `live_debug` only works on JavaScript-enabled specs.
### `let` variables
GitLab's RSpec suite has made extensive use of `let` variables to reduce
......
......@@ -25,32 +25,49 @@ in your testing/production environment.
- **Do not** add anything in the secondaries' Geo Nodes admin area
(**Admin Area ➔ Geo Nodes**). This is handled solely by the primary node.
### Step 1. Copying the database encryption key
### Step 1. Manually replicate secret GitLab values
GitLab stores a unique encryption key on disk that is used to encrypt
sensitive data stored in the database. All secondary nodes must have the
**exact same value** for `db_key_base` as defined on the primary node.
GitLab stores a number of secret values in the `/etc/gitlab/gitlab-secrets.json`
file which *must* match between the primary and secondary nodes. Until there is
a means of automatically replicating these between nodes (see
[issue #3789](https://gitlab.com/gitlab-org/gitlab-ee/issues/3789)), they must
be manually replicated to the secondary.
1. SSH into the **primary** node, and execute the command below
to display the current encryption key:
1. SSH into the **primary** node, and execute the command below:
```bash
sudo gitlab-rake geo:db:show_encryption_key
sudo cat /etc/gitlab/gitlab-secrets.json
```
Copy the encryption key to bring it to the secondary node in the following steps.
This will display the secrets that need to be replicated, in JSON format.
1. SSH into the **secondary** node and login as root:
1. SSH into the **secondary** node and login as the `root` user:
```
sudo -i
```
1. Add the following to `/etc/gitlab/gitlab.rb`, replacing `encryption-key` with the output
of the previous command:
1. Make a backup of any existing secrets:
```ruby
gitlab_rails['db_key_base'] = 'encryption-key'
```bash
mv /etc/gitlab/gitlab-secrets.json /etc/gitlab/gitlab-secrets.json.`date +%F`
```
1. Copy `/etc/gitlab/gitlab-secrets.json` from the primary to the secondary, or
copy-and-paste the file contents between nodes:
```bash
sudo editor /etc/gitlab/gitlab-secrets.json
# paste the output of the `cat` command you ran on the primary
# save and exit
```
1. Ensure the file permissions are correct:
```bash
chown root:root /etc/gitlab/gitlab-secrets.json
chmod 0600 /etc/gitlab/gitlab-secrets.json
```
1. Reconfigure the secondary node for the change to take effect:
......@@ -68,7 +85,62 @@ Make sure the secondary instance is
running and accessible. You can login to the secondary node
with the same credentials as used in the primary.
### Step 2. (Optional) Enabling hashed storage (from GitLab 10.0)
### Step 2. Manually replicate primary SSH host keys
GitLab integrates with the system-installed SSH daemon, designating a user
(typically named git) through which all access requests are handled.
In a [Disaster Recovery](disaster-recovery.md) situation, GitLab system
administrators will promote a secondary Geo replica to a primary and update
the DNS records for the primary domain to point to the secondary, avoiding
the need to update all references to the primary domain (such as Git remotes
and API URLs) to the secondary domain.
This will cause all SSH requests to the newly promoted primary node to fail
due to SSH host key mismatch. To prevent this, the primary SSH host
keys must be manually replicated to the secondary node.
1. SSH into the **secondary** node and login as the `root` user:
```
sudo -i
```
1. Make a backup of any existing SSH host keys:
```bash
find /etc/ssh -iname ssh_host_* -exec mv {} {}.backup.`date +%F` \;
```
1. SSH into the **primary** node, and execute the command below:
```bash
sudo find /etc/ssh -iname ssh_host_* -not -iname '*.pub'
```
1. For each file in that list copy the file from the primary node to
the **same** location on your **secondary** node.
1. On your **secondary** node, ensure the file permissions are correct:
```bash
chown root:root /etc/ssh/ssh_host_*
chmod 0600 /etc/ssh/ssh_host_*
```
1. Regenerate the public keys from the private keys:
```bash
find /etc/ssh -iname ssh_host_* -not -iname '*.backup*' -exec sh -c 'ssh-keygen -y -f "{}" > "{}.pub"' \;
```
1. Restart sshd:
```bash
service ssh restart
```
### Step 3. (Optional) Enabling hashed storage (from GitLab 10.0)
>**Warning**
Hashed storage is in **Alpha**. It is considered experimental and not
......@@ -85,7 +157,7 @@ renames no longer require synchronization between nodes.
![](img/hashed-storage.png)
### Step 3. (Optional) Configuring the secondary to trust the primary
### Step 4. (Optional) Configuring the secondary to trust the primary
You can safely skip this step if your primary uses a CA-issued HTTPS certificate.
......@@ -95,14 +167,14 @@ certificate from the primary and follow
[these instructions](https://docs.gitlab.com/omnibus/settings/ssl.html)
on the secondary.
### Step 4. Enable Git access over HTTP/HTTPS
### Step 5. Enable Git access over HTTP/HTTPS
GitLab Geo synchronizes repositories over HTTP/HTTPS, and therefore requires this clone
method to be enabled. Navigate to **Admin Area ➔ Settings**
(`/admin/application_settings`) on the primary node, and set
`Enabled Git access protocols` to `Both SSH and HTTP(S)` or `Only HTTP(S)`.
### Step 5. Verify proper functioning of the secondary node
### Step 6. Verify proper functioning of the secondary node
Congratulations! Your secondary geo node is now configured!
......
......@@ -26,43 +26,56 @@ in your testing/production environment.
- **Do not** add anything in the secondaries' Geo Nodes admin area
(**Admin Area ➔ Geo Nodes**). This is handled solely by the primary node.
### Step 1. Copying the database encryption key
### Step 1. Manually replicate secret GitLab values
GitLab stores a unique encryption key on disk that is used to encrypt
sensitive data stored in the database. All secondary nodes must have the
**exact same value** for `db_key_base` as defined on the primary node.
GitLab stores a number of secret values in the `/home/git/gitlab/config/secrets.yml`
file which *must* match between the primary and secondary nodes. Until there is
a means of automatically replicating these between nodes (see
[issue #3789](https://gitlab.com/gitlab-org/gitlab-ee/issues/3789)), they must
be manually replicated to the secondary.
1. SSH into the **primary** node, and execute the command below to display the
current encryption key:
1. SSH into the **primary** node, and execute the command below:
```bash
sudo -u git -H bundle exec rake geo:db:show_encryption_key RAILS_ENV=production
sudo cat /home/git/gitlab/config/secrets.yml
```
Copy the encryption key to bring it to the secondary node in the following steps.
This will display the secrets that need to be replicated, in YAML format.
1. SSH into the **secondary**, and execute the command below to open the
`secrets.yml` file:
1. SSH into the **secondary** node and login as the `git` user:
```bash
sudo -u git -H editor config/secrets.yml
sudo -i -u git
```
1. Change the value of `db_key_base` to the output from the primary node.
Then save and close the file.
1. Make a backup of any existing secrets:
1. Restart GitLab for the changes to take effect:
```bash
mv /home/git/gitlab/config/secrets.yml /home/git/gitlab/config/secrets.yml.`date +%F`
```
1. Copy `/home/git/gitlab/config/secrets.yml` from the primary to the secondary, or
copy-and-paste the file contents between nodes:
```bash
service gitlab restart
sudo editor /home/git/gitlab/config/secrets.yml
# paste the output of the `cat` command you ran on the primary
# save and exit
```
1. Ensure the file permissions are correct:
```bash
chown git:git /home/git/gitlab/config/secrets.yml
chmod 0600 /home/git/gitlab/config/secrets.yml
```
The secondary will start automatically replicating missing data from the
primary in a process known as backfill. Meanwhile, the primary node will start
to notify changes to the secondary, which will act on those notifications
immediately. Make sure the secondary instance is running and accessible.
1. Restart GitLab for the changes to take effect:
### Step 2. (Optional) Enabling hashed storage
```bash
service gitlab restart
```
Once restarted, the secondary will automatically start replicating missing data
from the primary in a process known as backfill. Meanwhile, the primary node
......@@ -72,11 +85,15 @@ act on those notifications immediately.
Make sure the secondary instance is running and accessible. You can login to
the secondary node with the same credentials as used in the primary.
### Step 2. (Optional) Enabling hashed storage (from GitLab 10.0)
### Step 2. Manually replicate primary SSH host keys
Read [Manually replicate primary SSH host keys](configuration.md#step-2-manually-replicate-primary-ssh-host-keys)
### Step 3. (Optional) Enabling hashed storage (from GitLab 10.0)
Read [Enabling Hashed Storage](configuration.md#step-2-optional-enabling-hashed-storage-from-gitlab-10-0)
Read [Enabling Hashed Storage](configuration.md#step-3-optional-enabling-hashed-storage-from-gitlab-10-0)
### Step 3. (Optional) Configuring the secondary to trust the primary
### Step 4. (Optional) Configuring the secondary to trust the primary
You can safely skip this step if your primary uses a CA-issued HTTPS certificate.
......@@ -92,16 +109,16 @@ cp primary.geo.example.com.crt /usr/local/share/ca-certificates
update-ca-certificates
```
### Step 4. Enable Git access over HTTP/HTTPS
### Step 5. Enable Git access over HTTP/HTTPS
GitLab Geo synchronizes repositories over HTTP/HTTPS, and therefore requires this clone
method to be enabled. Navigate to **Admin Area ➔ Settings**
(`/admin/application_settings`) on the primary node, and set
`Enabled Git access protocols` to `Both SSH and HTTP(S)` or `Only HTTP(S)`.
### Step 5. Verify proper functioning of the secondary node
### Step 6. Verify proper functioning of the secondary node
Read [Verify proper functioning of the secondary node](configuration.md#step-5-verify-proper-functioning-of-the-secondary-node).
Read [Verify proper functioning of the secondary node](configuration.md#step-6-verify-proper-functioning-of-the-secondary-node).
## Selective replication
......
......@@ -73,7 +73,7 @@ secondary domain, like changing Git remotes and API URLs.
1. SSH in to your **secondary** and login as root:
```
```bash
sudo -i
```
......@@ -82,20 +82,20 @@ secondary domain, like changing Git remotes and API URLs.
After updating the primary domain's DNS records to point to the secondary,
edit `/etc/gitlab/gitlab.rb` on the secondary to reflect the new URL:
```
```ruby
# Change the existing external_url configuration
external_url 'https://gitlab.example.com'
```
1. Reconfigure the secondary node for the change to take effect:
```
```bash
gitlab-ctl reconfigure
```
1. Execute the command below to update the newly promoted primary node URL:
```
```bash
gitlab-rake geo:update_primary_node_url
```
......
......@@ -10,6 +10,19 @@ primary, but this is not officially supported yet.
If you still want to proceed, see our step-by-step instructions on how to
manually [promote a secondary node](disaster-recovery.md) into primary.
## I followed the disaster recovery instructions and now two-factor auth is broken!
The setup instructions for GitLab Geo prior to 10.5 failed to replicate the
`otp_key_base` secret, which is used to encrypt the two-factor authentication
secrets stored in the database. If it differs between primary and secondary
nodes, users with two-factor authentication enabled won't be able to log in
after a DR failover.
If you still have access to the old primary node, you can follow the
instructions in the [Upgrading to GitLab 10.5](updating_the_geo_nodes.md#upgrading-to-gitlab-105)
section to resolve the error. Otherwise, the secret is lost and you'll need to
[reset two-factor authentication for all users](../security/two_factor_authentication.md#disabling-2fa-for-everyone).
## What data is replicated to a secondary node?
We currently replicate project repositories, LFS objects, generated
......
......@@ -14,6 +14,33 @@ all you need to do is update GitLab itself:
the tracking database is enabled.
1. [Test](#check-status-after-updating) primary and secondary nodes, and check version in each.
## Upgrading to GitLab 10.5
For Geo Disaster Recovery to work with minimum downtime, your Geo secondary
should use the same set of secrets as the primary. However, setup instructions
prior to the 10.5 release only synchronized the `db_key_base` secret.
To rectify this error on existing installations, you should **overwrite** the
contents of `/etc/gitlab/gitlab-secrets.json` on the secondary node with the
contents of `/etc/gitlab/gitlab-secrets.json` on the primary node, then run the
following command on the secondary node:
```bash
sudo gitlab-ctl reconfigure
```
If you do not perform this step, you may find that two-factor authentication
[is broken following DR](faq.md#i-followed-the-disaster-recovery-instructions-and-now-two-factor-auth-is-broken).
To prevent SSH requests to the newly promoted primary node from failing
due to SSH host key mismatch when updating the primary domain's DNS record,
you should perform the step to [Manually replicate primary SSH host keys](configuration.md#step-2-manually-replicate-primary-ssh-host-keys) on each
secondary node.
## Upgrading to GitLab 10.4
There are no Geo-specific steps to take!
## Upgrading to GitLab 10.3
### Support for SSH repository synchronization removed
......@@ -22,7 +49,7 @@ In GitLab 10.2, synchronizing secondaries over SSH was deprecated. In 10.3,
support is removed entirely. All installations will switch to the HTTP/HTTPS
cloning method instead. Before upgrading, ensure that all your Geo nodes are
configured to use this method and that it works for your installation. In
particular, ensure that [Git access over HTTP/HTTPS is enabled](configuration.md#step-4-enable-git-access-over-http-https).
particular, ensure that [Git access over HTTP/HTTPS is enabled](configuration.md#step-5-enable-git-access-over-http-https).
Synchronizing repositories over the public Internet using HTTP is insecure, so
you should ensure that you have HTTPS configured before upgrading. Note that
......@@ -30,7 +57,7 @@ file synchronization is **also** insecure in these cases!
## Upgrading to GitLab 10.2
### Secure PostgreSQL replication
### Secure PostgreSQL replication
Support for TLS-secured PostgreSQL replication has been added. If you are
currently using PostgreSQL replication across the open internet without an
......
......@@ -169,6 +169,30 @@ For Omnibus GitLab packages:
1. [Reconfigure GitLab] for the changes to take effect
#### Digital Ocean Spaces and other S3-compatible providers
Not all S3 providers are fully compatible with the Fog library. For example,
if you see `411 Length Required` errors after attempting to upload, you may
need to downgrade the `aws_signature_version` value from the default to
2 [due to this issue](https://github.com/fog/fog-aws/issues/428).
1. For example, with [Digital Ocean Spaces](https://www.digitalocean.com/products/spaces/),
this example configuration can be used for a bucket in Amsterdam (AMS3):
```ruby
gitlab_rails['backup_upload_connection'] = {
'provider' => 'AWS',
'region' => 'ams3',
'aws_access_key_id' => 'AKIAKIAKI',
'aws_secret_access_key' => 'secret123',
'aws_signature_version' => 2,
'endpoint' => 'https://ams3.digitaloceanspaces.com'
}
gitlab_rails['backup_upload_remote_directory'] = 'my.s3.bucket'
```
1. [Reconfigure GitLab] for the changes to take effect
---
For installations from source:
......
# Rack attack
# Rack Attack
To prevent abusive clients doing damage GitLab uses rack-attack gem.
Rack Attack, also known as Rack::Attack, is [a rubygem](https://github.com/kickstarter/rack-attack)
that protects GitLab by providing customizable throttling and blocking of user IPs.
You can prevent brute-force password attacks, scrapers, or any other offenders
by throttling requests from IP addresses making large volumes of requests.
In case you find throttling is not enough to protect you against abusive clients,
Rack Attack offers IP whitelisting, blacklisting, Fail2ban style filtering and
tracking.
If you installed or upgraded GitLab by following the official guides this should be enabled by default.
By default, user sign-in, user sign-up (if enabled), and user password reset are
limited to 6 requests per minute. After trying 6 times, the client will
have to wait for the next minute to be able to try again.
If you are missing `config/initializers/rack_attack.rb` the following steps need to be taken in order to enable protection for your GitLab instance:
If you installed or upgraded GitLab by following the [official guides](../install/README.md),
this should be enabled by default. If your instance is not exposed to any incoming
connections, it is recommended to disable Rack Attack.
1. In config/application.rb find and uncomment the following line:
For more information on how to use these options check out
[rack-attack README](https://github.com/kickstarter/rack-attack/blob/master/README.md).
config.middleware.use Rack::Attack
## Settings
1. Rename `config/initializers/rack_attack.rb.example` to `config/initializers/rack_attack.rb`.
**Omnibus GitLab**
1. Review the `paths_to_be_protected` and add any other path you need protecting.
1. Open `/etc/gitlab/gitlab.rb` with your editor
1. Add the following:
1. Restart GitLab instance.
```ruby
gitlab_rails['rack_attack_git_basic_auth'] = {
'enabled' => true,
'ip_whitelist' => ["127.0.0.1"],
'maxretry' => 10,
'findtime' => 60,
'bantime' => 3600
}
```
By default, user sign-in, user sign-up(if enabled) and user password reset is limited to 6 requests per minute. After trying for 6 times, client will have to wait for the next minute to be able to try again. These settings can be found in `config/initializers/rack_attack.rb`
3. Reconfigure GitLab:
If you want more restrictive/relaxed throttle rule change the `limit` or `period` values. For example, more relaxed throttle rule will be if you set limit: 3 and period: 1.second(this will allow 3 requests per second). You can also add other paths to the protected list by adding to `paths_to_be_protected` variable. If you change any of these settings do not forget to restart your GitLab instance.
```
sudo gitlab-ctl reconfigure
```
In case you find throttling is not enough to protect you against abusive clients, rack-attack gem offers IP whitelisting, blacklisting, Fail2ban style filter and tracking.
The following settings can be configured:
For more information on how to use these options check out [rack-attack README](https://github.com/kickstarter/rack-attack/blob/master/README.md).
- `enabled`: By default this is set to `true`. Set this to `false` to disable Rack Attack.
- `ip_whitelist`: Whitelist any IPs from being blocked. They must be formatted as strings within a ruby array.
For example, `["127.0.0.1", "127.0.0.2", "127.0.0.3"]`.
- `maxretry`: The maximum number of times a request can be made in the
specified time.
- `findtime`: The maximum amount of time failed requests can count against an IP
before it's blacklisted.
- `bantime`: The total amount of time, in seconds, that a blacklisted IP will
be blocked.
**Installations from source**
These settings can be found in `config/initializers/rack_attack.rb`. If you are
missing `config/initializers/rack_attack.rb`, the following steps need to be
taken in order to enable protection for your GitLab instance:
1. In `config/application.rb` find and uncomment the following line:
```ruby
config.middleware.use Rack::Attack
```
1. Copy `config/initializers/rack_attack.rb.example` to `config/initializers/rack_attack.rb`
1. Open `config/initializers/rack_attack.rb`, review the
`paths_to_be_protected`, and add any other path you need protecting
1. Restart GitLab:
```sh
sudo service gitlab restart
```
If you want more restrictive/relaxed throttle rules, edit
`config/initializers/rack_attack.rb` and change the `limit` or `period` values.
For example, a more relaxed throttle rule would set
`limit: 3` and `period: 1.seconds` (this will allow 3 requests per second).
You can also add other paths to the protected list by adding to `paths_to_be_protected`
variable. If you change any of these settings do not forget to restart your
GitLab instance.
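As an illustration, a hypothetical rule of the kind this initializer defines (the rule name, paths, and numbers are placeholders; `Rack::Attack.throttle` is the rack-attack API, and `paths_to_be_protected` mirrors the variable mentioned above):

```ruby
# config/initializers/rack_attack.rb (sketch)
paths_to_be_protected = [
  '/users/sign_in',
  '/users/password',
  '/users',
]

# Allow at most `limit` POST requests per `period` from a single IP to the
# protected paths; requests over the limit receive a 429 response.
Rack::Attack.throttle('protected paths', limit: 10, period: 60.seconds) do |req|
  req.ip if req.post? && paths_to_be_protected.include?(req.path)
end
```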
## Remove blocked IPs from Rack Attack via Redis
In case you want to remove a blocked IP, follow these steps:
1. Find the IPs that have been blocked in the production log:
```sh
grep "Rack_Attack" /var/log/gitlab/gitlab-rails/production.log
```
2. Since the blacklist is stored in Redis, you need to open up `redis-cli`:
```sh
/opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.socket
```
3. You can remove the block using the following syntax, replacing `<ip>` with
the actual IP that is blacklisted:
```
del cache:gitlab:rack::attack:allow2ban:ban:<ip>
```
4. Confirm that the key with the IP no longer shows up:
```
keys *rack::attack*
```
5. Optionally, add the IP to the whitelist to prevent it from being blacklisted
again (see [settings](#settings)).
## Troubleshooting
### Rack Attack is blacklisting the load balancer
Rack Attack may block your load balancer if all traffic appears to come from
the load balancer. In that case, you will need to:
1. [Configure `nginx[real_ip_trusted_addresses]`](https://docs.gitlab.com/omnibus/settings/nginx.html#configuring-gitlab-trusted_proxies-and-the-nginx-real_ip-module).
This will keep users' IPs from being listed as the load balancer IPs.
2. Whitelist the load balancer's IP address(es) in the Rack Attack [settings](#settings).
3. Reconfigure GitLab:
```
sudo gitlab-ctl reconfigure
```
4. [Remove the block via Redis.](#remove-blocked-ips-from-rack-attack-via-redis)
......@@ -21,10 +21,10 @@ project in an easy and automatic way:
1. [Auto Code Quality](#auto-code-quality)
1. [Auto SAST (Static Application Security Testing)](#auto-sast)
1. [Auto SAST for Docker images](#auto-sast-for-docker-images)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
1. [Auto Browser Performance Testing](#auto-browser-performance-testing)
1. [Auto Review Apps](#auto-review-apps)
1. [Auto DAST (Dynamic Application Security Testing)](#auto-dast)
1. [Auto Deploy](#auto-deploy)
1. [Auto Browser Performance Testing](#auto-browser-performance-testing)
1. [Auto Monitoring](#auto-monitoring)
As Auto DevOps relies on many different components, it's good to have a basic
......@@ -229,6 +229,32 @@ check out.
In GitLab Enterprise Edition Ultimate, any security warnings are also
[shown in the merge request widget](../../user/project/merge_requests/sast_docker.md).
### Auto Review Apps
NOTE: **Note:**
This is an optional step, since many projects do not have a Kubernetes cluster
available. If the [prerequisites](#prerequisites) are not met, the job will
silently be skipped.
CAUTION: **Caution:**
Your apps should *not* be manipulated outside of Helm (using Kubernetes directly).
This can cause confusion with Helm not detecting the change, and subsequent
deploys with Auto DevOps can undo your changes. Also, if you change something
and want to undo it by deploying again, Helm may not detect that anything changed
in the first place, and thus not realize that it needs to re-apply the old config.
[Review Apps][review-app] are temporary application environments based on the
branch's code so developers, designers, QA, product managers, and other
reviewers can actually see and interact with code changes as part of the review
process. Auto Review Apps create a Review App for each branch.
The Review App will have a unique URL based on the project name, the branch
name, and a unique number, combined with the Auto DevOps base domain. For
example, `user-project-branch-1234.example.com`. A link to the Review App shows
up in the merge request widget for easy discovery. When the branch is deleted,
for example after the merge request is merged, the Review App will automatically
be deleted.
### Auto DAST
> Introduced in [GitLab Enterprise Edition Ultimate][ee] 10.4.
......@@ -257,32 +283,6 @@ Auto Browser Performance Testing utilizes the [Sitespeed.io container](https://h
In GitLab Enterprise Edition Premium, performance differences between the source
and target branches are [shown in the merge request widget](../../user/project/merge_requests/browser_performance_testing.md).
### Auto Review Apps
NOTE: **Note:**
This is an optional step, since many projects do not have a Kubernetes cluster
available. If the [prerequisites](#prerequisites) are not met, the job will
silently be skipped.
CAUTION: **Caution:**
Your apps should *not* be manipulated outside of Helm (using Kubernetes directly).
This can cause confusion with Helm not detecting the change, and subsequent
deploys with Auto DevOps can undo your changes. Also, if you change something
and want to undo it by deploying again, Helm may not detect that anything changed
in the first place, and thus not realize that it needs to re-apply the old config.
[Review Apps][review-app] are temporary application environments based on the
branch's code so developers, designers, QA, product managers, and other
reviewers can actually see and interact with code changes as part of the review
process. Auto Review Apps create a Review App for each branch.
The Review App will have a unique URL based on the project name, the branch
name, and a unique number, combined with the Auto DevOps base domain. For
example, `user-project-branch-1234.example.com`. A link to the Review App shows
up in the merge request widget for easy discovery. When the branch is deleted,
for example after the merge request is merged, the Review App will automatically
be deleted.
### Auto Deploy
NOTE: **Note:**
......
class PushRule < ActiveRecord::Base
MatchError = Class.new(StandardError)
belongs_to :project
validates :project, presence: true, unless: "is_sample?"
......@@ -98,6 +100,8 @@ class PushRule < ActiveRecord::Base
else
true
end
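# An invalid user-supplied pattern raises RegexpError; convert it into a
# PushRule-specific MatchError so callers can surface the message.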
rescue RegexpError => e
raise MatchError, "Regular expression '#{regex}' is invalid: #{e.message}"
end
def read_setting_with_global_default(setting)
......
......@@ -22,6 +22,8 @@ module Geo
fetch_geo_mirror(project.repository)
end
update_gitattributes
update_registry!(finished_at: DateTime.now, attrs: { last_repository_sync_failure: nil })
log_info('Finished repository sync',
update_delay_s: update_delay_in_seconds,
......@@ -52,6 +54,13 @@ module Geo
project.repository
end
# Update info/attributes file using the contents of .gitattributes file from the default branch
def update_gitattributes
return if project.default_branch.nil?
repository.copy_gitattributes(project.default_branch)
end
def retry_count
registry.public_send("#{type}_retry_count") || -1 # rubocop:disable GitlabSecurity/PublicSend
end
......
module EE
module Gitlab
module Checks
module ChangeAccess
extend ActiveSupport::Concern
extend ::Gitlab::Utils::Override
include PathLocksHelper
include ::Gitlab::Utils::StrongMemoize
ERROR_MESSAGES = {
push_rule_branch_name: "Branch name does not follow the pattern '%{branch_name_regex}'",
push_rule_committer_not_verified: "Committer email '%{committer_email}' is not verified.",
push_rule_committer_not_allowed: "You cannot push commits for '%{committer_email}'. You can only push commits that were committed with one of your own verified emails."
}.freeze
override :exec
def exec
return true if skip_authorization
super
push_rule_check
true
end
private
def push_rule_check
return unless newrev && oldrev && project.feature_available?(:push_rules)
push_rule = project.push_rule
if tag_name
push_rule_tag_check(push_rule)
else
push_rule_branch_check(push_rule)
end
end
def push_rule_tag_check(push_rule)
if tag_deletion_denied_by_push_rule?(push_rule)
raise ::Gitlab::GitAccess::UnauthorizedError, 'You cannot delete a tag'
end
end
def push_rule_branch_check(push_rule)
unless branch_name_allowed_by_push_rule?(push_rule)
message = ERROR_MESSAGES[:push_rule_branch_name] % { branch_name_regex: push_rule.branch_name_regex }
raise ::Gitlab::GitAccess::UnauthorizedError.new(message)
end
commit_validation = push_rule.try(:commit_validation?)
# if newrev is blank, the branch was deleted
return if deletion? || !(commit_validation || validate_path_locks?)
# n+1: https://gitlab.com/gitlab-org/gitlab-ee/issues/3593
::Gitlab::GitalyClient.allow_n_plus_1_calls do
commits.each do |commit|
push_rule_commit_check(commit, push_rule)
end
end
rescue ::PushRule::MatchError => e
raise ::Gitlab::GitAccess::UnauthorizedError, e.message
end
def branch_name_allowed_by_push_rule?(push_rule)
return true if skip_branch_name_push_rule?(push_rule)
push_rule.branch_name_allowed?(branch_name)
end
def skip_branch_name_push_rule?(push_rule)
push_rule.nil? ||
deletion? ||
branch_name.blank? ||
branch_name == project.default_branch
end
def tag_deletion_denied_by_push_rule?(push_rule)
push_rule.try(:deny_delete_tag) &&
!updated_from_web? &&
deletion? &&
tag_exists?
end
def push_rule_commit_check(commit, push_rule)
if push_rule.try(:commit_validation?)
error = check_commit(commit, push_rule)
raise ::Gitlab::GitAccess::UnauthorizedError, error if error
end
if error = check_commit_diff(commit, push_rule)
raise ::Gitlab::GitAccess::UnauthorizedError, error
end
end
# If a commit does not pass push rule validation, the whole push is rejected.
# This method returns nil when no error is found, or an error string otherwise.
# In case of an error, all other checks are canceled and the push is rejected.
def check_commit(commit, push_rule)
unless push_rule.commit_message_allowed?(commit.safe_message)
return "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'"
end
unless push_rule.author_email_allowed?(commit.committer_email)
return "Committer's email '#{commit.committer_email}' does not follow the pattern '#{push_rule.author_email_regex}'"
end
unless push_rule.author_email_allowed?(commit.author_email)
return "Author's email '#{commit.author_email}' does not follow the pattern '#{push_rule.author_email_regex}'"
end
committer_error_message = committer_check(commit, push_rule)
return committer_error_message if committer_error_message
if !updated_from_web? && !push_rule.commit_signature_allowed?(commit)
return "Commit must be signed with a GPG key"
end
# Check whether author is a GitLab member
if push_rule.member_check
unless ::User.existing_member?(commit.author_email.downcase)
return "Author '#{commit.author_email}' is not a member of team"
end
if commit.author_email.casecmp(commit.committer_email) == -1
unless ::User.existing_member?(commit.committer_email.downcase)
return "Committer '#{commit.committer_email}' is not a member of team"
end
end
end
nil
end
def committer_check(commit, push_rule)
unless push_rule.committer_allowed?(commit.committer_email, user_access.user)
committer_is_current_user = commit.committer == user_access.user
if committer_is_current_user && !commit.committer.verified_email?(commit.committer_email)
ERROR_MESSAGES[:push_rule_committer_not_verified] % { committer_email: commit.committer_email }
else
ERROR_MESSAGES[:push_rule_committer_not_allowed] % { committer_email: commit.committer_email }
end
end
end
def check_commit_diff(commit, push_rule)
validations = validations_for_commit(commit, push_rule)
return if validations.empty?
commit.raw_deltas.each do |diff|
validations.each do |validation|
if error = validation.call(diff)
return error
end
end
end
nil
end
def validations_for_commit(commit, push_rule)
validations = base_validations
return validations unless push_rule
validations << file_name_validation(push_rule)
if push_rule.max_file_size > 0
validations << file_size_validation(commit, push_rule.max_file_size)
end
validations
end
def base_validations
validate_path_locks? ? [path_locks_validation] : []
end
def validate_path_locks?
strong_memoize(:validate_path_locks) do
project.feature_available?(:file_locks) &&
project.path_locks.any? && newrev && oldrev &&
project.default_branch == branch_name # locks protect default branch only
end
end
def path_locks_validation
lambda do |diff|
path = diff.new_path || diff.old_path
lock_info = project.find_path_lock(path)
if lock_info && lock_info.user != user_access.user
return "The path '#{lock_info.path}' is locked by #{lock_info.user.name}"
end
end
end
def file_name_validation(push_rule)
lambda do |diff|
if (diff.renamed_file || diff.new_file) && blacklisted_regex = push_rule.filename_blacklisted?(diff.new_path)
return nil unless blacklisted_regex.present?
"File name #{diff.new_path} was blacklisted by the pattern #{blacklisted_regex}."
end
end
end
def file_size_validation(commit, max_file_size)
lambda do |diff|
return if diff.deleted_file
blob = project.repository.blob_at(commit.id, diff.new_path)
if blob && blob.size && blob.size > max_file_size.megabytes
return "File #{diff.new_path.inspect} is larger than the allowed size of #{max_file_size} MB"
end
end
end
def commits
project.repository.new_commits(newrev)
end
end
end
end
end
......@@ -46,7 +46,7 @@ module EE
def ldap_attributes(config)
attributes = super + [
'memberof',
config.sync_ssh_keys
(config.sync_ssh_keys if config.sync_ssh_keys.is_a?(String))
]
attributes.compact.uniq
end
......
......@@ -26,10 +26,8 @@ module Gitlab
$stdout.puts "Updating primary Geo node with URL #{node.url} ..."
if node.update(url: GeoNode.current_node_url)
puts "#{node.url} is now the primary Geo node URL".color(:green)
$stdout.puts "#{node.url} is now the primary Geo node URL".color(:green)
else
puts "Error saving Geo node:\n#{node.errors.full_messages.join("\n")}".color(:red)
$stdout.puts "Error saving Geo node:\n#{node.errors.full_messages.join("\n")}".color(:red)
exit 1
end
......
......@@ -55,11 +55,6 @@ namespace :geo do
Gitlab::Geo::DatabaseTasks.load_seed
end
desc 'Display database encryption key'
task show_encryption_key: :environment do
puts Rails.application.secrets.db_key_base
end
desc 'Refresh Foreign Tables definition in Geo Secondary node'
task refresh_foreign_tables: [:environment] do
if Gitlab::Geo::GeoTasks.foreign_server_configured?
......
module API
module Helpers
module InternalHelpers
SSH_GITALY_FEATURES = {
'git-receive-pack' => [:ssh_receive_pack, Gitlab::GitalyClient::MigrationStatus::OPT_IN],
'git-upload-pack' => [:ssh_upload_pack, Gitlab::GitalyClient::MigrationStatus::OPT_OUT]
}.freeze
attr_reader :redirected_path
def wiki?
......@@ -102,8 +97,14 @@ module API
# Return the Gitaly Address if it is enabled
def gitaly_payload(action)
feature, status = SSH_GITALY_FEATURES[action]
return unless feature && Gitlab::GitalyClient.feature_enabled?(feature, status: status)
return unless %w[git-receive-pack git-upload-pack].include?(action)
if action == 'git-receive-pack'
return unless Gitlab::GitalyClient.feature_enabled?(
:ssh_receive_pack,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
)
end
{
repository: repository.gitaly_repository,
......
......@@ -22,7 +22,7 @@ module API
source = find_source(source_type, params[:id])
users = source.users
users = users.merge(User.search(params[:query])) if params[:query]
users = users.merge(User.search(params[:query])) if params[:query].present?
present paginate(users), with: Entities::Member, source: source
end
......
......@@ -23,7 +23,7 @@ module API
source = find_source(source_type, params[:id])
users = source.users
users = users.merge(User.search(params[:query])) if params[:query]
users = users.merge(User.search(params[:query])) if params[:query].present?
present paginate(users), with: ::API::Entities::Member, source: source
end
......
module Gitlab
module Checks
class ChangeAccess
include PathLocksHelper
include Gitlab::Utils::StrongMemoize
prepend EE::Gitlab::Checks::ChangeAccess
ERROR_MESSAGES = {
push_code: 'You are not allowed to push code to this project.',
......@@ -16,14 +15,10 @@ module Gitlab
update_protected_tag: 'Protected tags cannot be updated.',
delete_protected_tag: 'Protected tags cannot be deleted.',
create_protected_tag: 'You are not allowed to create this tag as it is protected.',
lfs_objects_missing: 'LFS objects are missing. Ensure LFS is properly set up or try a manual "git lfs push --all".',
push_rule_branch_name: "Branch name does not follow the pattern '%{branch_name_regex}'",
push_rule_committer_not_verified: "Comitter email '%{commiter_email}' is not verified.",
push_rule_committer_not_allowed: "You cannot push commits for '%{committer_email}'. You can only push commits that were committed with one of your own verified emails."
lfs_objects_missing: 'LFS objects are missing. Ensure LFS is properly set up or try a manual "git lfs push --all".'
}.freeze
# protocol is currently used only in EE
attr_reader :user_access, :project, :skip_authorization, :protocol
attr_reader :user_access, :project, :skip_authorization, :protocol, :oldrev, :newrev, :ref, :branch_name, :tag_name
def initialize(
change, user_access:, project:, skip_authorization: false,
......@@ -45,7 +40,6 @@ module Gitlab
branch_checks
tag_checks
lfs_objects_exist_check
push_rule_check
true
end
......@@ -59,9 +53,9 @@ module Gitlab
end
def branch_checks
return unless @branch_name
return unless branch_name
if deletion? && @branch_name == project.default_branch
if deletion? && branch_name == project.default_branch
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_default_branch]
end
......@@ -69,7 +63,7 @@ module Gitlab
end
def protected_branch_checks
return unless ProtectedBranch.protected?(project, @branch_name)
return unless ProtectedBranch.protected?(project, branch_name)
if forced_push?
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:force_push_protected_branch]
......@@ -83,7 +77,7 @@ module Gitlab
end
def protected_branch_deletion_checks
unless user_access.can_delete_branch?(@branch_name)
unless user_access.can_delete_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:non_master_delete_protected_branch]
end
......@@ -94,18 +88,18 @@ module Gitlab
def protected_branch_push_checks
if matching_merge_request?
unless user_access.can_merge_to_branch?(@branch_name) || user_access.can_push_to_branch?(@branch_name)
unless user_access.can_merge_to_branch?(branch_name) || user_access.can_push_to_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:merge_protected_branch]
end
else
unless user_access.can_push_to_branch?(@branch_name)
unless user_access.can_push_to_branch?(branch_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:push_protected_branch]
end
end
end
def tag_checks
return unless @tag_name
return unless tag_name
if tag_exists? && user_access.cannot_do_action?(:admin_project)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:change_existing_tags]
......@@ -115,12 +109,12 @@ module Gitlab
end
def protected_tag_checks
return unless ProtectedTag.protected?(project, @tag_name)
return unless ProtectedTag.protected?(project, tag_name)
raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:update_protected_tag]) if update?
raise(GitAccess::UnauthorizedError, ERROR_MESSAGES[:delete_protected_tag]) if deletion?
unless user_access.can_create_tag?(@tag_name)
unless user_access.can_create_tag?(tag_name)
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:create_protected_tag]
end
end
......@@ -132,218 +126,32 @@ module Gitlab
end
def tag_exists?
project.repository.tag_exists?(@tag_name)
project.repository.tag_exists?(tag_name)
end
def forced_push?
Gitlab::Checks::ForcePush.force_push?(@project, @oldrev, @newrev)
Gitlab::Checks::ForcePush.force_push?(project, oldrev, newrev)
end
def update?
!Gitlab::Git.blank_ref?(@oldrev) && !deletion?
!Gitlab::Git.blank_ref?(oldrev) && !deletion?
end
def deletion?
Gitlab::Git.blank_ref?(@newrev)
Gitlab::Git.blank_ref?(newrev)
end
def matching_merge_request?
Checks::MatchingMergeRequest.new(@newrev, @branch_name, @project).match?
Checks::MatchingMergeRequest.new(newrev, branch_name, project).match?
end
def lfs_objects_exist_check
lfs_check = Checks::LfsIntegrity.new(project, @newrev)
lfs_check = Checks::LfsIntegrity.new(project, newrev)
if lfs_check.objects_missing?
raise GitAccess::UnauthorizedError, ERROR_MESSAGES[:lfs_objects_missing]
end
end
def push_rule_check
return unless @newrev && @oldrev && project.feature_available?(:push_rules)
push_rule = project.push_rule
# Prevent tag removal
if @tag_name
if tag_deletion_denied_by_push_rule?(push_rule)
raise GitAccess::UnauthorizedError, 'You cannot delete a tag'
end
else
unless branch_name_allowed_by_push_rule?(push_rule)
message = ERROR_MESSAGES[:push_rule_branch_name] % { branch_name_regex: push_rule.branch_name_regex }
raise GitAccess::UnauthorizedError.new(message)
end
commit_validation = push_rule.try(:commit_validation?)
# if newrev is blank, the branch was deleted
return if deletion? || !(commit_validation || validate_path_locks?)
# n+1: https://gitlab.com/gitlab-org/gitlab-ee/issues/3593
Gitlab::GitalyClient.allow_n_plus_1_calls do
commits.each do |commit|
if commit_validation
error = check_commit(commit, push_rule)
raise GitAccess::UnauthorizedError, error if error
end
if error = check_commit_diff(commit, push_rule)
raise GitAccess::UnauthorizedError, error
end
end
end
end
end
def branch_name_allowed_by_push_rule?(push_rule)
return true if skip_branch_name_push_rule?(push_rule)
push_rule.branch_name_allowed?(@branch_name)
end
def skip_branch_name_push_rule?(push_rule)
push_rule.nil? ||
deletion? ||
@branch_name.blank? ||
@branch_name == @project.default_branch
end
def tag_deletion_denied_by_push_rule?(push_rule)
push_rule.try(:deny_delete_tag) &&
!updated_from_web? &&
deletion? &&
tag_exists?
end
# If commit does not pass push rule validation the whole push should be rejected.
# This method should return nil if no error found or a string if error.
# In case of errors - all other checks will be canceled and push will be rejected.
def check_commit(commit, push_rule)
unless push_rule.commit_message_allowed?(commit.safe_message)
return "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'"
end
unless push_rule.author_email_allowed?(commit.committer_email)
return "Committer's email '#{commit.committer_email}' does not follow the pattern '#{push_rule.author_email_regex}'"
end
unless push_rule.author_email_allowed?(commit.author_email)
return "Author's email '#{commit.author_email}' does not follow the pattern '#{push_rule.author_email_regex}'"
end
committer_error_message = committer_check(commit, push_rule)
return committer_error_message if committer_error_message
if !updated_from_web? && !push_rule.commit_signature_allowed?(commit)
return "Commit must be signed with a GPG key"
end
# Check whether author is a GitLab member
if push_rule.member_check
unless User.existing_member?(commit.author_email.downcase)
return "Author '#{commit.author_email}' is not a member of team"
end
if commit.author_email.casecmp(commit.committer_email) == -1
unless User.existing_member?(commit.committer_email.downcase)
return "Committer '#{commit.committer_email}' is not a member of team"
end
end
end
nil
end
def committer_check(commit, push_rule)
unless push_rule.committer_allowed?(commit.committer_email, user_access.user)
committer_is_current_user = commit.committer == user_access.user
if committer_is_current_user && !commit.committer.verified_email?(commit.committer_email)
ERROR_MESSAGES[:push_rule_committer_not_verified] % { committer_email: commit.committer_email }
else
ERROR_MESSAGES[:push_rule_committer_not_allowed] % { committer_email: commit.committer_email }
end
end
end
def check_commit_diff(commit, push_rule)
validations = validations_for_commit(commit, push_rule)
return if validations.empty?
commit.raw_deltas.each do |diff|
validations.each do |validation|
if error = validation.call(diff)
return error
end
end
end
nil
end
def validations_for_commit(commit, push_rule)
validations = base_validations
return validations unless push_rule
validations << file_name_validation(push_rule)
if push_rule.max_file_size > 0
validations << file_size_validation(commit, push_rule.max_file_size)
end
validations
end
def base_validations
validate_path_locks? ? [path_locks_validation] : []
end
def validate_path_locks?
strong_memoize(:validate_path_locks) do
@project.feature_available?(:file_locks) &&
project.path_locks.any? && @newrev && @oldrev &&
project.default_branch == @branch_name # locks protect default branch only
end
end
def path_locks_validation
lambda do |diff|
path = diff.new_path || diff.old_path
lock_info = project.find_path_lock(path)
if lock_info && lock_info.user != user_access.user
return "The path '#{lock_info.path}' is locked by #{lock_info.user.name}"
end
end
end
def file_name_validation(push_rule)
lambda do |diff|
if (diff.renamed_file || diff.new_file) && blacklisted_regex = push_rule.filename_blacklisted?(diff.new_path)
return nil unless blacklisted_regex.present?
"File name #{diff.new_path} was blacklisted by the pattern #{blacklisted_regex}."
end
end
end
def file_size_validation(commit, max_file_size)
lambda do |diff|
return if diff.deleted_file
blob = project.repository.blob_at(commit.id, diff.new_path)
if blob && blob.size && blob.size > max_file_size.megabytes
return "File #{diff.new_path.inspect} is larger than the allowed size of #{max_file_size} MB"
end
end
end
def commits
project.repository.new_commits(@newrev)
end
end
end
end
......@@ -12,30 +12,40 @@ module Gitlab
# Returns true if the current user can create and execute triggers on the
# given table.
def self.create_and_execute_trigger?(table)
priv =
if Database.postgresql?
where(privilege_type: 'TRIGGER', table_name: table)
.where('grantee = user')
else
queries = [
Grant.select(1)
.from('information_schema.user_privileges')
.where("PRIVILEGE_TYPE = 'SUPER'")
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
if Database.postgresql?
# We _must not_ use quote_table_name as this will produce double
# quotes on PostgreSQL and for "has_table_privilege" we need single
# quotes.
quoted_table = connection.quote(table)
Grant.select(1)
.from('information_schema.schema_privileges')
.where("PRIVILEGE_TYPE = 'TRIGGER'")
.where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
]
begin
from(nil)
.pluck("has_table_privilege(#{quoted_table}, 'TRIGGER')")
.first
rescue ActiveRecord::StatementInvalid
# This error is raised when using a non-existing table name. In this
# case we just want to return false as a user technically can't
# create triggers for such a table.
false
end
else
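# MySQL: the current user needs either the global SUPER privilege or a
# TRIGGER grant on this database; check both and UNION the results.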
queries = [
Grant.select(1)
.from('information_schema.user_privileges')
.where("PRIVILEGE_TYPE = 'SUPER'")
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')"),
union = SQL::Union.new(queries).to_sql
Grant.select(1)
.from('information_schema.schema_privileges')
.where("PRIVILEGE_TYPE = 'TRIGGER'")
.where('TABLE_SCHEMA = ?', Gitlab::Database.database_name)
.where("GRANTEE = CONCAT('\\'', REPLACE(CURRENT_USER(), '@', '\\'@\\''), '\\'')")
]
Grant.from("(#{union}) privs")
end
union = SQL::Union.new(queries).to_sql
priv.any?
Grant.from("(#{union}) privs").any?
end
end
end
end
......
......@@ -132,6 +132,8 @@ module Gitlab
end
def find_by_gitaly(repository, sha, path, limit: MAX_DATA_DISPLAY_SIZE)
return unless path
path = path.sub(/\A\/*/, '')
path = '/' if path.empty?
name = File.basename(path)
......@@ -173,6 +175,8 @@ module Gitlab
end
def find_by_rugged(repository, sha, path, limit:)
return unless path
rugged_commit = repository.lookup(sha)
root_tree = rugged_commit.tree
......
......@@ -1091,19 +1091,6 @@ module Gitlab
end
end
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
raise ArgumentError, "invalid old_ref #{old_ref.inspect}" if !old_ref.nil? && old_ref.include?("\x00")
input = "update #{ref_path}\x00#{ref}\x00#{old_ref}\x00"
run_git!(%w[update-ref --stdin -z]) { |stdin| stdin.write(input) }
end
def rugged_write_ref(ref_path, ref)
rugged.references.create(ref_path, ref, force: true)
end
def fetch_ref(source_repository, source_ref:, target_ref:)
Gitlab::Git.check_namespace!(source_repository)
source_repository = RemoteRepository.new(source_repository) unless source_repository.is_a?(RemoteRepository)
......@@ -1281,32 +1268,15 @@ module Gitlab
author_email: nil, author_name: nil,
start_branch_name: nil, start_repository: self)
OperationService.new(user, self).with_branch(
branch_name,
start_branch_name: start_branch_name,
start_repository: start_repository
) do |start_commit|
index = Gitlab::Git::Index.new(self)
parents = []
if start_commit
index.read_tree(start_commit.rugged_commit.tree)
parents = [start_commit.sha]
gitaly_migrate(:operation_user_commit_files) do |is_enabled|
if is_enabled
gitaly_operation_client.user_commit_files(user, branch_name,
message, actions, author_email, author_name,
start_branch_name, start_repository)
else
rugged_multi_action(user, branch_name, message, actions,
author_email, author_name, start_branch_name, start_repository)
end
actions.each { |opts| index.apply(opts.delete(:action), opts) }
committer = user_to_committer(user)
author = Gitlab::Git.committer_hash(email: author_email, name: author_name) || committer
options = {
tree: index.write_tree,
message: message,
parents: parents,
author: author,
committer: committer
}
create_commit(options)
end
end
# rubocop:enable Metrics/ParameterLists
......@@ -1359,6 +1329,25 @@ module Gitlab
private
def shell_write_ref(ref_path, ref, old_ref)
raise ArgumentError, "invalid ref_path #{ref_path.inspect}" if ref_path.include?(' ')
raise ArgumentError, "invalid ref #{ref.inspect}" if ref.include?("\x00")
raise ArgumentError, "invalid old_ref #{old_ref.inspect}" if !old_ref.nil? && old_ref.include?("\x00")
input = "update #{ref_path}\x00#{ref}\x00#{old_ref}\x00"
run_git!(%w[update-ref --stdin -z]) { |stdin| stdin.write(input) }
end
def rugged_write_ref(ref_path, ref)
rugged.references.create(ref_path, ref, force: true)
rescue Rugged::ReferenceError => ex
Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
rescue Rugged::OSError => ex
raise unless ex.message =~ /Failed to create locked file/ && ex.message =~ /File exists/
Rails.logger.error "Unable to create #{ref_path} reference for repository #{path}: #{ex}"
end
def fresh_worktree?(path)
File.exist?(path) && !clean_stuck_worktree(path)
end
......@@ -2073,6 +2062,39 @@ module Gitlab
remove_remote(remote_name)
end
def rugged_multi_action(
user, branch_name, message, actions, author_email, author_name,
start_branch_name, start_repository)
OperationService.new(user, self).with_branch(
branch_name,
start_branch_name: start_branch_name,
start_repository: start_repository
) do |start_commit|
index = Gitlab::Git::Index.new(self)
parents = []
if start_commit
index.read_tree(start_commit.rugged_commit.tree)
parents = [start_commit.sha]
end
actions.each { |opts| index.apply(opts.delete(:action), opts) }
committer = user_to_committer(user)
author = Gitlab::Git.committer_hash(email: author_email, name: author_name) || committer
options = {
tree: index.write_tree,
message: message,
parents: parents,
author: author,
committer: committer
}
create_commit(options)
end
end
def fetch_remote(remote_name = 'origin', env: nil)
run_git(['fetch', remote_name], env: env).last.zero?
end
......
......@@ -3,6 +3,8 @@ module Gitlab
class OperationService
include Gitlab::EncodingHelper
MAX_MSG_SIZE = 128.kilobytes.freeze
def initialize(repository)
@gitaly_repo = repository.gitaly_repository
@repository = repository
......@@ -175,6 +177,49 @@ module Gitlab
end
end
def user_commit_files(
user, branch_name, commit_message, actions, author_email, author_name,
start_branch_name, start_repository)
req_enum = Enumerator.new do |y|
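# Stream the RPC: one header message first, then for each action an
# action-header message followed by its content in MAX_MSG_SIZE chunks.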
header = user_commit_files_request_header(user, branch_name,
commit_message, actions, author_email, author_name,
start_branch_name, start_repository)
y.yield Gitaly::UserCommitFilesRequest.new(header: header)
actions.each do |action|
action_header = user_commit_files_action_header(action)
y.yield Gitaly::UserCommitFilesRequest.new(
action: Gitaly::UserCommitFilesAction.new(header: action_header)
)
reader = binary_stringio(action[:content])
until reader.eof?
chunk = reader.read(MAX_MSG_SIZE)
y.yield Gitaly::UserCommitFilesRequest.new(
action: Gitaly::UserCommitFilesAction.new(content: chunk)
)
end
end
end
response = GitalyClient.call(@repository.storage, :operation_service,
:user_commit_files, req_enum, remote_storage: start_repository.storage)
if (pre_receive_error = response.pre_receive_error.presence)
raise Gitlab::Git::HooksService::PreReceiveError, pre_receive_error
end
if (index_error = response.index_error.presence)
raise Gitlab::Git::Index::IndexError, index_error
end
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
end
private
def call_cherry_pick_or_revert(rpc, user:, commit:, branch_name:, message:, start_branch_name:, start_repository:)
......@@ -212,6 +257,33 @@ module Gitlab
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(response.branch_update)
end
end
def user_commit_files_request_header(
user, branch_name, commit_message, actions, author_email, author_name,
start_branch_name, start_repository)
Gitaly::UserCommitFilesRequestHeader.new(
repository: @gitaly_repo,
user: Gitlab::Git::User.from_gitlab(user).to_gitaly,
branch_name: encode_binary(branch_name),
commit_message: encode_binary(commit_message),
commit_author_name: encode_binary(author_name),
commit_author_email: encode_binary(author_email),
start_branch_name: encode_binary(start_branch_name),
start_repository: start_repository.gitaly_repository
)
end
def user_commit_files_action_header(action)
Gitaly::UserCommitFilesActionHeader.new(
action: action[:action].upcase.to_sym,
file_path: encode_binary(action[:file_path]),
previous_path: encode_binary(action[:previous_path]),
base64_content: action[:encoding] == 'base64'
)
rescue RangeError
raise ArgumentError, "Unknown action '#{action[:action]}'"
end
end
end
end
# coding: utf-8
module Gitlab
module Profiler
FILTERED_STRING = '[FILTERED]'.freeze
IGNORE_BACKTRACES = %w[
lib/gitlab/i18n.rb
lib/gitlab/request_context.rb
config/initializers
lib/gitlab/database/load_balancing/
lib/gitlab/etag_caching/
lib/gitlab/metrics/
lib/gitlab/middleware/
lib/gitlab/performance_bar/
lib/gitlab/request_profiler/
lib/gitlab/profiler.rb
].freeze
# Takes a URL to profile (can be a fully-qualified URL, or an absolute path)
# and returns the ruby-prof profile result. Formatting that result is the
# caller's responsibility. Requests are GET requests unless post_data is
# passed.
#
# Optional arguments:
# - logger: will be used for SQL logging, including a summary at the end of
# the log file of the total time spent per model class.
#
# - post_data: a string of raw POST data to use. Changes the HTTP verb to
# POST.
#
# - user: a user to authenticate as. Only works if the user has a valid
# personal access token.
#
# - private_token: instead of providing a user instance, the token can be
# given as a string. Takes precedence over the user option.
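#
# Example (illustrative):
#   Gitlab::Profiler.profile('/gitlab-org/gitlab-test',
#                            user: User.find_by(username: 'root'),
#                            logger: Logger.new($stdout))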
def self.profile(url, logger: nil, post_data: nil, user: nil, private_token: nil)
app = ActionDispatch::Integration::Session.new(Rails.application)
verb = :get
headers = {}
if post_data
verb = :post
headers['Content-Type'] = 'application/json'
end
if user
private_token ||= user.personal_access_tokens.active.pluck(:token).first
end
headers['Private-Token'] = private_token if private_token
logger = create_custom_logger(logger, private_token: private_token)
RequestStore.begin!
# Make an initial call for an asset path in development mode to avoid
# sprockets dominating the profiler output.
ActionController::Base.helpers.asset_path('katex.css') if Rails.env.development?
# Rails loads internationalization files lazily the first time a
# translation is needed. Running this prevents this overhead from showing
# up in profiles.
::I18n.t('.')[:test_string]
# Remove API route mounting from the profile.
app.get('/api/v4/users')
result = with_custom_logger(logger) do
RubyProf.profile { app.public_send(verb, url, post_data, headers) } # rubocop:disable GitlabSecurity/PublicSend
end
RequestStore.end!
log_load_times_by_model(logger)
result
end
def self.create_custom_logger(logger, private_token: nil)
return unless logger
logger.dup.tap do |new_logger|
new_logger.instance_variable_set(:@private_token, private_token)
class << new_logger
attr_reader :load_times_by_model, :private_token
def debug(message, *)
message.gsub!(private_token, FILTERED_STRING) if private_token
_, type, time = *message.match(/(\w+) Load \(([0-9.]+)ms\)/)
if type && time
@load_times_by_model ||= {}
@load_times_by_model[type] ||= 0
@load_times_by_model[type] += time.to_f
end
super
backtrace = Rails.backtrace_cleaner.clean(caller)
backtrace.each do |caller_line|
next if caller_line.match(Regexp.union(IGNORE_BACKTRACES))
stripped_caller_line = caller_line.sub("#{Rails.root}/", '')
super(" ↳ #{stripped_caller_line}")
end
end
end
end
end
def self.with_custom_logger(logger)
original_colorize_logging = ActiveSupport::LogSubscriber.colorize_logging
original_activerecord_logger = ActiveRecord::Base.logger
original_actioncontroller_logger = ActionController::Base.logger
if logger
ActiveSupport::LogSubscriber.colorize_logging = false
ActiveRecord::Base.logger = logger
ActionController::Base.logger = logger
end
result = yield
ActiveSupport::LogSubscriber.colorize_logging = original_colorize_logging
ActiveRecord::Base.logger = original_activerecord_logger
ActionController::Base.logger = original_actioncontroller_logger
result
end
def self.log_load_times_by_model(logger)
return unless logger.respond_to?(:load_times_by_model)
logger.load_times_by_model.to_a.sort_by(&:last).reverse.each do |(model, time)|
logger.info("#{model} total: #{time.round(2)}ms")
end
end
end
end
......@@ -34,7 +34,10 @@ module Gitlab
feature_enabled = case action.to_s
when 'git_receive_pack'
Gitlab::GitalyClient.feature_enabled?(:post_receive_pack)
Gitlab::GitalyClient.feature_enabled?(
:post_receive_pack,
status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT
)
when 'git_upload_pack'
true
when 'info_refs'
......
......@@ -6,3 +6,4 @@ gem 'capybara-screenshot', '~> 1.0.18'
gem 'rake', '~> 12.3.0'
gem 'rspec', '~> 3.7'
gem 'selenium-webdriver', '~> 3.8.0'
gem 'airborne', '~> 0.2.13'
GEM
remote: https://rubygems.org/
specs:
activesupport (5.1.4)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (~> 0.7)
minitest (~> 5.1)
tzinfo (~> 1.1)
addressable (2.5.2)
public_suffix (>= 2.0.2, < 4.0)
airborne (0.2.13)
activesupport
rack
rack-test (~> 0.6, >= 0.6.2)
rest-client (>= 1.7.3, < 3.0)
rspec (~> 3.1)
byebug (9.1.0)
capybara (2.16.1)
addressable
......@@ -17,13 +28,25 @@ GEM
childprocess (0.8.0)
ffi (~> 1.0, >= 1.0.11)
coderay (1.1.2)
concurrent-ruby (1.0.5)
diff-lcs (1.3)
domain_name (0.5.20170404)
unf (>= 0.0.5, < 1.0.0)
ffi (1.9.18)
http-cookie (1.0.3)
domain_name (~> 0.5)
i18n (0.9.1)
concurrent-ruby (~> 1.0)
launchy (2.4.3)
addressable (~> 2.3)
method_source (0.9.0)
mime-types (3.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2016.0521)
mini_mime (1.0.0)
mini_portile2 (2.3.0)
minitest (5.11.1)
netrc (0.11.0)
nokogiri (1.8.1)
mini_portile2 (~> 2.3.0)
pry (0.11.3)
......@@ -37,11 +60,15 @@ GEM
rack-test (0.8.2)
rack (>= 1.0, < 3)
rake (12.3.0)
rest-client (2.0.2)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
rspec (3.7.0)
rspec-core (~> 3.7.0)
rspec-expectations (~> 3.7.0)
rspec-mocks (~> 3.7.0)
rspec-core (3.7.0)
rspec-core (3.7.1)
rspec-support (~> 3.7.0)
rspec-expectations (3.7.0)
diff-lcs (>= 1.2.0, < 2.0)
......@@ -54,6 +81,12 @@ GEM
selenium-webdriver (3.8.0)
childprocess (~> 0.5)
rubyzip (~> 1.0)
thread_safe (0.3.6)
tzinfo (1.2.4)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.4)
xpath (2.1.0)
nokogiri (~> 1.3)
......@@ -61,6 +94,7 @@ PLATFORMS
ruby
DEPENDENCIES
airborne (~> 0.2.13)
capybara (~> 2.16.1)
capybara-screenshot (~> 1.0.18)
pry-byebug (~> 3.5.1)
......@@ -69,4 +103,4 @@ DEPENDENCIES
selenium-webdriver (~> 3.8.0)
BUNDLED WITH
1.16.0
1.16.1
......@@ -11,6 +11,8 @@ module QA
autoload :Scenario, 'qa/runtime/scenario'
autoload :Browser, 'qa/runtime/browser'
autoload :Env, 'qa/runtime/env'
autoload :Address, 'qa/runtime/address'
autoload :API, 'qa/runtime/api'
end
##
......@@ -26,6 +28,7 @@ module QA
autoload :Group, 'qa/factory/resource/group'
autoload :Project, 'qa/factory/resource/project'
autoload :DeployKey, 'qa/factory/resource/deploy_key'
autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token'
end
module Repository
......@@ -85,6 +88,7 @@ module QA
autoload :Main, 'qa/page/menu/main'
autoload :Side, 'qa/page/menu/side'
autoload :Admin, 'qa/page/menu/admin'
autoload :Profile, 'qa/page/menu/profile'
end
module Dashboard
......@@ -108,6 +112,10 @@ module QA
end
end
module Profile
autoload :PersonalAccessTokens, 'qa/page/profile/personal_access_tokens'
end
module Admin
autoload :Settings, 'qa/page/admin/settings'
end
......
module QA
module Factory
module Resource
##
# Create a personal access token that can be used by the API
#
class PersonalAccessToken < Factory::Base
attr_accessor :name
product :access_token do
Page::Profile::PersonalAccessTokens.act { created_access_token }
end
def fabricate!
Page::Menu::Main.act { go_to_profile_settings }
Page::Menu::Profile.act { click_access_tokens }
Page::Profile::PersonalAccessTokens.perform do |page|
page.fill_token_name(name || 'api-test-token')
page.check_api
page.create_token
end
end
end
end
end
end
......@@ -77,7 +77,7 @@ module Page
view 'app/views/devise/sessions/_new_base.html.haml' do
element :login_field, 'text_field :login'
element :passowrd_field, 'password_field :password'
element :password_field, 'password_field :password'
element :sign_in_button, 'submit "Sign in"'
end
......@@ -103,6 +103,16 @@ view 'app/views/my/view.html.haml' do
end
```
## Running the test locally
During development, you can run the `qa:selectors` test by running
```shell
bin/qa Test::Sanity::Selectors
```
from within the `qa` directory.
## Where to ask for help?
If you need more information, ask for help on `#qa` channel on Slack (GitLab
......
......@@ -7,6 +7,7 @@ module QA
element :user_avatar
element :user_menu, '.dropdown-menu-nav'
element :user_sign_out_link, 'link_to "Sign out"'
element :settings_link, 'link_to "Settings"'
end
view 'app/views/layouts/nav/_dashboard.html.haml' do
......@@ -40,7 +41,13 @@ module QA
def sign_out
within_user_menu do
click_link('Sign out')
click_link 'Sign out'
end
end
def go_to_profile_settings
within_user_menu do
click_link 'Settings'
end
end
......
module QA
module Page
module Menu
class Profile < Page::Base
view 'app/views/layouts/nav/sidebar/_profile.html.haml' do
element :access_token_link, 'link_to profile_personal_access_tokens_path'
element :access_token_title, 'Access Tokens'
element :top_level_items, '.sidebar-top-level-items'
end
def click_access_tokens
within_sidebar do
click_link('Access Tokens')
end
end
private
def within_sidebar
page.within('.sidebar-top-level-items') do
yield
end
end
end
end
end
end
module QA
module Page
module Profile
class PersonalAccessTokens < Page::Base
view 'app/views/shared/_personal_access_tokens_form.html.haml' do
element :personal_access_token_name_field, 'text_field :name'
element :create_token_button, 'submit "Create #{type} token"' # rubocop:disable Lint/InterpolationCheck
element :scopes_api_radios, "label :scopes"
end
view 'app/views/profiles/personal_access_tokens/index.html.haml' do
element :create_token_field, "text_field_tag 'created-personal-access-token'"
end
def fill_token_name(name)
fill_in 'personal_access_token_name', with: name
end
def check_api
check 'personal_access_token_scopes_api'
end
def create_token
click_on 'Create personal access token'
end
def created_access_token
page.find('#created-personal-access-token').value
end
end
end
end
end
module QA
module Runtime
class Address
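# Resolves an instance (a Symbol looked up via Runtime::Scenario, or a
# literal URL string) plus an optional page object into a full address.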
attr_reader :address
def initialize(instance, page = nil)
@instance = instance
@address = host + (page.is_a?(String) ? page : page&.path)
end
def host
if @instance.is_a?(Symbol)
Runtime::Scenario.send("#{@instance}_address")
else
@instance.to_s
end
end
end
end
end
require 'airborne'
module QA
module Runtime
module API
class Client
attr_reader :address
def initialize(address = :gitlab)
@address = address
end
def personal_access_token
@personal_access_token ||= get_personal_access_token
end
def get_personal_access_token
# you can set the environment variable PERSONAL_ACCESS_TOKEN
# to use a specific access token rather than create one from the UI
if Runtime::Env.personal_access_token
Runtime::Env.personal_access_token
else
create_personal_access_token
end
end
private
def create_personal_access_token
Runtime::Browser.visit(@address, Page::Main::Login) do
Page::Main::Login.act { sign_in_using_credentials }
Factory::Resource::PersonalAccessToken.fabricate!.access_token
end
end
end
class Request
API_VERSION = 'v4'.freeze
def initialize(api_client, path, personal_access_token: nil)
personal_access_token ||= api_client.personal_access_token
request_path = request_path(path, personal_access_token: personal_access_token)
@session_address = Runtime::Address.new(api_client.address, request_path)
end
def url
@session_address.address
end
# Prepend a request path with the path to the API
#
# path - Path to append
#
# Examples
#
# >> request_path('/issues')
# => "/api/v4/issues"
#
# >> request_path('/issues', personal_access_token: 'sometoken')
# => "/api/v4/issues?private_token=..."
#
# Returns the relative path to the requested API resource
def request_path(path, version: API_VERSION, personal_access_token: nil, oauth_access_token: nil)
full_path = File.join('/api', version, path)
if oauth_access_token
query_string = "access_token=#{oauth_access_token}"
elsif personal_access_token
query_string = "private_token=#{personal_access_token}"
end
if query_string
full_path << (path.include?('?') ? '&' : '?')
full_path << query_string
end
full_path
end
end
end
end
end
......@@ -24,9 +24,7 @@ module QA
# based on `Runtime::Scenario#something_address`.
#
def visit(address, page, &block)
Browser::Session.new(address, page).tap do |session|
session.perform(&block)
end
Browser::Session.new(address, page).perform(&block)
end
def self.visit(address, page, &block)
......@@ -94,20 +92,15 @@ module QA
include Capybara::DSL
def initialize(instance, page = nil)
@instance = instance
@address = host + page&.path
@session_address = Runtime::Address.new(instance, page)
end
def host
if @instance.is_a?(Symbol)
Runtime::Scenario.send("#{@instance}_address")
else
@instance.to_s
end
def url
@session_address.address
end
def perform(&block)
visit(@address)
visit(url)
yield if block_given?
rescue
......@@ -130,7 +123,7 @@ module QA
# See gitlab-org/gitlab-qa#102
#
def clear!
visit(@address)
visit(url)
reset_session!
end
end
......
......@@ -3,6 +3,7 @@ module QA
module Env
extend self
# set to 'false' to have Chrome run visibly instead of headless
def chrome_headless?
(ENV['CHROME_HEADLESS'] =~ /^(false|no|0)$/i) != 0
end
......@@ -10,6 +11,11 @@ module QA
def running_in_ci?
ENV['CI'] || ENV['CI_SERVER']
end
# Specifies a token that can be used for the API
def personal_access_token
ENV['PERSONAL_ACCESS_TOKEN']
end
end
end
end
module QA
feature 'API users', :core do
before(:context) do
@api_client = Runtime::API::Client.new(:gitlab)
end
context 'when authenticated' do
let(:request) { Runtime::API::Request.new(@api_client, '/users') }
scenario 'get list of users' do
get request.url
expect_status(200)
end
scenario 'submit request with a valid user name' do
get request.url, { params: { username: 'root' } }
expect_status(200)
expect(json_body).to be_an Array
expect(json_body.size).to eq(1)
expect(json_body.first[:username]).to eq Runtime::User.name
end
scenario 'submit request with an invalid user name' do
get request.url, { params: { username: 'invalid' } }
expect_status(200)
expect(json_body).to be_an Array
expect(json_body.size).to eq(0)
end
end
scenario 'submit request with an invalid token' do
request = Runtime::API::Request.new(@api_client, '/users', personal_access_token: 'invalid')
get request.url
expect_status(401)
end
end
end
describe QA::Runtime::API::Client do
include Support::StubENV
describe 'initialization' do
it 'defaults to :gitlab address' do
expect(described_class.new.address).to eq :gitlab
end
it 'uses specified address' do
client = described_class.new('http:///example.com')
expect(client.address).to eq 'http:///example.com'
end
end
describe '#get_personal_access_token' do
it 'returns specified token from env' do
stub_env('PERSONAL_ACCESS_TOKEN', 'a_token')
expect(described_class.new.get_personal_access_token).to eq 'a_token'
end
it 'returns a created token' do
allow_any_instance_of(described_class)
.to receive(:create_personal_access_token).and_return('created_token')
expect(described_class.new.get_personal_access_token).to eq 'created_token'
end
end
end
describe QA::Runtime::API::Request do
include Support::StubENV
before do
stub_env('PERSONAL_ACCESS_TOKEN', 'a_token')
end
let(:client) { QA::Runtime::API::Client.new('http://example.com') }
let(:request) { described_class.new(client, '/users') }
describe '#url' do
it 'returns the full api request url' do
expect(request.url).to eq 'http://example.com/api/v4/users?private_token=a_token'
end
end
describe '#request_path' do
it 'prepends the api path' do
expect(request.request_path('/users')).to eq '/api/v4/users'
end
it 'adds the personal access token' do
expect(request.request_path('/users', personal_access_token: 'token'))
.to eq '/api/v4/users?private_token=token'
end
it 'adds the oauth access token' do
expect(request.request_path('/users', oauth_access_token: 'otoken'))
.to eq '/api/v4/users?access_token=otoken'
end
it 'respects query parameters' do
expect(request.request_path('/users?page=1')).to eq '/api/v4/users?page=1'
expect(request.request_path('/users?page=1', personal_access_token: 'token'))
.to eq '/api/v4/users?page=1&private_token=token'
end
it 'uses a different api version' do
expect(request.request_path('/users', version: 'v3')).to eq '/api/v3/users'
end
end
end
describe QA::Runtime::Env do
before do
allow(ENV).to receive(:[]).and_call_original
end
include Support::StubENV
describe '.chrome_headless?' do
context 'when there is an env variable set' do
......@@ -57,8 +55,4 @@ describe QA::Runtime::Env do
end
end
end
def stub_env(name, value)
allow(ENV).to receive(:[]).with(name).and_return(value)
end
end
require_relative '../qa'
Dir[File.join(__dir__, 'support', '**', '*.rb')].each { |f| require f }
RSpec.configure do |config|
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
......
# Inspired by https://github.com/ljkbennett/stub_env/blob/master/lib/stub_env/helpers.rb
module Support
module StubENV
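# Usage in a spec (include Support::StubENV first):
#   stub_env('PERSONAL_ACCESS_TOKEN', 'a_token')
#   stub_env('CI' => '1', 'CI_SERVER' => '1') # stub several at once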
def stub_env(key_or_hash, value = nil)
init_stub unless env_stubbed?
if key_or_hash.is_a? Hash
key_or_hash.each { |k, v| add_stubbed_value(k, v) }
else
add_stubbed_value key_or_hash, value
end
end
private
STUBBED_KEY = '__STUBBED__'.freeze
def add_stubbed_value(key, value)
allow(ENV).to receive(:[]).with(key).and_return(value)
allow(ENV).to receive(:key?).with(key).and_return(true)
allow(ENV).to receive(:fetch).with(key).and_return(value)
allow(ENV).to receive(:fetch).with(key, anything()) do |_, default_val|
value || default_val
end
end
def env_stubbed?
ENV[STUBBED_KEY]
end
def init_stub
allow(ENV).to receive(:[]).and_call_original
allow(ENV).to receive(:key?).and_call_original
allow(ENV).to receive(:fetch).and_call_original
add_stubbed_value(STUBBED_KEY, true)
end
end
end
......@@ -20,4 +20,24 @@ describe Dashboard::GroupsController do
expect(assigns(:groups)).to contain_exactly(member_of_group)
end
context 'when rendering an expanded hierarchy with public groups you are not a member of', :nested_groups do
let!(:top_level_result) { create(:group, name: 'chef-top') }
let!(:top_level_a) { create(:group, name: 'top-a') }
let!(:sub_level_result_a) { create(:group, name: 'chef-sub-a', parent: top_level_a) }
let!(:other_group) { create(:group, name: 'other') }
before do
top_level_result.add_master(user)
top_level_a.add_master(user)
end
it 'renders only groups the user is a member of when searching hierarchy correctly' do
get :index, filter: 'chef', format: :json
expect(response).to have_gitlab_http_status(200)
all_groups = [top_level_result, top_level_a, sub_level_result_a]
expect(assigns(:groups)).to contain_exactly(*all_groups)
end
end
end
......@@ -160,6 +160,30 @@ describe Groups::ChildrenController do
expect(json_response).to eq([])
end
it 'succeeds if multiple pages contain matching subgroups' do
create(:group, parent: group, name: 'subgroup-filter-1')
create(:group, parent: group, name: 'subgroup-filter-2')
# Creating the group-to-nest first so it would be loaded into the
# relation before its parents; this is what would cause the
# crash in: https://gitlab.com/gitlab-org/gitlab-ce/issues/40785.
#
# If we create the parent groups first, those would be loaded into the
# collection first, and the pagination would cut off the actual search
# result. In this case the hierarchy can be rendered without crashing,
# it's just incomplete.
group_to_nest = create(:group, parent: group, name: 'subsubgroup-filter-3')
subgroup = create(:group, parent: group)
3.times do |i|
subgroup = create(:group, parent: subgroup)
end
group_to_nest.update!(parent: subgroup)
get :index, group_id: group.to_param, filter: 'filter', per_page: 3, format: :json
expect(response).to have_gitlab_http_status(200)
end
it 'includes pagination headers' do
2.times { |i| create(:group, :public, parent: public_subgroup, name: "filterme#{i}") }
......
require 'spec_helper'
describe Projects::AvatarsController do
let(:project) { create(:project, avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")) }
let(:project) { create(:project, :repository, avatar: fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")) }
let(:user) { create(:user) }
before do
......@@ -10,6 +10,12 @@ describe Projects::AvatarsController do
controller.instance_variable_set(:@project, project)
end
it 'GET #show' do
get :show, namespace_id: project.namespace.id, project_id: project.id
expect(response).to have_gitlab_http_status(404)
end
it 'removes avatar from DB by calling destroy' do
delete :destroy, namespace_id: project.namespace.id, project_id: project.id
expect(project.avatar.present?).to be_falsey
......
......@@ -41,15 +41,21 @@ describe Projects::CommitsController do
context "when the ref name ends in .atom" do
context "when the ref does not exist with the suffix" do
it "renders as atom" do
before do
get(:show,
namespace_id: project.namespace,
project_id: project,
id: "master.atom")
end
it "renders as atom" do
expect(response).to be_success
expect(response.content_type).to eq('application/atom+xml')
end
it 'renders summary with type=html' do
expect(response.body).to include('<summary type="html">')
end
end
context "when the ref exists with the suffix" do
......
......@@ -69,6 +69,50 @@ describe Gitlab::LDAP::Person do
end
end
describe '.ldap_attributes' do
def stub_sync_ssh_keys(value)
stub_ldap_config(
options: {
'uid' => nil,
'attributes' => {
'name' => 'cn',
'email' => 'mail',
'username' => %w(uid mail memberof)
},
'sync_ssh_keys' => value
}
)
end
let(:config) { Gitlab::LDAP::Config.new('ldapmain') }
let(:ldap_attributes) { described_class.ldap_attributes(config) }
let(:expected_attributes) { %w(dn cn uid mail memberof) }
it 'includes a real attribute name' do
stub_sync_ssh_keys('my-ssh-attribute')
expect(ldap_attributes).to match_array(expected_attributes + ['my-ssh-attribute'])
end
it 'excludes integers' do
stub_sync_ssh_keys(0)
expect(ldap_attributes).to match_array(expected_attributes)
end
it 'excludes false values' do
stub_sync_ssh_keys(false)
expect(ldap_attributes).to match_array(expected_attributes)
end
it 'excludes true values' do
stub_sync_ssh_keys(true)
expect(ldap_attributes).to match_array(expected_attributes)
end
end
describe '#kerberos_principal' do
let(:entry) do
ldif = "dn: cn=foo, dc=bar, dc=com\nsAMAccountName: myName\n"
......
......@@ -49,6 +49,24 @@ describe PushRule do
end
end
methods_and_regexes = {
commit_message_allowed?: :commit_message_regex,
branch_name_allowed?: :branch_name_regex,
author_email_allowed?: :author_email_regex,
filename_blacklisted?: :file_name_regex
}
methods_and_regexes.each do |method_name, regex_attr|
describe "##{method_name}" do
it 'raises a MatchError when the regex is invalid' do
push_rule[regex_attr] = '+'
expect { push_rule.public_send(method_name, 'foo') } # rubocop:disable GitlabSecurity/PublicSend
.to raise_error(PushRule::MatchError, /\ARegular expression '\+' is invalid/)
end
end
end
describe '#commit_signature_allowed?' do
let!(:premium_license) { create(:license, plan: License::PREMIUM_PLAN) }
let(:signed_commit) { double(has_signature?: true) }
......
......@@ -145,6 +145,16 @@ describe Geo::RepositorySyncService do
expect(registry.reload.repository_retry_count).to be_nil
expect(registry.repository_retry_at).to be_nil
end
context 'with non-empty repositories' do
let(:project) { create(:project, :repository) }
it 'syncs gitattributes to info/attributes' do
expect(repository).to receive(:copy_gitattributes)
subject.execute
end
end
end
context 'when repository sync fails' do
......
......@@ -395,12 +395,12 @@ feature 'Jobs' do
expect(page).to have_link('Trigger this manual action')
end
it 'plays manual action', :js do
it 'plays manual action and shows pending status', :js do
click_link 'Trigger this manual action'
wait_for_requests
expect(page).to have_content('This job has not been triggered')
expect(page).to have_content('This job is stuck, because the project doesn\'t have any runners online assigned to it.')
expect(page).to have_content('This job has not started yet')
expect(page).to have_content('This job is in pending state and is waiting to be picked by a runner')
expect(page).to have_content('pending')
end
end
......@@ -414,6 +414,20 @@ feature 'Jobs' do
it 'shows empty state' do
expect(page).to have_content('This job has not been triggered yet')
expect(page).to have_content('This job depends on upstream jobs that need to succeed in order for this job to be triggered')
end
end
context 'Pending job' do
let(:job) { create(:ci_build, :pending, pipeline: pipeline) }
before do
visit project_job_path(project, job)
end
it 'shows pending empty state' do
expect(page).to have_content('This job has not started yet')
expect(page).to have_content('This job is in pending state and is waiting to be picked by a runner')
end
end
end
......
......@@ -35,6 +35,15 @@ describe GroupDescendantsFinder do
expect(finder.execute).to contain_exactly(project)
end
it 'does not include projects shared with the group' do
project = create(:project, namespace: group)
other_project = create(:project)
other_project.project_group_links.create(group: group,
group_access: ProjectGroupLink::MASTER)
expect(finder.execute).to contain_exactly(project)
end
context 'when archived is `true`' do
let(:params) { { archived: 'true' } }
......@@ -189,6 +198,17 @@ describe GroupDescendantsFinder do
expect(finder.execute).to contain_exactly(subgroup, matching_project)
end
context 'with a small page size' do
let(:params) { { filter: 'test', per_page: 1 } }
it 'contains all the ancestors of a matching subgroup regardless of the page size' do
subgroup = create(:group, :private, parent: group)
matching = create(:group, :private, name: 'testgroup', parent: subgroup)
expect(finder.execute).to contain_exactly(subgroup, matching)
end
end
it 'does not include the parent itself' do
group.update!(name: 'test')
......
require 'spec_helper'
describe ProjectsHelper do
include ProjectForksHelper
describe "#project_status_css_class" do
it "returns appropriate class" do
expect(project_status_css_class("started")).to eq("active")
......@@ -10,9 +12,9 @@ describe ProjectsHelper do
end
describe "can_change_visibility_level?" do
let(:project) { create(:project, :repository) }
let(:project) { create(:project) }
let(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
let(:fork_project) { Projects::ForkService.new(project, user).execute }
let(:forked_project) { fork_project(project, user) }
it "returns false if there are no appropriate permissions" do
allow(helper).to receive(:can?) { false }
......@@ -26,21 +28,29 @@ describe ProjectsHelper do
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
it 'allows visibility level to be changed if the project is forked' do
allow(helper).to receive(:can?).with(user, :change_visibility_level, project) { true }
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
fork_project(project)
expect(helper.can_change_visibility_level?(project, user)).to be_truthy
end
context "forks" do
it "returns false if there are permissions and origin project is PRIVATE" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::PRIVATE
project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
expect(helper.can_change_visibility_level?(fork_project, user)).to be_falsey
expect(helper.can_change_visibility_level?(forked_project, user)).to be_falsey
end
it "returns true if there are permissions and origin project is INTERNAL" do
allow(helper).to receive(:can?) { true }
project.update visibility_level: Gitlab::VisibilityLevel::INTERNAL
project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
expect(helper.can_change_visibility_level?(fork_project, user)).to be_truthy
expect(helper.can_change_visibility_level?(forked_project, user)).to be_truthy
end
end
end
......
......@@ -45,7 +45,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
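// Assuming this is axios-mock-adapter: restore() detaches the mock and
// restores axios' original adapter, while reset() only clears the handlers.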
});
it('does not show loading icon', () => {
......@@ -96,7 +96,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('does not show loading icon', () => {
......@@ -127,7 +127,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('does not show loading icon', () => {
......
......@@ -55,7 +55,7 @@ describe('Board card', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('returns false when detailIssue is empty', () => {
......
......@@ -60,7 +60,7 @@ describe('Board list component', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('renders component', () => {
......
......@@ -58,7 +58,7 @@ describe('Issue boards new issue form', () => {
afterEach(() => {
vm.$destroy();
mock.reset();
mock.restore();
});
it('calls submit if submit button is clicked', (done) => {
......
......@@ -34,7 +34,7 @@ describe('Store', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('starts with a blank state', () => {
......
......@@ -30,7 +30,7 @@ describe('List model', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('gets issues when created', (done) => {
......
......@@ -78,7 +78,7 @@ describe('EpicShowApp', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render epic-header', () => {
......
......@@ -167,30 +167,26 @@ describe('Fly out sidebar navigation', () => {
describe('mouseEnterTopItems', () => {
beforeEach(() => {
jasmine.clock().install();
el.innerHTML = '<div class="sidebar-sub-level-items" style="position: absolute; top: 0; left: 100px; height: 200px;"></div>';
});
afterEach(() => {
jasmine.clock().uninstall();
});
it('shows sub-items after 0ms if no menu is open', () => {
it('shows sub-items after 0ms if no menu is open', (done) => {
mouseEnterTopItems(el);
expect(
getHideSubItemsInterval(),
).toBe(0);
jasmine.clock().tick(0);
setTimeout(() => {
expect(
el.querySelector('.sidebar-sub-level-items').style.display,
).toBe('block');
expect(
el.querySelector('.sidebar-sub-level-items').style.display,
).toBe('block');
done();
});
});
it('shows sub-items after 300ms if a menu is currently open', () => {
it('shows sub-items after 300ms if a menu is currently open', (done) => {
documentMouseMove({
clientX: el.getBoundingClientRect().left,
clientY: el.getBoundingClientRect().top,
......@@ -203,17 +199,19 @@ describe('Fly out sidebar navigation', () => {
clientY: el.getBoundingClientRect().top + 10,
});
mouseEnterTopItems(el);
mouseEnterTopItems(el, 0);
expect(
getHideSubItemsInterval(),
).toBe(300);
jasmine.clock().tick(300);
setTimeout(() => {
expect(
el.querySelector('.sidebar-sub-level-items').style.display,
).toBe('block');
expect(
el.querySelector('.sidebar-sub-level-items').style.display,
).toBe('block');
done();
});
});
});
......
......@@ -3,7 +3,7 @@
import './class_spec_helper';
describe('ClassSpecHelper', () => {
describe('itShouldBeAStaticMethod', function () {
describe('itShouldBeAStaticMethod', () => {
beforeEach(() => {
class TestClass {
instanceMethod() { this.prop = 'val'; }
......@@ -14,23 +14,5 @@ describe('ClassSpecHelper', () => {
});
ClassSpecHelper.itShouldBeAStaticMethod(ClassSpecHelper, 'itShouldBeAStaticMethod');
it('should have a defined spec', () => {
expect(ClassSpecHelper.itShouldBeAStaticMethod(this.TestClass, 'staticMethod').description).toBe('should be a static method');
});
it('should pass for a static method', () => {
const spec = ClassSpecHelper.itShouldBeAStaticMethod(this.TestClass, 'staticMethod');
expect(spec.status()).toBe('passed');
});
it('should fail for an instance method', (done) => {
const spec = ClassSpecHelper.itShouldBeAStaticMethod(this.TestClass, 'instanceMethod');
spec.resultCallback = (result) => {
expect(result.status).toBe('failed');
done();
};
spec.execute();
});
});
});
......@@ -59,7 +59,7 @@ describe('Issuable output', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
realtimeRequestCount = 0;
vm.poll.stop();
......
......@@ -38,7 +38,7 @@ describe('Dashboard', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('shows up a loading state', (done) => {
......
......@@ -64,7 +64,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -87,7 +87,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -110,7 +110,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render error indicator', (done) => {
......@@ -158,7 +158,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -209,7 +209,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -233,7 +233,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render error indicator', (done) => {
......@@ -279,7 +279,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -331,7 +331,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -355,7 +355,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render error indicator', (done) => {
......@@ -401,7 +401,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -440,7 +440,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render error indicator', (done) => {
......@@ -487,7 +487,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render provided data', (done) => {
......@@ -518,7 +518,7 @@ describe('ee merge request widget options', () => {
});
afterEach(() => {
mock.reset();
mock.restore();
});
it('should render error indicator', (done) => {
......
......@@ -25,7 +25,7 @@ describe('Modal', () => {
});
describe('with id', () => {
it('does not render a primary button', () => {
describe('does not render a primary button', () => {
beforeEach(() => {
vm = mountComponent(modalComponent, {
id: 'my-modal',
......
......@@ -21,22 +21,21 @@ describe('collapsedGroupedDatePicker', () => {
});
});
it('toggleCollapse events', () => {
const toggleCollapse = jasmine.createSpy();
describe('toggleCollapse events', () => {
beforeEach((done) => {
spyOn(vm, 'toggleSidebar');
vm.minDate = new Date('07/17/2016');
Vue.nextTick(done);
});
it('should emit when sidebar is toggled', () => {
vm.$el.querySelector('.gutter-toggle').click();
expect(toggleCollapse).toHaveBeenCalled();
expect(vm.toggleSidebar).toHaveBeenCalled();
});
it('should emit when collapsed-calendar-icon is clicked', () => {
vm.$el.querySelector('.sidebar-collapsed-icon').click();
expect(toggleCollapse).toHaveBeenCalled();
expect(vm.toggleSidebar).toHaveBeenCalled();
});
});
......
require 'spec_helper'
describe Gitlab::Checks::ChangeAccess do
describe '#exec' do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:user_access) { Gitlab::UserAccess.new(user, project: project) }
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51' }
let(:ref) { 'refs/heads/master' }
let(:changes) { { oldrev: oldrev, newrev: newrev, ref: ref } }
let(:protocol) { 'ssh' }
subject(:change_access) do
described_class.new(
changes,
project: project,
user_access: user_access,
protocol: protocol
)
end
before do
project.add_developer(user)
end
context 'push rules checks' do
shared_examples 'check ignored when push rule unlicensed' do
before do
stub_licensed_features(push_rules: false)
end
it { is_expected.to be_truthy }
end
let(:project) { create(:project, :public, :repository, push_rule: push_rule) }
before do
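# Stub new_commits so the push-rule checks run against the known range of
# seed-repository commits between oldrev and newrev.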
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
end
context 'tag deletion' do
let(:push_rule) { create(:push_rule, deny_delete_tag: true) }
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
let(:ref) { 'refs/tags/v1.0.0' }
before do
project.add_master(user)
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule denies tag deletion' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You cannot delete a tag')
end
context 'when tag is deleted in web UI' do
let(:protocol) { 'web' }
it 'ignores the push rule' do
expect(subject.exec).to be_truthy
end
end
end
context 'commit message rules' do
let!(:push_rule) { create(:push_rule, :commit_message) }
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule fails' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'")
end
it 'returns an error if the regex is invalid' do
push_rule.commit_message_regex = '+'
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /\ARegular expression '\+' is invalid/)
end
end
context 'author email rules' do
let!(:push_rule) { create(:push_rule, author_email_regex: '.*@valid.com') }
before do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('mike@valid.com')
allow_any_instance_of(Commit).to receive(:author_email).and_return('mike@valid.com')
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule fails for the committer' do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('ana@invalid.com')
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Committer's email 'ana@invalid.com' does not follow the pattern '.*@valid.com'")
end
it 'returns an error if the rule fails for the author' do
allow_any_instance_of(Commit).to receive(:author_email).and_return('joan@invalid.com')
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author's email 'joan@invalid.com' does not follow the pattern '.*@valid.com'")
end
it 'returns an error if the regex is invalid' do
push_rule.author_email_regex = '+'
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /\ARegular expression '\+' is invalid/)
end
end
context 'branch name rules' do
let!(:push_rule) { create(:push_rule, branch_name_regex: '^(w*)$') }
let(:ref) { 'refs/heads/a-branch-that-is-not-allowed' }
it_behaves_like 'check ignored when push rule unlicensed'
it 'rejects the branch that is not allowed' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Branch name does not follow the pattern '^(w*)$'")
end
it 'returns an error if the regex is invalid' do
push_rule.branch_name_regex = '+'
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /\ARegular expression '\+' is invalid/)
end
context 'when the ref is not a branch ref' do
let(:ref) { 'a/ref/thats/not/abranch' }
it 'allows the creation' do
expect { subject.exec }.not_to raise_error
end
end
context 'when no commits are present' do
before do
allow(project.repository).to receive(:new_commits) { [] }
end
it 'rejects the branch that is not allowed' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Branch name does not follow the pattern '^(w*)$'")
end
end
context 'when the default branch does not match the push rules' do
let(:push_rule) { create(:push_rule, branch_name_regex: 'not-master') }
let(:ref) { "refs/heads/#{project.default_branch}" }
it 'allows the default branch even if it does not match push rule' do
expect { subject.exec }.not_to raise_error
end
it 'memoizes the validate_path_locks? call' do
expect(project.path_locks).to receive(:any?).once.and_call_original
2.times { subject.exec }
end
end
end
context 'existing member rules' do
let(:push_rule) { create(:push_rule, member_check: true) }
before do
allow(User).to receive(:existing_member?).and_return(false)
allow_any_instance_of(Commit).to receive(:author_email).and_return('some@mail.com')
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the commit author is not a GitLab member' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author 'some@mail.com' is not a member of team")
end
end
context 'file name rules' do
# Note that the commit used here creates a file named 'README'
context 'file name regex check' do
let!(:push_rule) { create(:push_rule, file_name_regex: 'READ*') }
it_behaves_like 'check ignored when push rule unlicensed'
it "returns an error if a new or renamed filed doesn't match the file name regex" do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File name README was blacklisted by the pattern READ*.")
end
it 'returns an error if the regex is invalid' do
push_rule.file_name_regex = '+'
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /\ARegular expression '\+' is invalid/)
end
end
context 'blacklisted files check' do
let(:push_rule) { create(:push_rule, prevent_secrets: true) }
it_behaves_like 'check ignored when push rule unlicensed'
it "returns true if there is no blacklisted files" do
new_rev = nil
white_listed =
[
'readme.txt', 'any/ida_rsa.pub', 'any/id_dsa.pub', 'any_2/id_ed25519.pub',
'random_file.pdf', 'folder/id_ecdsa.pub', 'docs/aws/credentials.md', 'ending_withhistory'
]
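# Chain one commit per whitelisted path and point new_commits at just that
# commit, so each file name is validated in isolation.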
white_listed.each do |file_path|
old_rev = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
old_rev = new_rev if new_rev
new_rev = project.repository.create_file(user, file_path, "commit #{file_path}", message: "commit #{file_path}", branch_name: "master")
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between(old_rev, new_rev)
)
expect(subject.exec).to be_truthy
end
end
it "returns an error if a new or renamed filed doesn't match the file name regex" do
new_rev = nil
black_listed =
[
'aws/credentials', '.ssh/personal_rsa', 'config/server_rsa', '.ssh/id_rsa', '.ssh/id_dsa',
'.ssh/personal_dsa', 'config/server_ed25519', 'any/id_ed25519', '.ssh/personal_ecdsa', 'config/server_ecdsa',
'any_place/id_ecdsa', 'some_pLace/file.key', 'other_PlAcE/other_file.pem', 'bye_bug.history', 'pg_sql_history'
]
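# Same chaining as above: each blacklisted path is committed and checked
# one commit at a time.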
black_listed.each do |file_path|
old_rev = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
old_rev = new_rev if new_rev
new_rev = project.repository.create_file(user, file_path, "commit #{file_path}", message: "commit #{file_path}", branch_name: "master")
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between(old_rev, new_rev)
)
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /File name #{file_path} was blacklisted by the pattern/)
end
end
end
end
context 'max file size rules' do
let(:push_rule) { create(:push_rule, max_file_size: 1) }
before do
allow_any_instance_of(Blob).to receive(:size).and_return(2.megabytes)
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if file exceeds the maximum file size' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File \"README\" is larger than the allowed size of 1 MB")
end
end
context 'GPG sign rules' do
before do
stub_licensed_features(reject_unsigned_commits: true)
end
let(:push_rule) { create(:push_rule, reject_unsigned_commits: true) }
it_behaves_like 'check ignored when push rule unlicensed'
context 'when it is only enabled in Global settings' do
before do
project.push_rule.update_column(:reject_unsigned_commits, nil)
create(:push_rule_sample, reject_unsigned_commits: true)
end
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'returns an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit must be signed with a GPG key")
end
end
end
context 'when enabled in Project' do
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'returns an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit must be signed with a GPG key")
end
context 'but the change is made in the web application' do
let(:protocol) { 'web' }
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
context 'and commit is signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(true)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
context 'when disabled in Project' do
let(:push_rule) { create(:push_rule, reject_unsigned_commits: false) }
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
end
end
context 'file lock rules' do
let!(:path_lock) { create(:path_lock, path: 'README', project: project) }
before do
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
end
it 'returns an error if the changes update a path locked by another user' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked by #{path_lock.user.name}")
end
end
context 'Check commit author rules' do
before do
stub_licensed_features(commit_committer_check: true)
end
let(:push_rule) { create(:push_rule, commit_committer_check: true) }
let(:project) { create(:project, :public, :repository, push_rule: push_rule) }
context 'with a commit from the authenticated user' do
before do
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
allow_any_instance_of(Commit).to receive(:committer_email).and_return(user.email)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
it 'allows the commit when it was made with another email that belongs to the current user' do
user.emails.create(email: 'secondary_email@user.com', confirmed_at: Time.now)
allow_any_instance_of(Commit).to receive(:committer_email).and_return('secondary_email@user.com')
expect { subject.exec }.not_to raise_error
end
it 'raises an error when the commit was made with an unverified email' do
user.emails.create(email: 'secondary_email@user.com')
allow_any_instance_of(Commit).to receive(:committer_email).and_return('secondary_email@user.com')
expect { subject.exec }
.to raise_error(Gitlab::GitAccess::UnauthorizedError,
"Comitter email '%{commiter_email}' is not verified.")
end
it 'raises an error when using an unknown email' do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('some@mail.com')
expect { subject.exec }
.to raise_error(Gitlab::GitAccess::UnauthorizedError,
"You cannot push commits for 'some@mail.com'. You can only push commits that were committed with one of your own verified emails.")
end
end
context 'for an ff merge request' do
# the signed-commits branch fast-forwards onto master
let(:newrev) { '2d1096e3' }
it 'does not raise errors for a fast forward' do
expect(change_access).not_to receive(:committer_check)
expect { subject.exec }.not_to raise_error
end
end
context 'for a normal merge' do
# This creates a merge commit without adding it to a target branch;
# that is what the repository would look like during the `pre-receive` hook.
#
# That means only the merge commit should be validated.
let(:newrev) do
rugged = project.repository.raw_repository.rugged
base = oldrev
to_merge = '2d1096e3a0ecf1d2baf6dee036cc80775d4940ba'
merge_index = rugged.merge_commits(base, to_merge)
options = {
parents: [base, to_merge],
tree: merge_index.write_tree(rugged),
message: 'The merge commit',
author: { name: user.name, email: user.email, time: Time.now },
committer: { name: user.name, email: user.email, time: Time.now }
}
Rugged::Commit.create(rugged, options)
end
it 'does not raise errors for a merge commit' do
expect(change_access).to receive(:committer_check).once
.and_call_original
expect { subject.exec }.not_to raise_error
end
end
end
end
end
......@@ -7,6 +7,10 @@ describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData,
.to receive(:commits_count=).and_return(nil)
end
after do
[MergeRequest, MergeRequestDiff].each(&:reset_column_information)
end
describe '#perform' do
let(:mr_with_event) { create(:merge_request) }
let!(:merged_event) { create(:event, :merged, target: mr_with_event) }
......
......@@ -208,387 +208,5 @@ describe Gitlab::Checks::ChangeAccess do
end
end
end
context 'push rules checks' do
shared_examples 'check ignored when push rule unlicensed' do
before do
stub_licensed_features(push_rules: false)
end
it { is_expected.to be_truthy }
end
let(:project) { create(:project, :public, :repository, push_rule: push_rule) }
before do
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
end
context 'tag deletion' do
let(:push_rule) { create(:push_rule, deny_delete_tag: true) }
let(:oldrev) { 'be93687618e4b132087f430a4d8fc3a609c9b77c' }
let(:newrev) { '0000000000000000000000000000000000000000' }
let(:ref) { 'refs/tags/v1.0.0' }
before do
project.add_master(user)
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule denies tag deletion' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You cannot delete a tag')
end
context 'when tag is deleted in web UI' do
let(:protocol) { 'web' }
it 'ignores the push rule' do
expect(subject.exec).to be_truthy
end
end
end
context 'commit message rules' do
let(:push_rule) { create(:push_rule, :commit_message) }
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule fails' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'")
end
end
context 'author email rules' do
let(:push_rule) { create(:push_rule, author_email_regex: '.*@valid.com') }
before do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('mike@valid.com')
allow_any_instance_of(Commit).to receive(:author_email).and_return('mike@valid.com')
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the rule fails for the committer' do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('ana@invalid.com')
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Committer's email 'ana@invalid.com' does not follow the pattern '.*@valid.com'")
end
it 'returns an error if the rule fails for the author' do
allow_any_instance_of(Commit).to receive(:author_email).and_return('joan@invalid.com')
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author's email 'joan@invalid.com' does not follow the pattern '.*@valid.com'")
end
end
context 'branch name rules' do
let(:push_rule) { create(:push_rule, branch_name_regex: '^(w*)$') }
let(:ref) { 'refs/heads/a-branch-that-is-not-allowed' }
it_behaves_like 'check ignored when push rule unlicensed'
it 'rejects the branch that is not allowed' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Branch name does not follow the pattern '^(w*)$'")
end
context 'when the ref is not a branch ref' do
let(:ref) { 'a/ref/thats/not/abranch' }
it 'allows the creation' do
expect { subject.exec }.not_to raise_error
end
end
context 'when no commits are present' do
before do
allow(project.repository).to receive(:new_commits) { [] }
end
it 'rejects the branch that is not allowed' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Branch name does not follow the pattern '^(w*)$'")
end
end
context 'when the default branch does not match the push rules' do
let(:push_rule) { create(:push_rule, branch_name_regex: 'not-master') }
let(:ref) { "refs/heads/#{project.default_branch}" }
it 'allows the default branch even if it does not match push rule' do
expect { subject.exec }.not_to raise_error
end
it 'memoizes the validate_path_locks? call' do
expect(project.path_locks).to receive(:any?).once.and_call_original
2.times { subject.exec }
end
end
end
context 'existing member rules' do
let(:push_rule) { create(:push_rule, member_check: true) }
before do
allow(User).to receive(:existing_member?).and_return(false)
allow_any_instance_of(Commit).to receive(:author_email).and_return('some@mail.com')
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if the commit author is not a GitLab member' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author 'some@mail.com' is not a member of team")
end
end
context 'file name rules' do
# Note that the commit used here creates a file named 'README'
context 'file name regex check' do
let(:push_rule) { create(:push_rule, file_name_regex: 'READ*') }
it_behaves_like 'check ignored when push rule unlicensed'
it "returns an error if a new or renamed filed doesn't match the file name regex" do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File name README was blacklisted by the pattern READ*.")
end
end
context 'blacklisted files check' do
let(:push_rule) { create(:push_rule, prevent_secrets: true) }
it_behaves_like 'check ignored when push rule unlicensed'
it "returns true if there is no blacklisted files" do
new_rev = nil
white_listed =
[
'readme.txt', 'any/ida_rsa.pub', 'any/id_dsa.pub', 'any_2/id_ed25519.pub',
'random_file.pdf', 'folder/id_ecdsa.pub', 'docs/aws/credentials.md', 'ending_withhistory'
]
white_listed.each do |file_path|
old_rev = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
old_rev = new_rev if new_rev
new_rev = project.repository.create_file(user, file_path, "commit #{file_path}", message: "commit #{file_path}", branch_name: "master")
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between(old_rev, new_rev)
)
expect(subject.exec).to be_truthy
end
end
it "returns an error if a new or renamed filed doesn't match the file name regex" do
new_rev = nil
black_listed =
[
'aws/credentials', '.ssh/personal_rsa', 'config/server_rsa', '.ssh/id_rsa', '.ssh/id_dsa',
'.ssh/personal_dsa', 'config/server_ed25519', 'any/id_ed25519', '.ssh/personal_ecdsa', 'config/server_ecdsa',
'any_place/id_ecdsa', 'some_pLace/file.key', 'other_PlAcE/other_file.pem', 'bye_bug.history', 'pg_sql_history'
]
black_listed.each do |file_path|
old_rev = 'be93687618e4b132087f430a4d8fc3a609c9b77c'
old_rev = new_rev if new_rev
new_rev = project.repository.create_file(user, file_path, "commit #{file_path}", message: "commit #{file_path}", branch_name: "master")
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between(old_rev, new_rev)
)
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /File name #{file_path} was blacklisted by the pattern/)
end
end
end
end
context 'max file size rules' do
let(:push_rule) { create(:push_rule, max_file_size: 1) }
before do
allow_any_instance_of(Blob).to receive(:size).and_return(2.megabytes)
end
it_behaves_like 'check ignored when push rule unlicensed'
it 'returns an error if file exceeds the maximum file size' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File \"README\" is larger than the allowed size of 1 MB")
end
end
context 'GPG sign rules' do
before do
stub_licensed_features(reject_unsigned_commits: true)
end
let(:push_rule) { create(:push_rule, reject_unsigned_commits: true) }
it_behaves_like 'check ignored when push rule unlicensed'
context 'when it is only enabled in Global settings' do
before do
project.push_rule.update_column(:reject_unsigned_commits, nil)
create(:push_rule_sample, reject_unsigned_commits: true)
end
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'returns an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit must be signed with a GPG key")
end
end
end
context 'when enabled in Project' do
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'returns an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit must be signed with a GPG key")
end
context 'but the change is made in the web application' do
let(:protocol) { 'web' }
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
context 'and commit is signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(true)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
context 'when disabled in Project' do
let(:push_rule) { create(:push_rule, reject_unsigned_commits: false) }
context 'and commit is not signed' do
before do
allow_any_instance_of(Commit).to receive(:has_signature?).and_return(false)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
end
end
end
end
context 'file lock rules' do
let!(:path_lock) { create(:path_lock, path: 'README', project: project) }
before do
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
end
it 'returns an error if the changes update a path locked by another user' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked by #{path_lock.user.name}")
end
end
context 'Check commit author rules' do
before do
stub_licensed_features(commit_committer_check: true)
end
let(:push_rule) { create(:push_rule, commit_committer_check: true) }
let(:project) { create(:project, :public, :repository, push_rule: push_rule) }
context 'with a commit from the authenticated user' do
before do
allow(project.repository).to receive(:new_commits).and_return(
project.repository.commits_between('be93687618e4b132087f430a4d8fc3a609c9b77c', '54fcc214b94e78d7a41a9a8fe6d87a5e59500e51')
)
allow_any_instance_of(Commit).to receive(:committer_email).and_return(user.email)
end
it 'does not return an error' do
expect { subject.exec }.not_to raise_error
end
it 'allows the commit when it was made with another email that belongs to the current user' do
user.emails.create(email: 'secondary_email@user.com', confirmed_at: Time.now)
allow_any_instance_of(Commit).to receive(:committer_email).and_return('secondary_email@user.com')
expect { subject.exec }.not_to raise_error
end
it 'raises an error when the commit was made with an unverified email' do
user.emails.create(email: 'secondary_email@user.com')
allow_any_instance_of(Commit).to receive(:committer_email).and_return('secondary_email@user.com')
expect { subject.exec }
.to raise_error(Gitlab::GitAccess::UnauthorizedError,
"Comitter email '%{commiter_email}' is not verified.")
end
it 'raises an error when using an unknown email' do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('some@mail.com')
expect { subject.exec }
.to raise_error(Gitlab::GitAccess::UnauthorizedError,
"You cannot push commits for 'some@mail.com'. You can only push commits that were committed with one of your own verified emails.")
end
end
context 'for an ff merge request' do
# the signed-commits branch fast-forwards onto master
let(:newrev) { '2d1096e3' }
it 'does not raise errors for a fast forward' do
expect(change_access).not_to receive(:committer_check)
expect { subject.exec }.not_to raise_error
end
end
context 'for a normal merge' do
# This creates a merge commit without adding it to a target branch;
# that is what the repository would look like during the `pre-receive` hook.
#
# That means only the merge commit should be validated.
let(:newrev) do
rugged = project.repository.raw_repository.rugged
base = oldrev
to_merge = '2d1096e3a0ecf1d2baf6dee036cc80775d4940ba'
merge_index = rugged.merge_commits(base, to_merge)
options = {
parents: [base, to_merge],
tree: merge_index.write_tree(rugged),
message: 'The merge commit',
author: { name: user.name, email: user.email, time: Time.now },
committer: { name: user.name, email: user.email, time: Time.now }
}
Rugged::Commit.create(rugged, options)
end
it 'does not raise errors for a merge commit' do
expect(change_access).to receive(:committer_check).once
.and_call_original
expect { subject.exec }.not_to raise_error
end
end
end
end
end
......@@ -16,6 +16,18 @@ describe Gitlab::Git::Blob, seed_helper: true do
end
shared_examples 'finding blobs' do
context 'nil path' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, nil) }
it { expect(blob).to eq(nil) }
end
context 'blank path' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, '') }
it { expect(blob).to eq(nil) }
end
context 'file in subdir' do
let(:blob) { Gitlab::Git::Blob.find(repository, SeedRepo::Commit::ID, "files/ruby/popen.rb") }
......
......@@ -219,6 +219,9 @@ describe Gitlab::Git::GitlabProjects do
before do
FileUtils.mkdir_p(dest_repos_path)
# Undo spec_helper stub that deletes hooks
allow_any_instance_of(described_class).to receive(:fork_repository).and_call_original
end
after do
......
require 'spec_helper'
describe Gitlab::Profiler do
RSpec::Matchers.define_negated_matcher :not_change, :change
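# Defining a negated matcher lets us chain several negative change
# expectations with `.and`, which `not_to change` does not support.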
let(:null_logger) { Logger.new('/dev/null') }
let(:private_token) { 'private' }
describe '.profile' do
let(:app) { double(:app) }
before do
allow(ActionDispatch::Integration::Session).to receive(:new).and_return(app)
allow(app).to receive(:get)
end
it 'returns a profile result' do
expect(described_class.profile('/')).to be_an_instance_of(RubyProf::Profile)
end
it 'uses the custom logger given' do
expect(described_class).to receive(:create_custom_logger)
.with(null_logger, private_token: anything)
.and_call_original
described_class.profile('/', logger: null_logger)
end
it 'sends a POST request when data is passed' do
post_data = '{"a":1}'
expect(app).to receive(:post).with(anything, post_data, anything)
described_class.profile('/', post_data: post_data)
end
it 'uses the private_token for auth if given' do
expect(app).to receive(:get).with('/', nil, 'Private-Token' => private_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', private_token: private_token)
end
it 'uses the user for auth if given' do
user = double(:user)
user_token = 'user'
allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck, :first).and_return(user_token)
expect(app).to receive(:get).with('/', nil, 'Private-Token' => user_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', user: user)
end
it 'uses the private_token for auth if both it and user are set' do
user = double(:user)
user_token = 'user'
allow(user).to receive_message_chain(:personal_access_tokens, :active, :pluck, :first).and_return(user_token)
expect(app).to receive(:get).with('/', nil, 'Private-Token' => private_token)
expect(app).to receive(:get).with('/api/v4/users')
described_class.profile('/', user: user, private_token: private_token)
end
end
describe '.create_custom_logger' do
it 'does nothing when nil is passed' do
expect(described_class.create_custom_logger(nil)).to be_nil
end
context 'the new logger' do
let(:custom_logger) do
described_class.create_custom_logger(null_logger, private_token: private_token)
end
it 'does not affect the existing logger' do
expect(null_logger).not_to receive(:debug)
expect(custom_logger).to receive(:debug).and_call_original
custom_logger.debug('Foo')
end
it 'strips out the private token' do
expect(custom_logger).to receive(:add) do |severity, _progname, message|
expect(severity).to eq(Logger::DEBUG)
expect(message).to include('public').and include(described_class::FILTERED_STRING)
expect(message).not_to include(private_token)
end
custom_logger.debug("public #{private_token}")
end
it 'tracks model load times by model' do
custom_logger.debug('This is not a model load')
custom_logger.debug('User Load (1.2ms)')
custom_logger.debug('User Load (1.3ms)')
custom_logger.debug('Project Load (10.4ms)')
expect(custom_logger.load_times_by_model).to eq('User' => 2.5,
'Project' => 10.4)
end
it 'logs the backtrace, ignoring lines as appropriate' do
# Skip Rails's backtrace cleaning.
allow(Rails.backtrace_cleaner).to receive(:clean, &:itself)
expect(custom_logger).to receive(:add)
.with(Logger::DEBUG,
anything,
a_string_matching(File.basename(__FILE__)))
.twice
expect(custom_logger).not_to receive(:add).with(Logger::DEBUG,
anything,
a_string_matching('lib/gitlab/profiler.rb'))
# Force a part of the backtrace to be in the (ignored) profiler source
# file.
described_class.with_custom_logger(nil) { custom_logger.debug('Foo') }
end
end
end
describe '.with_custom_logger' do
context 'when the logger is set' do
it 'uses the replacement logger for the duration of the block' do
expect(null_logger).to receive(:debug).and_call_original
expect { described_class.with_custom_logger(null_logger) { ActiveRecord::Base.logger.debug('foo') } }
.to not_change { ActiveRecord::Base.logger }
.and not_change { ActionController::Base.logger }
.and not_change { ActiveSupport::LogSubscriber.colorize_logging }
end
it 'returns the result of the block' do
expect(described_class.with_custom_logger(null_logger) { 2 }).to eq(2)
end
end
context 'when the logger is nil' do
it 'returns the result of the block' do
expect(described_class.with_custom_logger(nil) { 2 }).to eq(2)
end
it 'does not modify the standard Rails loggers' do
expect { described_class.with_custom_logger(nil) { } }
.to not_change { ActiveRecord::Base.logger }
.and not_change { ActionController::Base.logger }
.and not_change { ActiveSupport::LogSubscriber.colorize_logging }
end
end
end
end
......@@ -3,12 +3,19 @@ require Rails.root.join('db', 'post_migrate', '20171114104051_remove_empty_fork_
describe RemoveEmptyForkNetworks, :migration do
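# Migration specs build records through table() helpers rather than
# factories, so they do not depend on application models that may have
# moved on since this migration was written.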
let!(:fork_networks) { table(:fork_networks) }
let!(:projects) { table(:projects) }
let!(:fork_network_members) { table(:fork_network_members) }
let(:deleted_project) { create(:project) }
let!(:empty_network) { create(:fork_network, id: 1, root_project_id: deleted_project.id) }
let!(:other_network) { create(:fork_network, id: 2, root_project_id: create(:project).id) }
let(:deleted_project) { projects.create! }
let!(:empty_network) { fork_networks.create!(id: 1, root_project_id: deleted_project.id) }
let!(:other_network) { fork_networks.create!(id: 2, root_project_id: projects.create.id) }
before do
fork_network_members.create(fork_network_id: empty_network.id,
project_id: empty_network.root_project_id)
fork_network_members.create(fork_network_id: other_network.id,
project_id: other_network.root_project_id)
deleted_project.destroy!
end
......
......@@ -2273,6 +2273,10 @@ describe Project do
expect(second_fork.fork_source).to eq(project)
end
it 'returns nil if it is the root of the fork network' do
expect(project.fork_source).to be_nil
end
end
describe '#lfs_storage_project' do
......
......@@ -994,6 +994,14 @@ describe User do
expect(described_class.search(user3.username.upcase)).to eq([user3])
end
end
it 'returns no matches for an empty string' do
expect(described_class.search('')).to be_empty
end
it 'returns no matches for nil' do
expect(described_class.search(nil)).to be_empty
end
end
describe '.search_with_secondary_emails' do
......@@ -1048,6 +1056,14 @@ describe User do
it 'does not return users with a matching part of secondary email' do
expect(search_with_secondary_emails(email.email[1..4])).not_to include([email.user])
end
it 'returns no matches for an empty string' do
expect(search_with_secondary_emails('')).to be_empty
end
it 'returns no matches for nil' do
expect(search_with_secondary_emails(nil)).to be_empty
end
end
describe '.find_by_ssh_key_id' do
......
......@@ -317,35 +317,20 @@ describe API::Internal do
end
context "git pull" do
context "gitaly disabled", :disable_gitaly do
it "has the correct payload" do
pull(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).to be_nil
expect(user).to have_an_activity_record
end
end
context "gitaly enabled" do
it "has the correct payload" do
pull(key, project)
it "has the correct payload" do
pull(key, project)
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(user).to have_an_activity_record
end
expect(response).to have_gitlab_http_status(200)
expect(json_response["status"]).to be_truthy
expect(json_response["repository_path"]).to eq(project.repository.path_to_repo)
expect(json_response["gl_repository"]).to eq("project-#{project.id}")
expect(json_response["gitaly"]).not_to be_nil
expect(json_response["gitaly"]["repository"]).not_to be_nil
expect(json_response["gitaly"]["repository"]["storage_name"]).to eq(project.repository.gitaly_repository.storage_name)
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(user).to have_an_activity_record
end
end
......
......@@ -65,6 +65,16 @@ describe API::Members do
expect(json_response.count).to eq(1)
expect(json_response.first['username']).to eq(master.username)
end
it 'finds all members with no query specified' do
get api("/#{source_type.pluralize}/#{source.id}/members", developer), query: ''
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(2)
expect(json_response.map { |u| u['id'] }).to match_array [master.id, developer.id]
end
end
end
......
......@@ -58,6 +58,16 @@ describe API::V3::Members do
expect(json_response.count).to eq(1)
expect(json_response.first['username']).to eq(master.username)
end
it 'finds all members with no query specified' do
get v3_api("/#{source_type.pluralize}/#{source.id}/members", developer), query: ''
expect(response).to have_gitlab_http_status(200)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.count).to eq(2)
expect(json_response.map { |u| u['id'] }).to match_array [master.id, developer.id]
end
end
end
......
......@@ -72,13 +72,15 @@ describe Files::UpdateService do
end
end
context 'when target branch is different than source branch' do
let(:branch_name) { "#{project.default_branch}-new" }
context 'with gitaly disabled', :skip_gitaly_mock do
context 'when target branch is different than source branch' do
let(:branch_name) { "#{project.default_branch}-new" }
it 'fires hooks only once' do
expect(Gitlab::Git::HooksService).to receive(:new).once.and_call_original
it 'fires hooks only once' do
expect(Gitlab::Git::HooksService).to receive(:new).once.and_call_original
subject.execute
subject.execute
end
end
end
end
......
......@@ -114,6 +114,16 @@ RSpec.configure do |config|
config.before(:example) do
# Skip pre-receive hook check so we can use the web editor and merge.
allow_any_instance_of(Gitlab::Git::Hook).to receive(:trigger).and_return([true, nil])
allow_any_instance_of(Gitlab::Git::GitlabProjects).to receive(:fork_repository).and_wrap_original do |m, *args|
m.call(*args)
shard_path, repository_relative_path = args
# We can't leave the hooks in place after a fork, as those would fail in tests
# The "internal" API is not available
FileUtils.rm_rf(File.join(shard_path, repository_relative_path, 'hooks'))
end
# Enable all features by default for testing
allow(Feature).to receive(:enabled?) { true }
end
......
......@@ -5,10 +5,16 @@ module CycleAnalyticsHelpers
end
def create_commit(message, project, user, branch_name, count: 1)
oldrev = project.repository.commit(branch_name).sha
repository = project.repository
oldrev = repository.commit(branch_name).sha
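# Gitaly creates commits out-of-process with real wall-clock timestamps
# that Timecop cannot freeze, so the mock below rewrites the dates afterwards.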
if Timecop.frozen? && Gitlab::GitalyClient.feature_enabled?(:operation_user_commit_files)
mock_gitaly_multi_action_dates(repository.raw)
end
commit_shas = Array.new(count) do |index|
commit_sha = project.repository.create_file(user, generate(:branch), "content", message: message, branch_name: branch_name)
project.repository.commit(commit_sha)
commit_sha = repository.create_file(user, generate(:branch), "content", message: message, branch_name: branch_name)
repository.commit(commit_sha)
commit_sha
end
......@@ -98,6 +104,25 @@ module CycleAnalyticsHelpers
pipeline: dummy_pipeline,
protected: false)
end
def mock_gitaly_multi_action_dates(raw_repository)
allow(raw_repository).to receive(:multi_action).and_wrap_original do |m, *args|
new_date = Time.now
branch_update = m.call(*args)
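# Amend the freshly created commit so its author/committer dates use the
# (frozen) current time, updating the branch ref in place.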
if branch_update.newrev
_, opts = args
commit = raw_repository.commit(branch_update.newrev).rugged_commit
branch_update.newrev = commit.amend(
update_ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}#{opts[:branch_name]}",
author: commit.author.merge(time: new_date),
committer: commit.committer.merge(time: new_date)
)
end
branch_update
end
end
end
RSpec.configure do |config|
......
......@@ -38,10 +38,6 @@ module ProjectForksHelper
# so we have to explicitly call this method to clear the @exists variable
# of the instance we're returning here.
forked_project.repository.after_import
# We can't leave the hooks in place after a fork, as those would fail in tests
# The "internal" API is not available
FileUtils.rm_rf("#{forked_project.repository.path}/hooks")
end
forked_project
......
......@@ -345,12 +345,6 @@ production:
replicas="$new_replicas"
fi
if [[ "$CI_PROJECT_VISIBILITY" != "public" ]]; then
secret_name='gitlab-registry'
else
secret_name=''
fi
helm upgrade --install \
--wait \
--set service.enabled="$service_enabled" \
......@@ -358,7 +352,6 @@ production:
--set image.repository="$CI_APPLICATION_REPOSITORY" \
--set image.tag="$CI_APPLICATION_TAG" \
--set image.pullPolicy=IfNotPresent \
--set image.secrets[0].name="$secret_name" \
--set application.track="$track" \
--set application.database_url="$DATABASE_URL" \
--set service.url="$CI_ENVIRONMENT_URL" \
......@@ -488,9 +481,6 @@ production:
function create_secret() {
echo "Create secret..."
if [[ "$CI_PROJECT_VISIBILITY" == "public" ]]; then
return
fi
kubectl create secret -n "$KUBE_NAMESPACE" \
docker-registry gitlab-registry \
......
......@@ -4185,9 +4185,9 @@ isurl@^1.0.0-alpha5:
has-to-string-tag-x "^1.2.0"
is-object "^1.0.1"
jasmine-core@^2.6.3:
version "2.6.3"
resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.6.3.tgz#45072950e4a42b1e322fe55c001100a465d77815"
jasmine-core@^2.9.0:
version "2.9.0"
resolved "https://registry.yarnpkg.com/jasmine-core/-/jasmine-core-2.9.0.tgz#bfbb56defcd30789adec5a3fbba8504233289c72"
jasmine-jquery@^2.1.1:
version "2.1.1"
......@@ -4269,7 +4269,7 @@ json-stable-stringify@~0.0.0:
dependencies:
jsonify "~0.0.0"
json-stringify-safe@5.0.x, json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
json-stringify-safe@5.0.x, json-stringify-safe@~5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
......@@ -6063,11 +6063,9 @@ raphael@^2.2.7:
dependencies:
eve-raphael "0.5.0"
raven-js@^3.14.0:
version "3.14.0"
resolved "https://registry.yarnpkg.com/raven-js/-/raven-js-3.14.0.tgz#94dda81d975fdc4a42f193db437cf70021d654e0"
dependencies:
json-stringify-safe "^5.0.1"
raven-js@^3.22.1:
version "3.22.1"
resolved "https://registry.yarnpkg.com/raven-js/-/raven-js-3.22.1.tgz#1117f00dfefaa427ef6e1a7d50bbb1fb998a24da"
raw-body@2:
version "2.3.2"
......