Commit 101cd6d6 authored by Robert Speicher's avatar Robert Speicher

Merge branch 'ce-to-ee-2018-06-26' into 'master'

CE upstream - 2018-06-26 15:22 UTC

See merge request gitlab-org/gitlab-ee!6292
parents 1ee18a7c 7bd0e09a
@@ -4,27 +4,37 @@
- page_title 'New Group'
- header_title "Groups", dashboard_groups_path

.row.prepend-top-default
  .col-lg-3.profile-settings-sidebar
    %h4.prepend-top-0
      = _('New group')
    %p
      - group_docs_path = help_page_path('user/group/index')
      - group_docs_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: group_docs_path }
      = s_('%{group_docs_link_start}Groups%{group_docs_link_end} allow you to manage and collaborate across multiple projects. Members of a group have access to all of its projects.').html_safe % { group_docs_link_start: group_docs_link_start, group_docs_link_end: '</a>'.html_safe }
    %p
      - subgroup_docs_path = help_page_path('user/group/subgroups/index')
      - subgroup_docs_link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: subgroup_docs_path }
      = s_('Groups can also be nested by creating %{subgroup_docs_link_start}subgroups%{subgroup_docs_link_end}.').html_safe % { subgroup_docs_link_start: subgroup_docs_link_start, subgroup_docs_link_end: '</a>'.html_safe }
  .col-lg-9
    = form_for @group, html: { class: 'group-form gl-show-field-errors' } do |f|
      = form_errors(@group)
      = render 'shared/group_form', f: f, autofocus: true

      .form-group.row.group-description-holder
        = f.label :avatar, "Group avatar", class: 'col-form-label col-sm-2'
        .col-sm-10
          = render 'shared/choose_group_avatar_button', f: f

      = render 'shared/visibility_level', f: f, visibility_level: default_group_visibility, can_change_visibility_level: true, form_model: @group

      = render 'create_chat_team', f: f if Gitlab.config.mattermost.enabled

      .form-group.row
        .offset-sm-2.col-sm-10
          = render 'shared/group_tips'

      .form-actions
        = f.submit 'Create group', class: "btn btn-create"
        = link_to 'Cancel', dashboard_groups_path, class: 'btn btn-cancel'
@@ -6,5 +6,4 @@
%fieldset
  = check_box_tag "#{prefix}[scopes][]", scope, token.scopes.include?(scope), id: "#{prefix}_scopes_#{scope}"
  = label_tag ("#{prefix}_scopes_#{scope}"), scope, class: "label-light"
  .scope-description= t scope, scope: [:doorkeeper, :scope_desc]
---
title: Update new group page to better explain what groups are
merge_request: 19991
author:
type: other
---
title: Remove performance bottleneck preventing large wiki pages from displaying
merge_request: 20174
author:
type: performance
---
title: Add index on deployable_type/id for deployments
merge_request:
author:
type: performance
@@ -60,17 +60,23 @@ en:
    scopes:
      api: Access the authenticated user's API
      read_user: Read the authenticated user's personal information
      read_repository: Allows read-access to the repository
      read_registry: Grants permission to read container registry images
      openid: Authenticate using OpenID Connect
      sudo: Perform API actions as any user in the system
    scope_desc:
      api:
        Grants complete read/write access to the API, including all groups and projects.
      read_user:
        Grants read-only access to the authenticated user's profile through the /user API endpoint, which includes username, public email, and full name. Also grants access to read-only API endpoints under /users.
      read_repository:
        Grants read-only access to repositories on private projects using Git-over-HTTP (not using the API).
      read_registry:
        Grants read-only access to container registry images on private projects.
      openid:
        Grants permission to authenticate with GitLab using OpenID Connect. Also gives read-only access to the user's profile and group memberships.
      sudo:
        Grants permission to perform API actions as any user in the system, when authenticated as an admin user.
    flash:
      applications:
        create:
...
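To make the `read_user` description above concrete: a token carrying only that scope can read the authenticated user's profile through the `/user` endpoint, but nothing more. A minimal sketch, assuming a hypothetical GitLab host and an OAuth token supplied via an environment variable (neither is part of this merge request):

```ruby
require 'json'
require 'net/http'
require 'uri'

# Hypothetical host and token, for illustration only.
uri = URI('https://gitlab.example.com/api/v4/user')

request = Net::HTTP::Get.new(uri)
request['Authorization'] = "Bearer #{ENV.fetch('GITLAB_OAUTH_TOKEN')}"

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
  http.request(request)
end

# With only read_user, profile fields are readable; write endpoints are not.
puts JSON.parse(response.body)['username']
```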
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddIndexOnDeployableForDeployments < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    add_concurrent_index :deployments, [:deployable_type, :deployable_id]
  end

  def down
    remove_concurrent_index :deployments, [:deployable_type, :deployable_id]
  end
end
@@ -11,7 +11,7 @@
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 20180626125654) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"
@@ -868,6 +868,7 @@ ActiveRecord::Schema.define(version: 20180612175636) do
  end

  add_index "deployments", ["created_at"], name: "index_deployments_on_created_at", using: :btree
  add_index "deployments", ["deployable_type", "deployable_id"], name: "index_deployments_on_deployable_type_and_deployable_id", using: :btree
  add_index "deployments", ["environment_id", "id"], name: "index_deployments_on_environment_id_and_id", using: :btree
  add_index "deployments", ["environment_id", "iid", "project_id"], name: "index_deployments_on_environment_id_and_iid_and_project_id", using: :btree
  add_index "deployments", ["project_id", "iid"], name: "index_deployments_on_project_id_and_iid", unique: true, using: :btree
...
@@ -32,6 +32,7 @@ are very appreciative of the work done by translators and proofreaders!
  - Filip Mech - [GitLab](https://gitlab.com/mehenz), [Crowdin](https://crowdin.com/profile/mehenz)
- Portuguese, Brazilian
  - Paulo George Gomes Bezerra - [GitLab](https://gitlab.com/paulobezerra), [Crowdin](https://crowdin.com/profile/paulogomes.rep)
  - André Gama - [GitLab](https://gitlab.com/andregamma), [Crowdin](https://crowdin.com/profile/ToeOficial)
- Russian
  - Nikita Grylov - [GitLab](https://gitlab.com/nixel2007), [Crowdin](https://crowdin.com/profile/nixel2007)
  - Alexy Lustin - [GitLab](https://gitlab.com/allustin), [Crowdin](https://crowdin.com/profile/lustin)
...
@@ -195,22 +195,22 @@ end

And that's it, we're done!

## Changing The Schema For Large Tables

While `change_column_type_concurrently` and `rename_column_concurrently` can be
used for changing the schema of a table without downtime, they don't work very
well for large tables. Because all of the work happens in sequence, the
migration can take a very long time to complete, preventing a deployment from
proceeding.

They can also produce a lot of pressure on the database due to rapidly updating
many rows in sequence.

To reduce database pressure you should instead use
`change_column_type_using_background_migration` or
`rename_column_using_background_migration` when migrating a column in a large
table (e.g. `issues`). These methods work similarly to their concurrent
counterparts, but use a background migration to spread the work/load over a
longer time period, without slowing down deployments.

For example, to change the column type using a background migration:
```ruby
class ExampleMigration < ActiveRecord::Migration
@@ -296,6 +296,15 @@ class MigrateRemainingIssuesClosedAt < ActiveRecord::Migration
end
```
The same applies to `rename_column_using_background_migration`; a sketch of
such a migration follows the list below:

1. Create a migration using the helper, which will schedule background
   migrations to spread the writes over a longer period of time.
2. In the next monthly release, create a clean-up migration to steal from the
   Sidekiq queues, migrate any missing rows, and clean up the rename. This
   migration should skip the steps after stealing from the Sidekiq queues if
   the column has already been renamed.
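As a rough sketch only (the table and column names are borrowed from the
helper's own documentation, not from a real migration in this merge request),
such a migration might look like:

```ruby
# Hypothetical post-deployment migration: renames users.feed_token to
# users.rss_token by scheduling CopyColumn background jobs followed by a
# CleanupConcurrentRename job, instead of rewriting the table in one pass.
class RenameUsersFeedTokenToRssToken < ActiveRecord::Migration
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  disable_ddl_transaction!

  def up
    rename_column_using_background_migration(:users, :feed_token, :rss_token)
  end

  def down
    # Rolling back reuses the clean-up helper in the opposite direction;
    # this is only a sketch, not a tested rollback path.
    cleanup_concurrent_column_rename(:users, :rss_token, :feed_token)
  end
end
```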
For more information, see [the documentation on cleaning up background
migrations](background_migrations.md#cleaning-up).
...
@@ -45,6 +45,7 @@ module API
        present(
          paginate(::Kaminari.paginate_array(branches)),
          with: Entities::Branch,
          current_user: current_user,
          project: user_project,
          merged_branch_names: merged_branch_names
        )
@@ -63,7 +64,7 @@ module API
      get do
        branch = find_branch!(params[:branch])

        present branch, with: Entities::Branch, current_user: current_user, project: user_project
      end
    end
@@ -101,7 +102,7 @@ module API
        end

        if protected_branch.valid?
          present branch, with: Entities::Branch, current_user: current_user, project: user_project
        else
          render_api_error!(protected_branch.errors.full_messages, 422)
        end
@@ -121,7 +122,7 @@ module API
        protected_branch = user_project.protected_branches.find_by(name: branch.name)
        protected_branch&.destroy

        present branch, with: Entities::Branch, current_user: current_user, project: user_project
      end

      desc 'Create branch' do
@@ -140,6 +141,7 @@ module API
        if result[:status] == :success
          present result[:branch],
            with: Entities::Branch,
            current_user: current_user,
            project: user_project
        else
          render_api_error!(result[:message], 400)
...
@@ -48,7 +48,7 @@ module Backup
    end

    def backup_project(project)
      gitaly_migrate(:repository_backup, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
        if is_enabled
          backup_project_gitaly(project)
        else
@@ -80,7+80,7 @@ module Backup
    end

    def delete_all_repositories(name, repository_storage)
      gitaly_migrate(:delete_all_repositories, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
        if is_enabled
          Gitlab::GitalyClient::StorageService.new(name).delete_all_repositories
        else
@@ -148,7 +148,7 @@ module Backup
    end

    def backup_custom_hooks(project)
      gitaly_migrate(:backup_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
        if is_enabled
          gitaly_backup_custom_hooks(project)
        else
@@ -159,7 +159,7 @@ module Backup
    def restore_custom_hooks(project)
      in_path(path_to_tars(project)) do |dir|
        gitaly_migrate(:restore_custom_hooks, status: Gitlab::GitalyClient::MigrationStatus::OPT_OUT) do |is_enabled|
          if is_enabled
            gitaly_restore_custom_hooks(project, dir)
          else
...
@@ -56,10 +56,12 @@ module Banzai
      # Pattern to match allowed image extensions
      ALLOWED_IMAGE_EXTENSIONS = /.+(jpg|png|gif|svg|bmp)\z/i.freeze

      # Do not perform linking inside these tags.
      IGNORED_ANCESTOR_TAGS = %w(pre code tt).to_set

      def call
        doc.search(".//text()").each do |node|
          next if has_ancestor?(node, IGNORED_ANCESTOR_TAGS)

          # A Gollum ToC tag is `[[_TOC_]]`, but due to MarkdownFilter running
          # before this one, it will be converted into `[[<em>TOC</em>]]`, so it
...
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Background migration for cleaning up a concurrent column rename.
    class CleanupConcurrentRename < CleanupConcurrentSchemaChange
      RESCHEDULE_DELAY = 10.minutes

      def cleanup_concurrent_schema_change(table, old_column, new_column)
        cleanup_concurrent_column_rename(table, old_column, new_column)
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # Base class for cleaning up concurrent schema changes.
    class CleanupConcurrentSchemaChange
      include Database::MigrationHelpers

      # table - The name of the table the migration is performed for.
      # old_column - The name of the old (to drop) column.
      # new_column - The name of the new column.
      def perform(table, old_column, new_column)
        return unless column_exists?(table, new_column)

        rows_to_migrate = define_model_for(table)
          .where(new_column => nil)
          .where
          .not(old_column => nil)

        if rows_to_migrate.any?
          BackgroundMigrationWorker.perform_in(
            RESCHEDULE_DELAY,
            self.class.name,
            [table, old_column, new_column]
          )
        else
          cleanup_concurrent_schema_change(table, old_column, new_column)
        end
      end

      # These methods are necessary so we can re-use the migration helpers in
      # this class.
      def connection
        ActiveRecord::Base.connection
      end

      def method_missing(name, *args, &block)
        connection.__send__(name, *args, &block) # rubocop: disable GitlabSecurity/PublicSend
      end

      def respond_to_missing?(*args)
        connection.respond_to?(*args) || super
      end

      def define_model_for(table)
        Class.new(ActiveRecord::Base) do
          self.table_name = table
        end
      end
    end
  end
end
@@ -2,52 +2,12 @@

module Gitlab
  module BackgroundMigration
    # Background migration for cleaning up a concurrent column type change.
    class CleanupConcurrentTypeChange < CleanupConcurrentSchemaChange
      RESCHEDULE_DELAY = 10.minutes

      def cleanup_concurrent_schema_change(table, old_column, new_column)
        cleanup_concurrent_column_type_change(table, old_column)
      end
    end
  end
end
...
@@ -596,6 +596,97 @@ module Gitlab
      end
    end

    # Renames a column using a background migration.
    #
    # Because this method uses a background migration it's more suitable for
    # large tables. For small tables it's better to use
    # `rename_column_concurrently` since it can complete its work in a much
    # shorter amount of time and doesn't rely on Sidekiq.
    #
    # Example usage:
    #
    #   rename_column_using_background_migration(
    #     :users,
    #     :feed_token,
    #     :rss_token
    #   )
    #
    # table - The name of the database table containing the column.
    #
    # old - The old column name.
    #
    # new - The new column name.
    #
    # type - The type of the new column. If no type is given the old column's
    #        type is used.
    #
    # batch_size - The number of rows to schedule in a single background
    #              migration.
    #
    # interval - The time interval between every background migration.
    def rename_column_using_background_migration(
      table,
      old_column,
      new_column,
      type: nil,
      batch_size: 10_000,
      interval: 10.minutes
    )
      check_trigger_permissions!(table)

      old_col = column_for(table, old_column)
      new_type = type || old_col.type
      max_index = 0

      add_column(table, new_column, new_type,
                 limit: old_col.limit,
                 precision: old_col.precision,
                 scale: old_col.scale)

      # We set the default value _after_ adding the column so we don't end up
      # updating any existing data with the default value. This isn't
      # necessary since we copy over old values further down.
      change_column_default(table, new_column, old_col.default) if old_col.default

      install_rename_triggers(table, old_column, new_column)

      model = Class.new(ActiveRecord::Base) do
        self.table_name = table

        include ::EachBatch
      end

      # Schedule the jobs that will copy the data from the old column to the
      # new one. Rows with NULL values in our source column are skipped since
      # the target column is already NULL at this point.
      model.where.not(old_column => nil).each_batch(of: batch_size) do |batch, index|
        start_id, end_id = batch.pluck('MIN(id), MAX(id)').first
        max_index = index

        BackgroundMigrationWorker.perform_in(
          index * interval,
          'CopyColumn',
          [table, old_column, new_column, start_id, end_id]
        )
      end

      # Schedule the renaming of the column to happen (initially) 1 hour after
      # the last batch finished.
      BackgroundMigrationWorker.perform_in(
        (max_index * interval) + 1.hour,
        'CleanupConcurrentRename',
        [table, old_column, new_column]
      )

      if perform_background_migration_inline?
        # To ensure the schema is up to date immediately we perform the
        # migration inline in dev / test environments.
        Gitlab::BackgroundMigration.steal('CopyColumn')
        Gitlab::BackgroundMigration.steal('CleanupConcurrentRename')
      end
    end

    def perform_background_migration_inline?
      Rails.env.test? || Rails.env.development?
    end
...
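To make the scheduling performed by `rename_column_using_background_migration` above concrete, here is a rough back-of-the-envelope sketch; the row count is hypothetical, while `batch_size` and `interval` are the helper's defaults:

```ruby
require 'active_support/all' # provides Integer#minutes and Integer#hour

rows_to_copy = 45_000     # hypothetical number of rows with a non-NULL old column
batch_size   = 10_000     # helper default
interval     = 10.minutes # helper default

copy_jobs  = (rows_to_copy / batch_size.to_f).ceil # => 5 CopyColumn jobs
last_copy  = interval * copy_jobs                  # offset of the last CopyColumn job
cleanup_at = last_copy + 1.hour                    # offset of the CleanupConcurrentRename job

puts "#{copy_jobs} CopyColumn jobs; cleanup runs after #{cleanup_at.to_i / 60} minutes"
# => 5 CopyColumn jobs; cleanup runs after 110 minutes
```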
@@ -1178,6 +1178,61 @@ describe Gitlab::Database::MigrationHelpers do
    end
  end

  describe '#rename_column_using_background_migration' do
    let!(:issue) { create(:issue, :closed, closed_at: Time.zone.now) }

    it 'renames a column using a background migration' do
      expect(model)
        .to receive(:add_column)
        .with(
          'issues',
          :closed_at_timestamp,
          :datetime_with_timezone,
          limit: anything,
          precision: anything,
          scale: anything
        )

      expect(model)
        .to receive(:install_rename_triggers)
        .with('issues', :closed_at, :closed_at_timestamp)

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          10.minutes,
          'CopyColumn',
          ['issues', :closed_at, :closed_at_timestamp, issue.id, issue.id]
        )

      expect(BackgroundMigrationWorker)
        .to receive(:perform_in)
        .ordered
        .with(
          1.hour + 10.minutes,
          'CleanupConcurrentRename',
          ['issues', :closed_at, :closed_at_timestamp]
        )

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CopyColumn')

      expect(Gitlab::BackgroundMigration)
        .to receive(:steal)
        .ordered
        .with('CleanupConcurrentRename')

      model.rename_column_using_background_migration(
        'issues',
        :closed_at,
        :closed_at_timestamp
      )
    end
  end

  describe '#perform_background_migration_inline?' do
    it 'returns true in a test environment' do
      allow(Rails.env)
...
@@ -155,6 +155,12 @@ describe API::Branches do
      end

      it_behaves_like 'repository branch'

      it 'returns that the current user cannot push' do
        get api(route, current_user)

        expect(json_response['can_push']).to eq(false)
      end
    end

    context 'when unauthenticated', 'and project is private' do
@@ -169,6 +175,12 @@ describe API::Branches do

      it_behaves_like 'repository branch'

      it 'returns that the current user can push' do
        get api(route, current_user)

        expect(json_response['can_push']).to eq(true)
      end

      context 'when branch contains a dot' do
        let(:branch_name) { branch_with_dot.name }
@@ -202,6 +214,23 @@ describe API::Branches do
      end
    end

    context 'when authenticated', 'as a developer and branch is protected' do
      let(:current_user) { create(:user) }
      let!(:protected_branch) { create(:protected_branch, project: project, name: branch_name) }

      before do
        project.add_developer(current_user)
      end

      it_behaves_like 'repository branch'

      it 'returns that the current user cannot push' do
        get api(route, current_user)

        expect(json_response['can_push']).to eq(false)
      end
    end

    context 'when authenticated', 'as a guest' do
      it_behaves_like '403 response' do
        let(:request) { get api(route, guest) }
...