Commit bde12f75 authored by Nick Thomas's avatar Nick Thomas

Merge branch 'master' into ce-to-ee-2017-06-07

parents be5c1601 e7f76734
@@ -53,7 +53,7 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
   private

   def set_application_setting
-    @application_setting = ApplicationSetting.current
+    @application_setting = current_application_settings
   end

   def application_setting_params
@@ -178,7 +178,8 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
       :check_namespace_plan,
       :mirror_max_delay,
       :mirror_max_capacity,
-      :mirror_capacity_threshold
+      :mirror_capacity_threshold,
+      :authorized_keys_enabled
     ]
   end
 end
module Projects
class IssueLinksController < ApplicationController
before_action :authorize_admin_issue_link!, only: [:create, :destroy]
def index
render json: issues
end
def create
create_params = params.slice(:issue_references)
result = IssueLinks::CreateService.new(issue, current_user, create_params).execute
render json: { message: result[:message], issues: issues }, status: result[:http_status]
end
def destroy
issue_link = IssueLink.find(params[:id])
return render_403 unless can?(current_user, :admin_issue_link, issue_link.target.project)
IssueLinks::DestroyService.new(issue_link, current_user).execute
render json: { issues: issues }
end
private
def issues
IssueLinks::ListService.new(issue, current_user).execute
end
def authorize_admin_issue_link!
render_403 unless can?(current_user, :admin_issue_link, @project)
end
def issue
@issue ||=
IssuesFinder.new(current_user, project_id: @project.id)
.execute
.find_by!(iid: params[:issue_id])
end
end
end
@@ -16,6 +16,7 @@
 #   label_name: string
 #   sort: string
 #   non_archived: boolean
+#   feature_availability_check: boolean (default: true)
 #   iids: integer[]
 #
 class IssuableFinder
@@ -25,11 +26,15 @@ class IssuableFinder
   ARRAY_PARAMS = { label_name: [], iids: [] }.freeze

   VALID_PARAMS = (SCALAR_PARAMS + [ARRAY_PARAMS]).freeze

+  DEFAULT_PARAMS = {
+    feature_availability_check: true
+  }.freeze
+
   attr_accessor :current_user, :params

   def initialize(current_user, params = {})
     @current_user = current_user
-    @params = params
+    @params = DEFAULT_PARAMS.merge(params).with_indifferent_access
   end

   def execute
@@ -126,7 +131,20 @@ class IssuableFinder
       ProjectsFinder.new(current_user: current_user, project_ids_relation: item_project_ids(items)).execute
     end

-    @projects = projects.with_feature_available_for_user(klass, current_user).reorder(nil)
+    # Querying through feature availability for a user is expensive
+    # (see https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1719#note_31406525),
+    # and there are cases in which a project-level access check is enough.
+    # Either way, the `feature_availability_check` param should default to `true`.
+    #
+    projects =
+      if params[:feature_availability_check]
+        projects.with_feature_available_for_user(klass, current_user)
+      else
+        projects
+      end
+
+    @projects = projects.reorder(nil)
   end

   def search
......
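For context, a minimal usage sketch of the new param. `IssuesFinder` and the `project_id` param are the existing finder API from CE; the caller and the assumption that it has already done its own project-level permission check are hypothetical.

```ruby
# Hypothetical caller: project-level access was already verified elsewhere,
# so the expensive per-feature availability query can be skipped.
issues = IssuesFinder
  .new(current_user, project_id: project.id, feature_availability_check: false)
  .execute

# Default behaviour is unchanged: omitting the param keeps the check enabled.
all_visible = IssuesFinder.new(current_user, project_id: project.id).execute
```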
@@ -56,7 +56,8 @@ module Ci
       trigger: 3,
       schedule: 4,
       api: 5,
-      external: 6
+      external: 6,
+      pipeline: 7
     }

     state_machine :status, initial: :created do
@@ -382,7 +383,8 @@ module Ci
     def predefined_variables
       [
-        { key: 'CI_PIPELINE_ID', value: id.to_s, public: true }
+        { key: 'CI_PIPELINE_ID', value: id.to_s, public: true },
+        { key: 'CI_PIPELINE_SOURCE', value: source.to_s, public: true }
       ]
     end
......
module Geo
module Model
extend ActiveSupport::Concern
included do
def self.table_name_prefix
"geo_"
end
end
end
end
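As an illustration (not part of the change), the `geo_` prefix combines with Rails' usual table-name inference, together with the `event_log` inflection added later in this diff, to resolve the Geo models defined below:

```ruby
# Sketch of the resulting table names, assuming standard ActiveRecord inference
# plus the `event_log` uncountable inflection from config/initializers/inflections.rb.
Geo::EventLog.table_name                # => "geo_event_log"
Geo::RepositoryUpdatedEvent.table_name  # => "geo_repository_updated_events"
```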
@@ -32,6 +32,7 @@ module EE
     module ClassMethods
       def defaults
         super.merge(
+          authorized_keys_enabled: true, # TODO default to false if the instance is configured to use AuthorizedKeysCommand
           elasticsearch_url: ENV['ELASTIC_URL'] || 'http://localhost:9200',
           elasticsearch_aws: false,
           elasticsearch_aws_region: ENV['ELASTIC_REGION'] || 'us-east-1',
......
module Geo
class EventLog < ActiveRecord::Base
include Geo::Model
belongs_to :repository_updated_event,
class_name: 'Geo::RepositoryUpdatedEvent',
foreign_key: :repository_updated_event_id
end
end
module Geo
class RepositoryUpdatedEvent < ActiveRecord::Base
include Geo::Model
REPOSITORY = 0
WIKI = 1
belongs_to :project
enum source: { repository: REPOSITORY, wiki: WIKI }
validates :project, presence: true
end
end
class IssueLink < ActiveRecord::Base
belongs_to :source, class_name: 'Issue'
belongs_to :target, class_name: 'Issue'
validates :source, presence: true
validates :target, presence: true
validates :source, uniqueness: { scope: :target_id, message: 'is already related' }
validate :check_self_relation
private
def check_self_relation
return unless source && target
if source == target
errors.add(:source, 'cannot be related to itself')
end
end
end
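A brief console sketch of how these validations behave; `issue_a` and `issue_b` are placeholders for two persisted issues and the output values are illustrative, not taken from the change itself.

```ruby
# Linking two distinct issues is valid.
IssueLink.new(source: issue_a, target: issue_b).valid? # => true

# A duplicate link in the same direction trips the scoped uniqueness validation.
IssueLink.create(source: issue_a, target: issue_b)
IssueLink.new(source: issue_a, target: issue_b).valid?
# => false ("is already related")

# Linking an issue to itself trips check_self_relation.
IssueLink.new(source: issue_a, target: issue_a).valid?
# => false ("cannot be related to itself")
```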
@@ -6,11 +6,13 @@ class License < ActiveRecord::Base
   GEO_FEATURE = 'GitLab_Geo'.freeze
   AUDITOR_USER_FEATURE = 'GitLab_Auditor_User'.freeze
   SERVICE_DESK_FEATURE = 'GitLab_ServiceDesk'.freeze
+  RELATED_ISSUES_FEATURE = 'RelatedIssues'.freeze

   FEATURE_CODES = {
     geo: GEO_FEATURE,
     auditor_user: AUDITOR_USER_FEATURE,
     service_desk: SERVICE_DESK_FEATURE,
+    related_issues: RELATED_ISSUES_FEATURE,
     # Features that make sense to Namespace:
     deploy_board: DEPLOY_BOARD_FEATURE,
     file_lock: FILE_LOCK_FEATURE
@@ -22,7 +24,7 @@ class License < ActiveRecord::Base
   EARLY_ADOPTER_PLAN = 'early_adopter'.freeze

   EES_FEATURES = [
-    # ..
+    { RELATED_ISSUES_FEATURE => 1 }
   ].freeze

   EEP_FEATURES = [
......
@@ -3,7 +3,7 @@ class SystemNoteMetadata < ActiveRecord::Base
     commit description merge confidential visible label assignee cross_reference
     title time_tracking branch milestone discussion task moved opened closed merged
     outdated
-    approved unapproved
+    approved unapproved relate unrelate
   ].freeze

   validates :note, presence: true
......
@@ -22,6 +22,11 @@ module EE
           cannot! :create_note
           cannot! :read_project
         end
+
+        unless project.feature_available?(:related_issues)
+          cannot! :read_issue_link
+          cannot! :admin_issue_link
+        end
       end
     end
   end
@@ -55,6 +55,9 @@ class ProjectPolicy < BasePolicy
       can! :read_pipeline_schedule
       can! :read_build
     end
+
+    # EE-only
+    can! :read_issue_link
   end

   def reporter_access!
@@ -79,6 +82,9 @@
     if project.feature_available?(:deploy_board) || Rails.env.development?
       can! :read_deploy_board
     end
+
+    # EE-only
+    can! :admin_issue_link
   end

   # Permissions given when a user is team member of a project
@@ -321,5 +327,8 @@
     # NOTE: may be overridden by IssuePolicy
     can! :read_issue
+
+    # EE-only
+    can! :read_issue_link
   end
 end
@@ -2,7 +2,7 @@ module Ci
   class CreatePipelineService < BaseService
     attr_reader :pipeline

-    def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, mirror_update: false)
+    def execute(source, ignore_skip_ci: false, save_on_errors: true, trigger_request: nil, schedule: nil, mirror_update: false, &block)
       @pipeline = Ci::Pipeline.new(
         source: source,
         project: project,
@@ -51,7 +51,7 @@ module Ci
         return error('No stages / jobs for this pipeline.')
       end

-      _create_pipeline
+      _create_pipeline(&block)
     end

     private
@@ -60,7 +60,13 @@
       Ci::Pipeline.transaction do
         update_merge_requests_head_pipeline if pipeline.save

+<<<<<<< HEAD
         Ci::CreatePipelineStagesService
+=======
+        yield(pipeline) if block_given?
+
+        Ci::CreatePipelineBuildsService
+>>>>>>> master
           .new(project, current_user)
           .execute(pipeline)
       end
......
module Ci
class PipelineTriggerService < BaseService
def execute
if trigger_from_token
create_pipeline_from_trigger(trigger_from_token)
elsif job_from_token
create_pipeline_from_job(job_from_token)
end
end
private
def create_pipeline_from_trigger(trigger)
# This check avoids leaking the presence of the project when the user cannot read it
return unless trigger.project == project
trigger_request = trigger.trigger_requests.create(variables: params[:variables])
pipeline = Ci::CreatePipelineService.new(project, trigger.owner, ref: params[:ref])
.execute(:trigger, ignore_skip_ci: true, trigger_request: trigger_request)
if pipeline.persisted?
success(pipeline: pipeline)
else
error(pipeline.errors.messages, 400)
end
end
def create_pipeline_from_job(job)
# This check avoids leaking the presence of the project when the user cannot read it
return unless can?(job.user, :read_project, project)
return error("400 Job has to be running", 400) unless job.running?
return error("400 Variables not supported", 400) if params[:variables].any?
pipeline = Ci::CreatePipelineService.new(project, job.user, ref: params[:ref]).
execute(:pipeline, ignore_skip_ci: true) do |pipeline|
job.sourced_pipelines.create!(
source_pipeline: job.pipeline,
source_project: job.project,
pipeline: pipeline,
project: project)
end
if pipeline.persisted?
success(pipeline: pipeline)
else
error(pipeline.errors.messages, 400)
end
end
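# NOTE: the `defined?` guards in the two lookups below memoize even a nil result,
# so a token that matches neither a trigger nor a running job is only queried once
# per service call.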
def trigger_from_token
return @trigger if defined?(@trigger)
@trigger = Ci::Trigger.find_by_token(params[:token].to_s)
end
def job_from_token
return @job if defined?(@job)
@job = Ci::Build.find_by_token(params[:token].to_s)
end
end
end
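A minimal invocation sketch, mirroring how the updated trigger API endpoint (further down in this diff) calls the service. The `token_from_request` value is a placeholder; `params` must carry `:token`, `:ref` and an optional `:variables` hash.

```ruby
# Hypothetical caller of the new service. Variables are only accepted for
# trigger tokens in this iteration; job tokens with variables return a 400.
params = { token: token_from_request, ref: 'master', variables: { 'FOO' => 'bar' } }

result = Ci::PipelineTriggerService.new(project, nil, params).execute

if result.nil?
  # token matched neither a trigger nor a running job - treat as not found
elsif result[:http_status]
  # error path: result[:message] and result[:http_status] describe the failure
else
  pipeline = result[:pipeline]
end
```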
module EE
module WikiPages
# BaseService EE mixin
#
# This module is intended to encapsulate EE-specific service logic
# and be included in the `WikiPages::BaseService` service
module BaseService
extend ActiveSupport::Concern
private
def execute_hooks(page, action = 'create')
super
process_wiki_repository_update
end
def process_wiki_repository_update
if ::Gitlab::Geo.primary?
# Create wiki repository updated event on Geo event log
::Geo::RepositoryUpdatedEventStore.new(project, source: Geo::RepositoryUpdatedEvent::WIKI).create
# Triggers repository update on secondary nodes
::Gitlab::Geo.notify_wiki_update(project)
end
end
end
end
end
module Geo
class RepositoryUpdatedEventStore
attr_reader :project, :source, :refs, :changes
def initialize(project, refs: [], changes: [], source: Geo::RepositoryUpdatedEvent::REPOSITORY)
@project = project
@refs = refs
@changes = changes
@source = source
end
def create
return unless Gitlab::Geo.primary?
Geo::EventLog.transaction do
event_log = Geo::EventLog.new
event_log.repository_updated_event = build_event
event_log.save!
end
rescue ActiveRecord::RecordInvalid
log("#{Geo::PushEvent.sources.key(source).humanize} updated event could not be created")
end
private
def build_event
Geo::RepositoryUpdatedEvent.new(
project: project,
source: source,
ref: ref,
branches_affected: branches_affected,
tags_affected: tags_affected,
new_branch: push_to_new_branch?,
remove_branch: push_remove_branch?
)
end
def ref
refs.first if refs.length == 1
end
def branches_affected
refs.count { |ref| Gitlab::Git.branch_ref?(ref) }
end
def tags_affected
refs.count { |ref| Gitlab::Git.tag_ref?(ref) }
end
def push_to_new_branch?
changes.any? { |change| Gitlab::Git.branch_ref?(change[:ref]) && Gitlab::Git.blank_ref?(change[:before]) }
end
def push_remove_branch?
changes.any? { |change| Gitlab::Git.branch_ref?(change[:ref]) && Gitlab::Git.blank_ref?(change[:after]) }
end
def log(message)
Rails.logger.info("#{self.class.name}: #{message} for project #{project.path_with_namespace} (#{project.id})")
end
end
end
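A minimal usage sketch, mirroring the `EE::PostReceive` mixin earlier in this diff. The `refs` and `changes` values are illustrative placeholders for what a post-receive hook would provide; the SHA is made up.

```ruby
# Record a repository update on the Geo event log for a push to master.
refs    = ['refs/heads/master']
changes = [{ before: Gitlab::Git::BLANK_SHA, after: 'b83d6e391c22777fca1ed3012fce84f633d7fed0', ref: 'refs/heads/master' }]

Geo::RepositoryUpdatedEventStore.new(project, refs: refs, changes: changes).create

# Wiki updates reuse the same store with a different source.
Geo::RepositoryUpdatedEventStore
  .new(project, source: Geo::RepositoryUpdatedEvent::WIKI)
  .create
```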
module IssueLinks
class CreateService < BaseService
def initialize(issue, user, params)
@issue, @current_user, @params = issue, user, params.dup
end
def execute
if referenced_issues.blank?
return error('No Issue found for given reference', 401)
end
create_issue_links
success
end
private
def create_issue_links
referenced_issues.each do |referenced_issue|
create_notes(referenced_issue) if relate_issues(referenced_issue)
end
end
def relate_issues(referenced_issue)
IssueLink.create(source: @issue, target: referenced_issue)
end
def create_notes(referenced_issue)
SystemNoteService.relate_issue(@issue, referenced_issue, current_user)
SystemNoteService.relate_issue(referenced_issue, @issue, current_user)
end
def referenced_issues
@referenced_issues ||= begin
issue_references = params[:issue_references]
text = issue_references.join(' ')
extractor = Gitlab::ReferenceExtractor.new(@issue.project, @current_user)
extractor.analyze(text)
extractor.issues.select do |issue|
can?(current_user, :admin_issue_link, issue)
end
end
end
end
end
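A hedged sketch of how the controller above drives this service. The reference strings are illustrative; `:issue_references` is any array of references that `Gitlab::ReferenceExtractor` understands.

```ruby
# Hypothetical call, matching IssueLinksController#create's params.slice(:issue_references).
params = { issue_references: ['#42', 'group/other-project#7'] }

result = IssueLinks::CreateService.new(issue, current_user, params).execute

result[:status]
# => :success when at least one referenced issue was found,
#    or :error with result[:message] and result[:http_status] otherwise
```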
module IssueLinks
class DestroyService < BaseService
def initialize(issue_link, user)
@issue_link = issue_link
@current_user = user
@issue = issue_link.source
@referenced_issue = issue_link.target
end
def execute
remove_relation
create_notes
success(message: 'Relation was removed')
end
private
def remove_relation
@issue_link.destroy!
end
def create_notes
SystemNoteService.unrelate_issue(@issue, @referenced_issue, current_user)
SystemNoteService.unrelate_issue(@referenced_issue, @issue, current_user)
end
end
end
module IssueLinks
class ListService
include Gitlab::Routing
def initialize(issue, user)
@issue, @current_user, @project = issue, user, issue.project
end
def execute
issues.map do |referenced_issue|
{
id: referenced_issue.id,
iid: referenced_issue.iid,
title: referenced_issue.title,
state: referenced_issue.state,
project_path: referenced_issue.project.path,
namespace_full_path: referenced_issue.project.namespace.full_path,
path: namespace_project_issue_path(referenced_issue.project.namespace, referenced_issue.project, referenced_issue.iid),
destroy_relation_path: destroy_relation_path(referenced_issue)
}
end
end
private
def issues
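# NOTE: @issue.id is an integer primary key read from the database record, not user
# input, so interpolating it into the JOIN below is safe; matching both the source
# and target side keeps the relation bi-directional.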
related_issues = Issue
.select(['issues.*', 'issue_links.id AS issue_links_id'])
.joins("INNER JOIN issue_links ON
(issue_links.source_id = issues.id AND issue_links.target_id = #{@issue.id})
OR
(issue_links.target_id = issues.id AND issue_links.source_id = #{@issue.id})")
.preload(project: :namespace)
.reorder('issue_links_id')
Ability.issues_readable_by_user(related_issues, @current_user)
end
def destroy_relation_path(issue)
# Make sure the user can admin both the current issue AND the
# referenced issue projects in order to return the removal link.
if can_destroy_issue_link_on_current_project? && can_destroy_issue_link?(issue.project)
namespace_project_issue_link_path(@project.namespace,
@issue.project,
@issue.iid,
issue.issue_links_id)
end
end
def can_destroy_issue_link_on_current_project?
return @can_destroy_on_current_project if defined?(@can_destroy_on_current_project)
@can_destroy_on_current_project = can_destroy_issue_link?(@project)
end
def can_destroy_issue_link?(project)
Ability.allowed?(@current_user, :admin_issue_link, project)
end
end
end
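For reference, a sketch of the structure the service returns; the keys come from `execute` above, while the values and paths are illustrative only.

```ruby
IssueLinks::ListService.new(issue, current_user).execute
# => [
#      {
#        id: 84, iid: 14, title: 'Broken spam filter', state: 'opened',
#        project_path: 'spam', namespace_full_path: 'gitlab-org',
#        path: '/gitlab-org/spam/issues/14',
#        destroy_relation_path: '/gitlab-org/spam/issues/14/links/7'
#      }
#    ]
```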
@@ -552,6 +552,38 @@ module SystemNoteService
     create_note(NoteSummary.new(noteable, project, author, body, action: 'moved'))
   end
#
# noteable - Noteable object
# noteable_ref - Referenced noteable object
# user - User performing reference
#
# Example Note text:
#
# "marked this issue as related to gitlab-ce#9001"
#
# Returns the created Note object
def relate_issue(noteable, noteable_ref, user)
body = "marked this issue as related to #{noteable_ref.to_reference(noteable.project)}"
create_note(NoteSummary.new(noteable, noteable.project, user, body, action: 'relate'))
end
#
# noteable - Noteable object
# noteable_ref - Referenced noteable object
# user - User performing reference
#
# Example Note text:
#
# "removed the relation with gitlab-ce#9001"
#
# Returns the created Note object
def unrelate_issue(noteable, noteable_ref, user)
body = "removed the relation with #{noteable_ref.to_reference(noteable.project)}"
create_note(NoteSummary.new(noteable, noteable.project, user, body, action: 'unrelate'))
end
  # Called when the merge request is approved by user
  #
  # noteable - Noteable object
......
module WikiPages
  class BaseService < ::BaseService
+   prepend EE::WikiPages::BaseService
+
    def hook_data(page, action)
      hook_data = {
        object_kind: page.class.name.underscore,
......
@@ -3,9 +3,6 @@ module WikiPages
    def execute(page)
      if page&.delete
        execute_hooks(page, 'delete')
-
-       # Triggers repository update on secondary nodes when Geo is enabled
-       Gitlab::Geo.notify_wiki_update(project) if Gitlab::Geo.primary?
      end

      page
......
@@ -652,6 +652,22 @@
            installations. Set to 0 to completely disable polling.
            = link_to icon('question-circle'), help_page_path('administration/polling')

  %fieldset
    %legend Performance optimization
    .form-group
      .col-sm-offset-2.col-sm-10
        .checkbox
          = f.label :authorized_keys_enabled do
            = f.check_box :authorized_keys_enabled
            Write to "authorized_keys" file
          .help-block
            By default, we write to the "authorized_keys" file to support Git
            over SSH without additional configuration. GitLab can be optimized
            to authenticate SSH keys via the database file. Only uncheck this
            if you have configured your OpenSSH server to use the
            AuthorizedKeysCommand. Click on the help icon for more details.
            = link_to icon('question-circle'), help_page_path('administration/operations/speed_up_ssh', anchor: 'the-solution')

  - if Gitlab::Geo.license_allows?
    %fieldset
      %legend GitLab Geo
......
module EE
# PostReceive EE mixin
#
# This module is intended to encapsulate EE-specific model logic
# and be prepended in the `PostReceive` worker
module PostReceive
extend ActiveSupport::Concern
extend ::Gitlab::CurrentSettings
private
def after_project_changes_hooks(post_received, user, refs, changes)
super
# Generate repository updated event on Geo event log when Geo is enabled
::Geo::RepositoryUpdatedEventStore.new(post_received.project, refs: refs, changes: changes).create
end
def process_wiki_changes(post_received)
super
update_wiki_es_indexes(post_received)
if ::Gitlab::Geo.enabled?
# Create wiki repository updated event on Geo event log
::Geo::RepositoryUpdatedEventStore.new(post_received.project, source: Geo::RepositoryUpdatedEvent::WIKI).create
# Triggers repository update on secondary nodes
::Gitlab::Geo.notify_wiki_update(post_received.project)
end
end
def update_wiki_es_indexes(post_received)
return unless current_application_settings.elasticsearch_indexing?
post_received.project.wiki.index_blobs
end
end
end
class PostReceive
  include Sidekiq::Worker
  include DedicatedSidekiqQueue
- extend Gitlab::CurrentSettings
+ prepend EE::PostReceive

  def perform(project_identifier, identifier, changes)
    project, is_wiki = parse_project_identifier(project_identifier)
@@ -18,37 +18,18 @@ class PostReceive
    post_received = Gitlab::GitPostReceive.new(project, identifier, changes)

    if is_wiki
-     update_wiki_es_indexes(post_received)
-
-     # Triggers repository update on secondary nodes when Geo is enabled
-     Gitlab::Geo.notify_wiki_update(post_received.project) if Gitlab::Geo.enabled?
+     process_wiki_changes(post_received)
    else
      process_project_changes(post_received)
-     process_repository_update(post_received)
    end
  end

- def process_repository_update(post_received)
+ private
+
+ def process_project_changes(post_received)
    changes = []
    refs = Set.new

-   post_received.changes_refs do |oldrev, newrev, ref|
-     @user ||= post_received.identify(newrev)
-
-     unless @user
-       log("Triggered hook for non-existing user \"#{post_received.identifier}\"")
-       return false
-     end
-
-     changes << Gitlab::DataBuilder::Repository.single_change(oldrev, newrev, ref)
-     refs << ref
-   end
-
-   hook_data = Gitlab::DataBuilder::Repository.update(post_received.project, @user, changes, refs.to_a)
-   SystemHooksService.new.execute_hooks(hook_data, :repository_update_hooks)
- end
-
- def process_project_changes(post_received)
    post_received.changes_refs do |oldrev, newrev, ref|
      @user ||= post_received.identify(newrev)
@@ -62,16 +43,22 @@ class PostReceive
      elsif Gitlab::Git.branch_ref?(ref)
        GitPushService.new(post_received.project, @user, oldrev: oldrev, newrev: newrev, ref: ref).execute
      end
+
+     changes << Gitlab::DataBuilder::Repository.single_change(oldrev, newrev, ref)
+     refs << ref
    end
- end

- def update_wiki_es_indexes(post_received)
-   return unless current_application_settings.elasticsearch_indexing?
+   after_project_changes_hooks(post_received, @user, refs.to_a, changes)
+ end

-   post_received.project.wiki.index_blobs
+ def after_project_changes_hooks(post_received, user, refs, changes)
+   hook_data = Gitlab::DataBuilder::Repository.update(post_received.project, user, changes, refs)
+   SystemHooksService.new.execute_hooks(hook_data, :repository_update_hooks)
  end

- private
+ def process_wiki_changes(post_received)
+   # Nothing defined here yet.
+ end

  # To maintain backwards compatibility, we accept both gl_repository or
  # repository paths as project identifiers. Our plan is to migrate to
......
---
title: Allow manually adding bi-directional relationships between issues on the issue page (EES feature)
merge_request:
author:
---
title: Add push events to Geo event log
merge_request:
author:
---
title: Allow to Trigger Pipeline using CI Job Token
merge_request:
author:
---
title: Lookup users by email in LDAP if lookup by DN fails during sync
merge_request: 2003
author:
---
title: "[Elasticsearch] Improve code search for camel case"
merge_request:
author:
@@ -10,6 +10,6 @@
#   end
#
ActiveSupport::Inflector.inflections do |inflect|
- inflect.uncountable %w(award_emoji project_statistics project_registry file_registry system_note_metadata)
+ inflect.uncountable %w(award_emoji project_statistics system_note_metadata event_log project_registry file_registry)
  inflect.acronym 'EE'
end
require 'active_record/connection_adapters/abstract_mysql_adapter'
module ActiveRecord
module ConnectionAdapters
class AbstractMysqlAdapter
NATIVE_DATABASE_TYPES.merge!(
bigserial: { name: 'bigint(20) auto_increment PRIMARY KEY' }
)
end
end
end
- if Gitlab::Geo.secondary_role_enabled?
+ if File.exist?(Rails.root.join('config/database_geo.yml')) &&
+     Gitlab::Geo.secondary_role_enabled?
    Rails.application.configure do
      config.geo_database = config_for(:database_geo)
    end
......
@@ -3,6 +3,11 @@
en:
  hello: "Hello world"
+ activerecord:
+   attributes:
+     issue_link:
+       source: Source issue
+       target: Target issue
  errors:
    messages:
      label_already_exists_at_group_level: "already exists at group level for %{group}. Please choose another one."
......
@@ -311,6 +311,8 @@ constraints(ProjectUrlConstrainer.new) do
      post :bulk_update
      post :export_csv
    end
+
+   resources :issue_links, only: [:index, :create, :destroy], as: 'links', path: 'links'
  end

  resources :project_members, except: [:show, :new, :edit], constraints: { id: /[a-zA-Z.\/0-9_\-#%+]+/ }, concerns: :access_requestable do
......
class CreateIssueLinksTable < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
create_table :issue_links do |t|
t.integer :source_id, null: false, index: true
t.integer :target_id, null: false, index: true
t.timestamps null: true
end
add_index :issue_links, [:source_id, :target_id], unique: true
add_concurrent_foreign_key :issue_links, :issues, column: :source_id
add_concurrent_foreign_key :issue_links, :issues, column: :target_id
end
def down
drop_table :issue_links
end
end
class CreateGeoRepositoryUpdatedEvents < ActiveRecord::Migration
DOWNTIME = false
def change
create_table :geo_repository_updated_events, id: :bigserial do |t|
t.datetime :created_at, null: false
t.integer :branches_affected, null: false
t.integer :tags_affected, null: false
t.references :project, index: true, foreign_key: { on_delete: :cascade }, null: false
t.integer :source, limit: 2, index: true, null: false
t.boolean :new_branch, default: false, null: false
t.boolean :remove_branch, default: false, null: false
t.text :ref
end
end
end
class CreateGeoEventLog < ActiveRecord::Migration
DOWNTIME = false
def change
create_table :geo_event_log, id: :bigserial do |t|
t.datetime :created_at, null: false
t.integer :repository_updated_event_id, limit: 8, index: true
t.foreign_key :geo_repository_updated_events,
column: :repository_updated_event_id, on_delete: :cascade
end
end
end
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class AddAuthorizedKeysEnabledToApplicationSettings < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
def change
# allow_null: true because we want to set the default based on if the
# instance is configured to use AuthorizedKeysCommand
add_column :application_settings, :authorized_keys_enabled, :boolean, allow_null: true
end
end
@@ -141,6 +141,7 @@ ActiveRecord::Schema.define(version: 20170602003304) do
    t.integer "mirror_max_delay", default: 5, null: false
    t.integer "mirror_max_capacity", default: 100, null: false
    t.integer "mirror_capacity_threshold", default: 50, null: false
+   t.boolean "authorized_keys_enabled"
  end

  create_table "approvals", force: :cascade do |t|
@@ -574,6 +575,13 @@ ActiveRecord::Schema.define(version: 20170602003304) do
  add_index "forked_project_links", ["forked_to_project_id"], name: "index_forked_project_links_on_forked_to_project_id", unique: true, using: :btree

+ create_table "geo_event_log", id: :bigserial, force: :cascade do |t|
+   t.datetime "created_at", null: false
+   t.integer "repository_updated_event_id", limit: 8
+ end
+
+ add_index "geo_event_log", ["repository_updated_event_id"], name: "index_geo_event_log_on_repository_updated_event_id", using: :btree
+
  create_table "geo_nodes", force: :cascade do |t|
    t.string "schema"
    t.string "host"
@@ -594,6 +602,20 @@ ActiveRecord::Schema.define(version: 20170602003304) do
  add_index "geo_nodes", ["host"], name: "index_geo_nodes_on_host", using: :btree
  add_index "geo_nodes", ["primary"], name: "index_geo_nodes_on_primary", using: :btree

+ create_table "geo_repository_updated_events", id: :bigserial, force: :cascade do |t|
+   t.datetime "created_at", null: false
+   t.integer "branches_affected", null: false
+   t.integer "tags_affected", null: false
+   t.integer "project_id", null: false
+   t.integer "source", limit: 2, null: false
+   t.boolean "new_branch", default: false, null: false
+   t.boolean "remove_branch", default: false, null: false
+   t.text "ref"
+ end
+
+ add_index "geo_repository_updated_events", ["project_id"], name: "index_geo_repository_updated_events_on_project_id", using: :btree
+ add_index "geo_repository_updated_events", ["source"], name: "index_geo_repository_updated_events_on_source", using: :btree
+
  create_table "historical_data", force: :cascade do |t|
    t.date "date", null: false
    t.integer "active_user_count"
@@ -631,6 +653,17 @@ ActiveRecord::Schema.define(version: 20170602003304) do
  add_index "issue_assignees", ["issue_id", "user_id"], name: "index_issue_assignees_on_issue_id_and_user_id", unique: true, using: :btree
  add_index "issue_assignees", ["user_id"], name: "index_issue_assignees_on_user_id", using: :btree

+ create_table "issue_links", force: :cascade do |t|
+   t.integer "source_id", null: false
+   t.integer "target_id", null: false
+   t.datetime "created_at"
+   t.datetime "updated_at"
+ end
+
+ add_index "issue_links", ["source_id", "target_id"], name: "index_issue_links_on_source_id_and_target_id", unique: true, using: :btree
+ add_index "issue_links", ["source_id"], name: "index_issue_links_on_source_id", using: :btree
+ add_index "issue_links", ["target_id"], name: "index_issue_links_on_target_id", using: :btree
+
  create_table "issue_metrics", force: :cascade do |t|
    t.integer "issue_id", null: false
    t.datetime "first_mentioned_in_commit_at"
@@ -1751,8 +1784,12 @@ ActiveRecord::Schema.define(version: 20170602003304) do
  add_foreign_key "ci_triggers", "users", column: "owner_id", name: "fk_e8e10d1964", on_delete: :cascade
  add_foreign_key "ci_variables", "projects", name: "fk_ada5eb64b3", on_delete: :cascade
  add_foreign_key "container_repositories", "projects"
+ add_foreign_key "geo_event_log", "geo_repository_updated_events", column: "repository_updated_event_id", on_delete: :cascade
+ add_foreign_key "geo_repository_updated_events", "projects", on_delete: :cascade
  add_foreign_key "issue_assignees", "issues", name: "fk_b7d881734a", on_delete: :cascade
  add_foreign_key "issue_assignees", "users", name: "fk_5e0c8d9154", on_delete: :cascade
+ add_foreign_key "issue_links", "issues", column: "source_id", name: "fk_c900194ff2", on_delete: :cascade
+ add_foreign_key "issue_links", "issues", column: "target_id", name: "fk_e71bb44f1f", on_delete: :cascade
  add_foreign_key "issue_metrics", "issues", on_delete: :cascade
  add_foreign_key "label_priorities", "labels", on_delete: :cascade
  add_foreign_key "label_priorities", "projects", on_delete: :cascade
......
@@ -248,23 +248,10 @@ main: # 'main' is the GitLab 'provider ID' of this LDAP server
### User DN has changed

When an LDAP user is created in GitLab, their LDAP DN is stored for later reference.
-If a user's DN changes, it can cause problems for LDAP sync. Administrators can
-manually update a user's stored DN in this case.
-
-> **Note:** If GitLab cannot find a user by their DN, it will attempt to fallback
-to finding the user by their email. If the lookup is successful, GitLab will
-update the stored DN to the new value.
-
-1. Sign in to GitLab as an administrator user.
-1. Navigate to **Admin area -> Users**.
-1. Search for the user
-1. Open the user, by clicking on their name. Do not click 'Edit'.
-1. Navigate to the **Identities** tab.
-1. Click 'Edit' next to the LDAP identity.
-1. Change the 'Identifier' to match the user's new LDAP DN.
-1. Save the identity.
-
-Now the user should sync correctly.
+If GitLab cannot find a user by their DN, it will attempt to fallback
+to finding the user by their email. If the lookup is successful, GitLab will
+update the stored DN to the new value.

### User is not being added to a group
......
@@ -6,3 +6,4 @@
- [Cleaning up Redis sessions](operations/cleaning_up_redis_sessions.md)
- [Understanding Unicorn and unicorn-worker-killer](operations/unicorn.md)
- [Moving repositories to a new location](operations/moving_repositories.md)
- [Speed up SSH operations](operations/speed_up_ssh.md)
# Speed up SSH operations
## The problem
SSH operations become slow as the number of users grows.
## The reason
OpenSSH searches for a key to authorize a user via a linear search. In the worst case, such as when the user is not authorized to access GitLab, OpenSSH will scan the entire file to search for a key. This can take significant time and disk I/O, which will delay users attempting to push or pull to a repository. Making matters worse, if users add or remove keys frequently, the operating system may not be able to cache the authorized_keys file, which causes the disk to be accessed repeatedly.
## The solution
GitLab Shell provides a way to authorize SSH users via a fast, indexed lookup to the GitLab database. GitLab Shell uses the fingerprint of the SSH key to check whether the user is authorized to access GitLab.
> **Warning:** OpenSSH version 6.9+ is required because `AuthorizedKeysCommand` must be able to accept a fingerprint. These instructions will break installations using older versions of OpenSSH, such as those included with CentOS as of May 2017.
Create this file at `/opt/gitlab-shell/authorized_keys`:
```
#!/bin/bash
if [[ "$1" == "git" ]]; then
/opt/gitlab/embedded/service/gitlab-shell/bin/authorized_keys $2
fi
```
Set appropriate ownership and permissions:
```
sudo chown root:git /opt/gitlab-shell/authorized_keys
sudo chmod 0650 /opt/gitlab-shell/authorized_keys
```
Add the following to `/etc/ssh/sshd_config`:
```
AuthorizedKeysCommand /opt/gitlab-shell/authorized_keys %u %k
AuthorizedKeysCommandUser git
```
Reload the sshd service:
```
sudo service sshd reload
```
Confirm that SSH is working by removing your user's SSH key in the UI, adding a new one, and attempting to pull a repo.
> **Warning:** Do not disable writes until SSH is confirmed to be working perfectly because the file will quickly become out-of-date.
In the case of lookup failures (which are not uncommon), the `authorized_keys` file is still scanned, so Git SSH performance will remain slow for many users as long as a large file exists.
You can disable further writes to the `authorized_keys` file by unchecking `Write to "authorized_keys" file` in the Application Settings of your GitLab installation.
![Write to authorized keys setting](img/write_to_authorized_keys_setting.png)
Again, confirm that SSH is working by removing your user's SSH key in the UI, adding a new one, and attempting to pull a repo.
Then you can backup and delete your `authorized_keys` file for best performance.
## How to go back to using the `authorized_keys` file
This is a brief overview. Please refer to the above instructions for more context.
1. [Rebuild the `authorized_keys` file](../raketasks/maintenance.md#rebuild-authorized_keys-file)
1. Enable writes to the `authorized_keys` file in Application Settings
1. Remove the `AuthorizedKeysCommand` lines from `/etc/ssh/sshd_config`
1. Reload sshd: `sudo service sshd reload`
1. Remove the `/opt/gitlab-shell/authorized_keys` file
@@ -4,7 +4,8 @@
- [Introduced][ci-229] in GitLab CE 7.14.
- GitLab 8.12 has a completely redesigned job permissions system. Read all
  about the [new model and its implications](../../user/project/new_ci_build_permissions_model.md#job-triggers).
- GitLab 9.0 introduced a trigger ownership to solve permission problems.
+ - GitLab 9.3 introduced an ability to use a CI job token to trigger dependent pipelines.

Triggers can be used to force a rebuild of a specific `ref` (branch or tag)
with an API call.
@@ -161,6 +162,25 @@ probably not the wisest idea, so you might want to use a
[secure variable](../variables/README.md#user-defined-variables-secure-variables)
for that purpose._

---

Since GitLab 9.3 you can trigger a new pipeline by using the `CI_JOB_TOKEN` variable.
This method currently doesn't support variables; support for them is planned for GitLab 9.4.

Triggering a pipeline this way creates a dependent pipeline relation that is visible on the pipeline graph.

```yaml
build_docs:
  stage: deploy
  script:
    - curl --request POST --form "token=$CI_JOB_TOKEN" --form ref=master https://gitlab.example.com/api/v4/projects/9/trigger/pipeline
  only:
    - tags
```

Pipelines triggered that way expose a special variable: `CI_PIPELINE_SOURCE=pipeline`.

### Making use of trigger variables

Using trigger variables can prove useful for a variety of reasons.
......
@@ -54,6 +54,7 @@ future GitLab releases.**
| **CI_RUNNER_ID** | 8.10 | 0.5 | The unique id of runner being used |
| **CI_RUNNER_TAGS** | 8.10 | 0.5 | The defined runner tags |
| **CI_PIPELINE_ID** | 8.10 | 0.5 | The unique id of the current pipeline that GitLab CI uses internally |
+| **CI_PIPELINE_SOURCE** | 9.3 | all | Indicates how the pipeline was triggered; possible values are: `push`, `web`, `trigger`, `schedule`, `api`, `pipeline` |
| **CI_PIPELINE_TRIGGERED** | all | all | The flag to indicate that job was [triggered] |
| **CI_PROJECT_DIR** | all | all | The full path where the repository is cloned and where the job is run |
| **CI_PROJECT_ID** | all | all | The unique id of the current project that GitLab CI uses internally |
......
@@ -284,6 +284,22 @@ sudo -u git -H bundle exec rake gitlab:check RAILS_ENV=production

If all items are green, then congratulations, the upgrade is complete!

### 13. Elasticsearch index update (if you currently use Elasticsearch)

In the 9.3 release we changed the index mapping to improve partial word matching. Re-create your index in one of the two ways listed below:

1. Re-create the index. The following command is suitable for smaller GitLab instances (no more than a few gigabytes of stored data):

    ```
    # Omnibus installations
    sudo gitlab-rake gitlab:elastic:index

    # Installations from source
    bundle exec rake gitlab:elastic:index
    ```

1. For larger GitLab instances, follow [Add GitLab's data to the Elasticsearch index](../integration/elasticsearch.md#add-gitlabs-data-to-the-elasticsearch-index).

## Things went south? Revert to previous version (9.2)

### 1. Revert the code to the previous version
......
@@ -11,28 +11,26 @@ module API
      end

      params do
        requires :ref, type: String, desc: 'The commit sha or name of a branch or tag'
-       requires :token, type: String, desc: 'The unique token of trigger'
+       requires :token, type: String, desc: 'The unique token of trigger or job token'
        optional :variables, type: Hash, desc: 'The list of variables to be injected into build'
      end
      post ":id/(ref/:ref/)trigger/pipeline", requirements: { ref: /.+/ } do
-       project = find_project(params[:id])
-       trigger = Ci::Trigger.find_by_token(params[:token].to_s)
-       not_found! unless project && trigger
-       unauthorized! unless trigger.project == project
-
        # validate variables
-       variables = params[:variables].to_h
-       unless variables.all? { |key, value| key.is_a?(String) && value.is_a?(String) }
+       params[:variables] = params[:variables].to_h
+       unless params[:variables].all? { |key, value| key.is_a?(String) && value.is_a?(String) }
          render_api_error!('variables needs to be a map of key-valued strings', 400)
        end

-       # create request and trigger builds
-       trigger_request = Ci::CreateTriggerRequestService.new.execute(project, trigger, params[:ref].to_s, variables)
-       if trigger_request
-         present trigger_request.pipeline, with: Entities::Pipeline
+       project = find_project(params[:id])
+       not_found! unless project
+
+       result = Ci::PipelineTriggerService.new(project, nil, params).execute
+       not_found! unless result
+
+       if result[:http_status]
+         render_api_error!(result[:message], result[:http_status])
        else
-         errors = 'No pipeline created'
-         render_api_error!(errors, 400)
+         present result[:pipeline], with: Entities::Pipeline
        end
      end
......
@@ -2,6 +2,19 @@ module EE
  module Gitlab
    module LDAP
      module Person
+       extend ActiveSupport::Concern
+
+       class_methods do
+         def find_by_email(email, adapter)
+           email_attributes = Array(adapter.config.attributes['email'])
+
+           email_attributes.each do |possible_attribute|
+             found_user = adapter.user(possible_attribute, email)
+             return found_user if found_user
+           end
+         end
+       end
+
        def ssh_keys
          if config.sync_ssh_keys? && entry.respond_to?(config.sync_ssh_keys)
            entry[config.sync_ssh_keys.to_sym]
......
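A hedged sketch of how the new email fallback is meant to be used; the `ldapmain` provider name is a placeholder for whatever provider the instance configures, and the overall flow mirrors `find_ldap_user` in `lib/gitlab/ldap/access.rb` further down in this diff.

```ruby
# Look the user up by stored DN first, then fall back to each configured
# email attribute when the DN lookup returns nothing.
adapter = Gitlab::LDAP::Adapter.new('ldapmain')

person = Gitlab::LDAP::Person.find_by_dn(user.ldap_identity.extern_uid, adapter) ||
         Gitlab::LDAP::Person.find_by_email(user.email, adapter)
```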
@@ -33,7 +33,7 @@ module Elasticsearch
          code_analyzer: {
            type: 'custom',
            tokenizer: 'standard',
-           filter: %w(code lowercase asciifolding),
+           filter: %w(code edgeNGram_filter lowercase asciifolding),
            char_filter: ["code_mapping"]
          },
          code_search_analyzer: {
@@ -61,8 +61,14 @@ module Elasticsearch
            preserve_original: 1,
            patterns: [
              "(\\p{Ll}+|\\p{Lu}\\p{Ll}+|\\p{Lu}+)",
-             "(\\d+)"
+             "(\\d+)",
+             "(?=([\\p{Lu}]+[\\p{L}]+))"
            ]
+         },
+         edgeNGram_filter: {
+           type: 'edgeNGram',
+           min_gram: 2,
+           max_gram: 40
          }
        },
        char_filter: {
......
module Gitlab
  class GitAccessWiki < GitAccess
    ERROR_MESSAGES = {
+<<<<<<< HEAD
      geo: "You can't push code to a secondary GitLab Geo node.",
+=======
+>>>>>>> master
      write_to_wiki: "You are not allowed to write to this project's wiki."
    }.freeze
@@ -14,10 +17,13 @@ module Gitlab
    end

    def check_single_change_access(change)
+<<<<<<< HEAD
      if Gitlab::Geo.enabled? && Gitlab::Geo.secondary?
        raise UnauthorizedError, ERROR_MESSAGES[:geo]
      end
+=======
+>>>>>>> master

      unless user_access.can_do_action?(:create_wiki)
        raise UnauthorizedError, ERROR_MESSAGES[:write_to_wiki]
      end
......
@@ -6,7 +6,7 @@
module Gitlab
  module LDAP
    class Access
-     attr_reader :adapter, :provider, :user, :ldap_user
+     attr_reader :adapter, :provider, :user, :ldap_user, :ldap_identity

      def self.open(user, &block)
        Gitlab::LDAP::Adapter.open(user.ldap_identity.provider) do |adapter|
@@ -32,7 +32,8 @@ module Gitlab
      def initialize(user, adapter = nil)
        @adapter = adapter
        @user = user
-       @provider = user.ldap_identity.provider
+       @provider = adapter&.provider || user.ldap_identity.provider
+       @ldap_identity = user.identities.find_by(provider: @provider)
      end

      def allowed?
@@ -43,7 +44,7 @@
        end

        # Block user in GitLab if he/she was blocked in AD
-       if Gitlab::LDAP::Person.disabled_via_active_directory?(user.ldap_identity.extern_uid, adapter)
+       if Gitlab::LDAP::Person.disabled_via_active_directory?(ldap_identity.extern_uid, adapter)
          block_user(user, 'is disabled in Active Directory')
          false
        else
@@ -65,15 +66,24 @@ module Gitlab
        Gitlab::LDAP::Config.new(provider)
      end

+     def find_ldap_user
+       found_user = Gitlab::LDAP::Person.find_by_dn(ldap_identity.extern_uid, adapter)
+       return found_user if found_user
+
+       if user.ldap_email?
+         Gitlab::LDAP::Person.find_by_email(user.email, adapter)
+       end
+     end
+
      def ldap_user
-       @ldap_user ||= Gitlab::LDAP::Person.find_by_dn(user.ldap_identity.extern_uid, adapter)
+       @ldap_user ||= find_ldap_user
      end

      def block_user(user, reason)
        user.ldap_block

        Gitlab::AppLogger.info(
-         "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
+         "LDAP account \"#{ldap_identity.extern_uid}\" #{reason}, " \
          "blocking Gitlab user \"#{user.name}\" (#{user.email})"
        )
      end
@@ -82,7 +92,7 @@ module Gitlab
        user.activate

        Gitlab::AppLogger.info(
-         "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
+         "LDAP account \"#{ldap_identity.extern_uid}\" #{reason}, " \
          "unblocking Gitlab user \"#{user.name}\" (#{user.email})"
        )
      end
@@ -90,6 +100,7 @@ module Gitlab
      def update_user
        update_email
        update_memberships
+       update_identity
        update_ssh_keys if sync_ssh_keys?
        update_kerberos_identity if import_kerberos_identities?
      end
@@ -156,6 +167,15 @@ module Gitlab
        user.update(email: ldap_email)
      end

+     def update_identity
+       return if ldap_user.dn.empty? || ldap_user.dn == ldap_identity.extern_uid
+
+       unless ldap_identity.update(extern_uid: ldap_user.dn)
+         Rails.logger.error "Could not update DN for #{user.name} (#{user.id})\n"\
+           "error messages: #{user.ldap_identity.errors.messages}"
+       end
+     end
+
      delegate :sync_ssh_keys?, to: :ldap_config

      def import_kerberos_identities?
......
@@ -197,6 +197,8 @@ module Gitlab
    #   add_key("key-42", "sha-rsa ...")
    #
    def add_key(key_id, key_content)
+     return unless self.authorized_keys_enabled?
+
      Gitlab::Utils.system_silent([gitlab_shell_keys_path,
                                   'add-key', key_id, self.class.strip_key(key_content)])
    end
@@ -206,6 +208,8 @@ module Gitlab
    # Ex.
    #   batch_add_keys { |adder| adder.add_key("key-42", "sha-rsa ...") }
    def batch_add_keys(&block)
+     return unless self.authorized_keys_enabled?
+
      IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys batch-add-keys), 'w') do |io|
        yield(KeyAdder.new(io))
      end
@@ -217,6 +221,8 @@ module Gitlab
    #   remove_key("key-342", "sha-rsa ...")
    #
    def remove_key(key_id, key_content)
+     return unless self.authorized_keys_enabled?
+
      Gitlab::Utils.system_silent([gitlab_shell_keys_path,
                                   'rm-key', key_id, key_content])
    end
@@ -227,6 +233,8 @@ module Gitlab
    #   remove_all_keys
    #
    def remove_all_keys
+     return unless self.authorized_keys_enabled?
+
      Gitlab::Utils.system_silent([gitlab_shell_keys_path, 'clear'])
    end
@@ -356,5 +364,9 @@ module Gitlab
    def gitlab_shell_keys_path
      File.join(gitlab_shell_path, 'bin', 'gitlab-keys')
    end
+
+   def authorized_keys_enabled?
+     current_application_settings.authorized_keys_enabled
+   end
  end
end
@@ -284,13 +284,18 @@ describe Projects::MergeRequestsController do
    context 'number of queries' do
      it 'verifies number of queries' do
+       RequestStore.begin!
+
        # pre-create objects
        merge_request

        recorded = ActiveRecord::QueryRecorder.new { go(format: :json) }

-       expect(recorded.count).to be_within(10).of(100)
+       expect(recorded.count).to be_within(1).of(31)
        expect(recorded.cached_count).to eq(0)
+
+       RequestStore.end!
+       RequestStore.clear!
      end
    end
  end
......
FactoryGirl.define do
factory :issue_link do
source factory: :issue
target factory: :issue
end
end
require 'rails_helper'
describe 'New/edit issue (EE)', :feature, :js do
include GitlabRoutingHelper
include ActionView::Helpers::JavaScriptHelper
include FormHelper
let!(:project) { create(:project) }
let!(:user) { create(:user)}
let!(:user2) { create(:user)}
let!(:milestone) { create(:milestone, project: project) }
let!(:label) { create(:label, project: project) }
let!(:label2) { create(:label, project: project) }
let!(:issue) { create(:issue, project: project, assignees: [user], milestone: milestone) }
before do
project.team << [user, :master]
project.team << [user2, :master]
login_as(user)
end
context 'new issue' do
before do
visit new_namespace_project_issue_path(project.namespace, project)
end
describe 'shorten users API pagination limit (CE)' do
before do
# Using `allow_any_instance_of`/`and_wrap_original`, `original` would
# somehow refer to the very block we defined to _wrap_ that method, instead of
# the original method, resulting in infinite recursion when called.
# This is likely a bug with helper modules included into dynamically generated view classes.
# To work around this, we have to hold on to and call to the original implementation manually.
original_issue_dropdown_options = FormHelper.instance_method(:issue_dropdown_options)
allow_any_instance_of(FormHelper).to receive(:issue_dropdown_options).and_wrap_original do |original, *args|
options = original_issue_dropdown_options.bind(original.receiver).call(*args)
options[:data][:per_page] = 2
options
end
visit new_namespace_project_issue_path(project.namespace, project)
click_button 'Unassigned'
wait_for_requests
end
it 'displays selected users even if they are not part of the original API call' do
find('.dropdown-input-field').native.send_keys user2.name
page.within '.dropdown-menu-user' do
expect(page).to have_content user2.name
click_link user2.name
end
find('.js-dropdown-input-clear').click
page.within '.dropdown-menu-user' do
expect(page).to have_content user.name
expect(find('.dropdown-menu-user a.is-active').first(:xpath, '..')['data-user-id']).to eq(user2.id.to_s)
end
end
end
describe 'multiple assignees' do
before do
click_button 'Unassigned'
wait_for_requests
end
it 'unselects other assignees when unassigned is selected' do
page.within '.dropdown-menu-user' do
click_link user2.name
end
page.within '.dropdown-menu-user' do
click_link 'Unassigned'
end
expect(find('input[name="issue[assignee_ids][]"]', visible: false).value).to match('0')
end
it 'toggles assign to me when current user is selected and unselected' do
page.within '.dropdown-menu-user' do
click_link user.name
end
expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
page.within('.dropdown-menu-user') do
click_link user.name
end
expect(find('a', text: 'Assign to me')).to be_visible
end
end
end
end
...@@ -24,7 +24,7 @@ describe 'New/edit issue', :feature, :js do ...@@ -24,7 +24,7 @@ describe 'New/edit issue', :feature, :js do
visit new_namespace_project_issue_path(project.namespace, project) visit new_namespace_project_issue_path(project.namespace, project)
end end
describe 'single assignee' do xdescribe 'shorten users API pagination limit (CE)' do
before do before do
# Using `allow_any_instance_of`/`and_wrap_original`, `original` would # Using `allow_any_instance_of`/`and_wrap_original`, `original` would
# somehow refer to the very block we defined to _wrap_ that method, instead of # somehow refer to the very block we defined to _wrap_ that method, instead of
...@@ -54,6 +54,7 @@ describe 'New/edit issue', :feature, :js do ...@@ -54,6 +54,7 @@ describe 'New/edit issue', :feature, :js do
click_link user2.name click_link user2.name
end end
find('.js-assignee-search').click
find('.js-dropdown-input-clear').click find('.js-dropdown-input-clear').click
page.within '.dropdown-menu-user' do page.within '.dropdown-menu-user' do
...@@ -63,7 +64,7 @@ describe 'New/edit issue', :feature, :js do ...@@ -63,7 +64,7 @@ describe 'New/edit issue', :feature, :js do
end end
end end
describe 'multiple assignees' do xdescribe 'single assignee (CE)' do
before do before do
click_button 'Unassigned' click_button 'Unassigned'
...@@ -75,6 +76,8 @@ describe 'New/edit issue', :feature, :js do ...@@ -75,6 +76,8 @@ describe 'New/edit issue', :feature, :js do
click_link user2.name click_link user2.name
end end
click_button user2.name
page.within '.dropdown-menu-user' do page.within '.dropdown-menu-user' do
click_link 'Unassigned' click_link 'Unassigned'
end end
...@@ -89,11 +92,13 @@ describe 'New/edit issue', :feature, :js do ...@@ -89,11 +92,13 @@ describe 'New/edit issue', :feature, :js do
expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible expect(find('a', text: 'Assign to me', visible: false)).not_to be_visible
click_button user.name
page.within('.dropdown-menu-user') do page.within('.dropdown-menu-user') do
click_link user.name click_link user.name
end end
expect(find('a', text: 'Assign to me')).to be_visible expect(page.find('.dropdown-menu-user', visible: false)).not_to be_visible
end end
end end
......
...@@ -75,7 +75,7 @@ feature 'Geo clone instructions', feature: true, js: true do ...@@ -75,7 +75,7 @@ feature 'Geo clone instructions', feature: true, js: true do
when 'ssh' when 'ssh'
project.ssh_url_to_repo project.ssh_url_to_repo
when 'http' when 'http'
project.http_url_to_repo(developer) project.http_url_to_repo
end end
end end
end end
...@@ -288,6 +288,18 @@ describe IssuesFinder do ...@@ -288,6 +288,18 @@ describe IssuesFinder do
expect(issues.count).to eq 0 expect(issues.count).to eq 0
end end
it 'returns disabled issues if the feature_availability_check param is set to false' do
[project1, project2].each do |project|
project.project_feature.update!(issues_access_level: ProjectFeature::DISABLED)
end
issues = described_class
.new(search_user, params.reverse_merge(scope: scope, state: 'opened', feature_availability_check: false))
.execute
expect(issues.count).to eq 3
end
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe Gitlab::LDAP::Person do describe Gitlab::LDAP::Person do
include LdapHelpers
it 'includes the EE module' do it 'includes the EE module' do
expect(described_class).to include(EE::Gitlab::LDAP::Person) expect(described_class).to include(EE::Gitlab::LDAP::Person)
end end
describe '.find_by_email' do
it 'tries finding for each configured email attribute' do
adapter = ldap_adapter
expect(adapter).to receive(:user).with('mail', 'jane@gitlab.com')
expect(adapter).to receive(:user).with('email', 'jane@gitlab.com')
expect(adapter).to receive(:user).with('userPrincipalName', 'jane@gitlab.com')
described_class.find_by_email('jane@gitlab.com', adapter)
end
end
describe '#kerberos_principal' do describe '#kerberos_principal' do
let(:entry) do let(:entry) do
ldif = "dn: cn=foo, dc=bar, dc=com\n" ldif = "dn: cn=foo, dc=bar, dc=com\n"
......
...@@ -178,15 +178,14 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -178,15 +178,14 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
before { project.add_master(user) } before { project.add_master(user) }
it 'returns an error if the rule denies tag deletion' do it 'returns an error if the rule denies tag deletion' do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, 'You cannot delete a tag')
expect(subject.message).to eq('You cannot delete a tag')
end end
context 'when tag is deleted in web UI' do context 'when tag is deleted in web UI' do
let(:protocol) { 'web' } let(:protocol) { 'web' }
it 'ignores the push rule' do it 'ignores the push rule' do
expect(subject.status).to be(true) expect(subject).to be_truthy
end end
end end
end end
...@@ -195,8 +194,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -195,8 +194,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
let(:push_rule) { create(:push_rule, :commit_message) } let(:push_rule) { create(:push_rule, :commit_message) }
it 'returns an error if the rule fails' do it 'returns an error if the rule fails' do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'")
expect(subject.message).to eq("Commit message does not follow the pattern '#{push_rule.commit_message_regex}'")
end end
end end
...@@ -211,15 +209,13 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -211,15 +209,13 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
it 'returns an error if the rule fails for the committer' do it 'returns an error if the rule fails for the committer' do
allow_any_instance_of(Commit).to receive(:committer_email).and_return('ana@invalid.com') allow_any_instance_of(Commit).to receive(:committer_email).and_return('ana@invalid.com')
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Committer's email 'ana@invalid.com' does not follow the pattern '.*@valid.com'")
expect(subject.message).to eq("Committer's email 'ana@invalid.com' does not follow the pattern '.*@valid.com'")
end end
it 'returns an error if the rule fails for the author' do it 'returns an error if the rule fails for the author' do
allow_any_instance_of(Commit).to receive(:author_email).and_return('joan@invalid.com') allow_any_instance_of(Commit).to receive(:author_email).and_return('joan@invalid.com')
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author's email 'joan@invalid.com' does not follow the pattern '.*@valid.com'")
expect(subject.message).to eq("Author's email 'joan@invalid.com' does not follow the pattern '.*@valid.com'")
end end
end end
...@@ -232,8 +228,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -232,8 +228,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
end end
it 'returns an error if the commit author is not a GitLab member' do it 'returns an error if the commit author is not a GitLab member' do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "Author 'some@mail.com' is not a member of team")
expect(subject.message).to eq("Author 'some@mail.com' is not a member of team")
end end
end end
...@@ -243,23 +238,14 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -243,23 +238,14 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
let(:push_rule) { create(:push_rule, file_name_regex: 'READ*') } let(:push_rule) { create(:push_rule, file_name_regex: 'READ*') }
it "returns an error if a new or renamed filed doesn't match the file name regex" do it "returns an error if a new or renamed filed doesn't match the file name regex" do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File name README was blacklisted by the pattern READ*.")
expect(subject.message).to eq("File name README was blacklisted by the pattern READ*.")
end end
end end
context 'blacklisted files check' do context 'blacklisted files check' do
let(:push_rule) { create(:push_rule, prevent_secrets: true) } let(:push_rule) { create(:push_rule, prevent_secrets: true) }
let(:checker) do
described_class.new(
changes,
project: project,
user_access: user_access,
protocol: protocol
)
end
it "returns status true if there is no blacklisted files" do it "returns true if there is no blacklisted files" do
new_rev = nil new_rev = nil
white_listed = white_listed =
...@@ -277,7 +263,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -277,7 +263,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
project.repository.commits_between(old_rev, new_rev) project.repository.commits_between(old_rev, new_rev)
) )
expect(checker.exec.status).to be(true) expect(subject).to be_truthy
end end
end end
...@@ -300,10 +286,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -300,10 +286,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
project.repository.commits_between(old_rev, new_rev) project.repository.commits_between(old_rev, new_rev)
) )
result = checker.exec expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, /File name #{file_path} was blacklisted by the pattern/)
expect(result.status).to be(false)
expect(result.message).to include("File name #{file_path} was blacklisted by the pattern")
end end
end end
end end
...@@ -315,8 +298,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -315,8 +298,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
before { allow_any_instance_of(Blob).to receive(:size).and_return(2.megabytes) } before { allow_any_instance_of(Blob).to receive(:size).and_return(2.megabytes) }
it 'returns an error if file exceeds the maximum file size' do it 'returns an error if file exceeds the maximum file size' do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "File \"README\" is larger than the allowed size of 1 MB")
expect(subject.message).to eq("File \"README\" is larger than the allowed size of 1 MB")
end end
end end
end end
...@@ -331,8 +313,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do ...@@ -331,8 +313,7 @@ describe Gitlab::Checks::ChangeAccess, lib: true do
end end
it 'returns an error if the changes update a path locked by another user' do it 'returns an error if the changes update a path locked by another user' do
expect(subject.status).to be(false) expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "The path 'README' is locked by #{path_lock.user.name}")
expect(subject.message).to eq("The path 'README' is locked by #{path_lock.user.name}")
end end
end end
end end
......
...@@ -378,8 +378,8 @@ describe Gitlab::Elastic::SearchResults, lib: true do ...@@ -378,8 +378,8 @@ describe Gitlab::Elastic::SearchResults, lib: true do
results = described_class.new(user, 'def', limit_project_ids) results = described_class.new(user, 'def', limit_project_ids)
blobs = results.objects('blobs') blobs = results.objects('blobs')
expect(blobs.first["_source"]["blob"]["content"]).to include("def") expect(blobs.first['_source']['blob']['content']).to include('def')
expect(results.blobs_count).to eq 5 expect(results.blobs_count).to eq 7
end end
it 'finds blobs from public projects only' do it 'finds blobs from public projects only' do
...@@ -388,10 +388,11 @@ describe Gitlab::Elastic::SearchResults, lib: true do ...@@ -388,10 +388,11 @@ describe Gitlab::Elastic::SearchResults, lib: true do
Gitlab::Elastic::Helper.refresh_index Gitlab::Elastic::Helper.refresh_index
results = described_class.new(user, 'def', [project_1.id]) results = described_class.new(user, 'def', [project_1.id])
expect(results.blobs_count).to eq 5 expect(results.blobs_count).to eq 7
results = described_class.new(user, 'def', [project_1.id, project_2.id]) results = described_class.new(user, 'def', [project_1.id, project_2.id])
expect(results.blobs_count).to eq 10 expect(results.blobs_count).to eq 14
end end
it 'returns zero when blobs are not found' do it 'returns zero when blobs are not found' do
...@@ -399,6 +400,45 @@ describe Gitlab::Elastic::SearchResults, lib: true do ...@@ -399,6 +400,45 @@ describe Gitlab::Elastic::SearchResults, lib: true do
expect(results.blobs_count).to eq 0 expect(results.blobs_count).to eq 0
end end
context 'Searches CamelCased methods' do
before do
project_1.repository.create_file(
user,
'test.txt',
' function writeStringToFile(){} ',
message: 'added test file',
branch_name: 'master')
project_1.repository.index_blobs
Gitlab::Elastic::Helper.refresh_index
end
def search_for(term)
blobs = described_class.new(user, term, [project_1.id]).objects('blobs')
blobs.map do |blob|
blob['_source']['blob']['path']
end
end
it 'finds by first word' do
expect(search_for('write')).to include('test.txt')
end
it 'finds by first two words' do
expect(search_for('writeString')).to include('test.txt')
end
it 'finds by last two words' do
expect(search_for('ToFile')).to include('test.txt')
end
it 'finds by exact match' do
expect(search_for('writeStringToFile')).to include('test.txt')
end
end
end end
describe 'Wikis' do describe 'Wikis' do
...@@ -415,7 +455,7 @@ describe Gitlab::Elastic::SearchResults, lib: true do ...@@ -415,7 +455,7 @@ describe Gitlab::Elastic::SearchResults, lib: true do
it 'finds wiki blobs' do it 'finds wiki blobs' do
blobs = results.objects('wiki_blobs') blobs = results.objects('wiki_blobs')
expect(blobs.first["_source"]["blob"]["content"]).to include("term") expect(blobs.first['_source']['blob']['content']).to include("term")
expect(results.wiki_blobs_count).to eq 1 expect(results.wiki_blobs_count).to eq 1
end end
...@@ -423,7 +463,7 @@ describe Gitlab::Elastic::SearchResults, lib: true do ...@@ -423,7 +463,7 @@ describe Gitlab::Elastic::SearchResults, lib: true do
project_1.add_guest(user) project_1.add_guest(user)
blobs = results.objects('wiki_blobs') blobs = results.objects('wiki_blobs')
expect(blobs.first["_source"]["blob"]["content"]).to include("term") expect(blobs.first['_source']['blob']['content']).to include("term")
expect(results.wiki_blobs_count).to eq 1 expect(results.wiki_blobs_count).to eq 1
end end
......
...@@ -8,6 +8,10 @@ describe Gitlab::GitAccess, lib: true do ...@@ -8,6 +8,10 @@ describe Gitlab::GitAccess, lib: true do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:actor) { user } let(:actor) { user }
let(:protocol) { 'ssh' } let(:protocol) { 'ssh' }
<<<<<<< HEAD
=======
>>>>>>> master
let(:authentication_abilities) do let(:authentication_abilities) do
[ [
:read_project, :read_project,
...@@ -168,6 +172,7 @@ describe Gitlab::GitAccess, lib: true do ...@@ -168,6 +172,7 @@ describe Gitlab::GitAccess, lib: true do
before do before do
allow(Gitlab.config.gitlab_shell).to receive(:upload_pack).and_return(false) allow(Gitlab.config.gitlab_shell).to receive(:upload_pack).and_return(false)
end end
<<<<<<< HEAD
context 'when calling git-upload-pack' do context 'when calling git-upload-pack' do
it { expect { pull_access_check }.to raise_unauthorized('Pulling over HTTP is not allowed.') } it { expect { pull_access_check }.to raise_unauthorized('Pulling over HTTP is not allowed.') }
...@@ -194,6 +199,34 @@ describe Gitlab::GitAccess, lib: true do ...@@ -194,6 +199,34 @@ describe Gitlab::GitAccess, lib: true do
end end
end end
=======
context 'when calling git-upload-pack' do
it { expect { pull_access_check }.to raise_unauthorized('Pulling over HTTP is not allowed.') }
end
context 'when calling git-receive-pack' do
it { expect { push_access_check }.not_to raise_error }
end
end
context 'when the git-receive-pack command is disabled in config' do
before do
allow(Gitlab.config.gitlab_shell).to receive(:receive_pack).and_return(false)
end
context 'when calling git-receive-pack' do
it { expect { push_access_check }.to raise_unauthorized('Pushing over HTTP is not allowed.') }
end
context 'when calling git-upload-pack' do
it { expect { pull_access_check }.not_to raise_error }
end
end
end
end
>>>>>>> master
describe '#check_download_access!' do describe '#check_download_access!' do
describe 'master permissions' do describe 'master permissions' do
before { project.team << [user, :master] } before { project.team << [user, :master] }
...@@ -856,13 +889,12 @@ describe Gitlab::GitAccess, lib: true do ...@@ -856,13 +889,12 @@ describe Gitlab::GitAccess, lib: true do
end end
context 'when the repository is read only' do context 'when the repository is read only' do
let(:project) { create(:project, :read_only_repository) }
it 'denies push access' do it 'denies push access' do
project = create(:project, :read_only_repository)
project.team << [user, :master] project.team << [user, :master]
check = access.check('git-receive-pack', '_any') expect { push_access_check }.to raise_unauthorized('The repository is temporarily read-only. Please try again later.')
expect(check).not_to be_allowed
end end
end end
......
...@@ -2,7 +2,7 @@ require 'spec_helper' ...@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::GitAccessWiki, lib: true do describe Gitlab::GitAccessWiki, lib: true do
let(:access) { Gitlab::GitAccessWiki.new(user, project, 'web', authentication_abilities: authentication_abilities) } let(:access) { Gitlab::GitAccessWiki.new(user, project, 'web', authentication_abilities: authentication_abilities) }
let(:project) { create(:project, :repository) } let!(:project) { create(:project, :repository) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] } let(:changes) { ['6f6d7e7ed 570e7b2ab refs/heads/master'] }
let(:authentication_abilities) do let(:authentication_abilities) do
...@@ -28,9 +28,16 @@ describe Gitlab::GitAccessWiki, lib: true do ...@@ -28,9 +28,16 @@ describe Gitlab::GitAccessWiki, lib: true do
before do before do
allow(Gitlab::Geo).to receive(:enabled?) { true } allow(Gitlab::Geo).to receive(:enabled?) { true }
allow(Gitlab::Geo).to receive(:secondary?) { true } allow(Gitlab::Geo).to receive(:secondary?) { true }
allow(Gitlab::Geo).to receive(:license_allows?) { true }
end end
<<<<<<< HEAD
it { expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a secondary GitLab Geo node.") } it { expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a secondary GitLab Geo node.") }
=======
it 'does not give access to upload wiki code' do
expect { subject }.to raise_error(Gitlab::GitAccess::UnauthorizedError, "You can't push code to a secondary GitLab Geo node.")
end
>>>>>>> master
end end
end end
end end
......
...@@ -5,6 +5,23 @@ describe Gitlab::LDAP::Access, lib: true do ...@@ -5,6 +5,23 @@ describe Gitlab::LDAP::Access, lib: true do
let(:access) { Gitlab::LDAP::Access.new user } let(:access) { Gitlab::LDAP::Access.new user }
let(:user) { create(:omniauth_user) } let(:user) { create(:omniauth_user) }
describe '#find_ldap_user' do
it 'finds a user by dn first' do
expect(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(:ldap_user)
expect(user).not_to receive(:ldap_email?)
access.find_ldap_user
end
it 'finds a user by email if the email came from LDAP' do
expect(Gitlab::LDAP::Person).to receive(:find_by_dn).and_return(nil)
expect(user).to receive(:ldap_email?).and_return(true)
expect(Gitlab::LDAP::Person).to receive(:find_by_email)
access.find_ldap_user
end
end
describe '#allowed?' do describe '#allowed?' do
subject { access.allowed? } subject { access.allowed? }
...@@ -193,6 +210,12 @@ describe Gitlab::LDAP::Access, lib: true do ...@@ -193,6 +210,12 @@ describe Gitlab::LDAP::Access, lib: true do
subject subject
end end
it 'updates the ldap identity' do
expect(access).to receive(:update_identity)
subject
end
end end
describe '#update_kerberos_identity' do describe '#update_kerberos_identity' do
...@@ -358,4 +381,19 @@ describe Gitlab::LDAP::Access, lib: true do ...@@ -358,4 +381,19 @@ describe Gitlab::LDAP::Access, lib: true do
access.update_memberships access.update_memberships
end end
end end
describe '#update_identity' do
it 'updates the external UID if it changed in the entry' do
entry = ldap_user_entry('another uid')
provider = user.ldap_identity.provider
person = Gitlab::LDAP::Person.new(entry, provider)
allow(access).to receive(:ldap_user).and_return(person)
access.update_identity
expect(user.ldap_identity.reload.extern_uid)
.to eq('uid=another uid,ou=users,dc=example,dc=com')
end
end
end end
...@@ -104,13 +104,101 @@ describe Gitlab::Shell, lib: true do ...@@ -104,13 +104,101 @@ describe Gitlab::Shell, lib: true do
end end
describe '#add_key' do describe '#add_key' do
it 'removes trailing garbage' do context 'when authorized_keys_enabled is true' do
allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path) it 'removes trailing garbage' do
expect(Gitlab::Utils).to receive(:system_silent).with( allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
[:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar'] expect(Gitlab::Utils).to receive(:system_silent).with(
) [:gitlab_shell_keys_path, 'add-key', 'key-123', 'ssh-rsa foobar']
)
gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
end
end
context 'when authorized_keys_enabled is false' do
before do
stub_application_setting(authorized_keys_enabled: false)
end
it 'does nothing' do
expect(Gitlab::Utils).not_to receive(:system_silent)
gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage')
end
end
end
describe '#batch_add_keys' do
context 'when authorized_keys_enabled is true' do
it 'instantiates KeyAdder' do
expect_any_instance_of(Gitlab::Shell::KeyAdder).to receive(:add_key).with('key-123', 'ssh-rsa foobar')
gitlab_shell.batch_add_keys do |adder|
adder.add_key('key-123', 'ssh-rsa foobar')
end
end
end
context 'when authorized_keys_enabled is false' do
before do
stub_application_setting(authorized_keys_enabled: false)
end
it 'does nothing' do
expect_any_instance_of(Gitlab::Shell::KeyAdder).not_to receive(:add_key)
gitlab_shell.batch_add_keys do |adder|
adder.add_key('key-123', 'ssh-rsa foobar')
end
end
end
end
gitlab_shell.add_key('key-123', 'ssh-rsa foobar trailing garbage') describe '#remove_key' do
context 'when authorized_keys_enabled is true' do
it 'removes the key by id and content' do
allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
expect(Gitlab::Utils).to receive(:system_silent).with(
[:gitlab_shell_keys_path, 'rm-key', 'key-123', 'ssh-rsa foobar']
)
gitlab_shell.remove_key('key-123', 'ssh-rsa foobar')
end
end
context 'when authorized_keys_enabled is false' do
before do
stub_application_setting(authorized_keys_enabled: false)
end
it 'does nothing' do
expect(Gitlab::Utils).not_to receive(:system_silent)
gitlab_shell.remove_key('key-123', 'ssh-rsa foobar')
end
end
end
describe '#remove_all_keys' do
context 'when authorized_keys_enabled is true' do
it 'clears all keys' do
allow(gitlab_shell).to receive(:gitlab_shell_keys_path).and_return(:gitlab_shell_keys_path)
expect(Gitlab::Utils).to receive(:system_silent).with([:gitlab_shell_keys_path, 'clear'])
gitlab_shell.remove_all_keys
end
end
context 'when authorized_keys_enabled is false' do
before do
stub_application_setting(authorized_keys_enabled: false)
end
it 'does nothing' do
expect(Gitlab::Utils).not_to receive(:system_silent)
gitlab_shell.remove_all_keys
end
end end
end end
......
require 'spec_helper'
RSpec.describe Geo::EventLog, type: :model do
describe 'relationships' do
it { is_expected.to belong_to(:repository_updated_event).class_name('Geo::RepositoryUpdatedEvent').with_foreign_key('repository_updated_event_id') }
end
end
require 'spec_helper'
RSpec.describe Geo::RepositoryUpdatedEvent, type: :model do
describe 'relationships' do
it { is_expected.to belong_to(:project) }
end
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
end
describe '#source' do
it { is_expected.to define_enum_for(:source).with([:repository, :wiki]) }
end
end
require 'spec_helper'
describe IssueLink do
describe 'Associations' do
it { is_expected.to belong_to(:source).class_name('Issue') }
it { is_expected.to belong_to(:target).class_name('Issue') }
end
describe 'Validation' do
subject { create :issue_link }
it { is_expected.to validate_presence_of(:source) }
it { is_expected.to validate_presence_of(:target) }
it do
is_expected.to validate_uniqueness_of(:source)
.scoped_to(:target_id)
.with_message(/already related/)
end
context 'self relation' do
let(:issue) { create :issue }
context 'cannot be validated' do
it 'does not invalidate object with self relation error' do
issue_link = build :issue_link, source: issue, target: nil
issue_link.valid?
expect(issue_link.errors[:source]).to be_empty
end
end
context 'can be invalidated' do
it 'invalidates object' do
issue_link = build :issue_link, source: issue, target: issue
expect(issue_link).to be_invalid
expect(issue_link.errors[:source]).to include('cannot be related to itself')
end
end
end
end
end
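The model spec above expects presence validations on both ends, a uniqueness check on source scoped to target_id with an "already related" message, and a guard against linking an issue to itself. The IssueLink model itself is not part of this hunk; purely as an illustration, a validation satisfying those expectations could look roughly like the following (assumed sketch, not the actual implementation):

  # Assumed sketch only; the real IssueLink model is not shown in this diff.
  class IssueLink < ActiveRecord::Base
    belongs_to :source, class_name: 'Issue'
    belongs_to :target, class_name: 'Issue'

    validates :source, presence: true
    validates :target, presence: true
    validates :source, uniqueness: { scope: :target_id, message: 'is already related to this issue' }

    validate :check_self_relation

    private

    def check_self_relation
      # Skip when either side is missing so the presence validation reports that instead.
      return unless source && target

      errors.add(:source, 'cannot be related to itself') if source == target
    end
  end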
...@@ -10,10 +10,14 @@ describe ProjectPolicy, models: true do ...@@ -10,10 +10,14 @@ describe ProjectPolicy, models: true do
let(:admin) { create(:admin) } let(:admin) { create(:admin) }
let(:project) { create(:empty_project, :public, namespace: owner.namespace) } let(:project) { create(:empty_project, :public, namespace: owner.namespace) }
before do
allow_any_instance_of(License).to receive(:feature_available?) { true }
end
let(:guest_permissions) do let(:guest_permissions) do
%i[ %i[
read_project read_board read_list read_wiki read_issue read_label read_project read_board read_list read_wiki read_issue read_label
read_milestone read_project_snippet read_project_member read_issue_link read_milestone read_project_snippet read_project_member
read_note create_project create_issue create_note read_note create_project create_issue create_note
upload_file upload_file
] ]
...@@ -22,7 +26,7 @@ describe ProjectPolicy, models: true do ...@@ -22,7 +26,7 @@ describe ProjectPolicy, models: true do
let(:reporter_permissions) do let(:reporter_permissions) do
%i[ %i[
download_code fork_project create_project_snippet update_issue download_code fork_project create_project_snippet update_issue
admin_issue admin_label admin_list read_commit_status read_build admin_issue admin_label admin_issue_link admin_list read_commit_status read_build
read_container_image read_pipeline read_environment read_deployment read_container_image read_pipeline read_environment read_deployment
read_merge_request download_wiki_code read_merge_request download_wiki_code
] ]
...@@ -71,7 +75,7 @@ describe ProjectPolicy, models: true do ...@@ -71,7 +75,7 @@ describe ProjectPolicy, models: true do
let(:auditor_permissions) do let(:auditor_permissions) do
%i[ %i[
download_code download_wiki_code read_project read_board read_list download_code download_wiki_code read_project read_board read_list
read_wiki read_issue read_label read_milestone read_project_snippet read_wiki read_issue read_label read_issue_link read_milestone read_project_snippet
read_project_member read_note read_cycle_analytics read_pipeline read_project_member read_note read_cycle_analytics read_pipeline
read_build read_commit_status read_container_image read_environment read_build read_commit_status read_container_image read_environment
read_deployment read_merge_request read_pages read_deployment read_merge_request read_pages
......
...@@ -36,12 +36,6 @@ describe API::Triggers do ...@@ -36,12 +36,6 @@ describe API::Triggers do
expect(response).to have_http_status(404) expect(response).to have_http_status(404)
end end
it 'returns unauthorized if token is for different project' do
post api("/projects/#{project2.id}/trigger/pipeline"), options.merge(ref: 'master')
expect(response).to have_http_status(401)
end
end end
context 'Have a commit' do context 'Have a commit' do
...@@ -61,7 +55,7 @@ describe API::Triggers do ...@@ -61,7 +55,7 @@ describe API::Triggers do
post api("/projects/#{project.id}/trigger/pipeline"), options.merge(ref: 'other-branch') post api("/projects/#{project.id}/trigger/pipeline"), options.merge(ref: 'other-branch')
expect(response).to have_http_status(400) expect(response).to have_http_status(400)
expect(json_response['message']).to eq('No pipeline created') expect(json_response['message']).to eq('base' => ["Reference not found"])
end end
context 'Validates variables' do context 'Validates variables' do
...@@ -93,6 +87,12 @@ describe API::Triggers do ...@@ -93,6 +87,12 @@ describe API::Triggers do
end end
context 'when triggering a pipeline from a trigger token' do context 'when triggering a pipeline from a trigger token' do
it 'does not leak the presence of the project when the token is for a different project' do
post api("/projects/#{project2.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), { ref: 'refs/heads/other-branch' }
expect(response).to have_http_status(404)
end
it 'creates builds from the ref given in the URL, not in the body' do it 'creates builds from the ref given in the URL, not in the body' do
expect do expect do
post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), { ref: 'refs/heads/other-branch' } post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), { ref: 'refs/heads/other-branch' }
...@@ -113,6 +113,93 @@ describe API::Triggers do ...@@ -113,6 +113,93 @@ describe API::Triggers do
end end
end end
end end
context 'when triggering a pipeline from a job token' do
let(:other_job) { create(:ci_build, :running, user: other_user) }
let(:params) { { ref: 'refs/heads/other-branch' } }
subject do
post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{other_job.token}"), params
end
context 'without user' do
let(:other_user) { nil }
it 'does not leak the presence of the project when using a valid token' do
subject
expect(response).to have_http_status(404)
end
end
context 'for unrelated user' do
let(:other_user) { create(:user) }
it 'does not leak the presence of the project when using a valid token' do
subject
expect(response).to have_http_status(404)
end
end
context 'for related user' do
let(:other_user) { create(:user) }
context 'with reporter permissions' do
before do
project.add_reporter(other_user)
end
it 'forbids creating a pipeline' do
subject
expect(response).to have_http_status(400)
expect(json_response['message']).to eq("base" => ["Insufficient permissions to create a new pipeline"])
end
end
context 'with developer permissions' do
before do
project.add_developer(other_user)
end
it 'creates a new pipeline' do
expect { subject }.to change(Ci::Pipeline, :count)
expect(response).to have_http_status(201)
expect(Ci::Pipeline.last.source).to eq('pipeline')
expect(Ci::Pipeline.last.triggered_by_pipeline).not_to be_nil
end
context 'when build is complete' do
before do
other_job.success
end
it 'does not create a pipeline' do
subject
expect(response).to have_http_status(400)
expect(json_response['message']).to eq('400 Job has to be running')
end
end
context 'when variables are defined' do
let(:params) do
{ ref: 'refs/heads/other-branch',
variables: { 'KEY' => 'VALUE' } }
end
it 'forbids creating a pipeline' do
subject
expect(response).to have_http_status(400)
expect(json_response['message']).to eq('400 Variables not supported')
end
end
end
end
end
end end
describe 'GET /projects/:id/triggers' do describe 'GET /projects/:id/triggers' do
......
require 'rails_helper'
describe Projects::IssueLinksController do
let(:user) { create :user }
let(:project) { create(:project_empty_repo) }
let(:issue) { create :issue, project: project }
before do
allow_any_instance_of(License).to receive(:feature_available?) { false }
allow_any_instance_of(License).to receive(:feature_available?).with(:related_issues) { true }
end
describe 'GET /*namespace_id/:project_id/issues/:issue_id/links' do
let(:issue_b) { create :issue, project: project }
let!(:issue_link) { create :issue_link, source: issue, target: issue_b }
before do
project.team << [user, :guest]
login_as user
end
it 'returns JSON response' do
list_service_response = IssueLinks::ListService.new(issue, user).execute
get namespace_project_issue_links_path(issue_links_params)
expect(response).to have_http_status(200)
expect(json_response).to eq(list_service_response.as_json)
end
end
describe 'POST /*namespace_id/:project_id/issues/:issue_id/links' do
let(:issue_b) { create :issue, project: project }
before do
project.team << [user, user_role]
login_as user
end
context 'with success' do
let(:user_role) { :developer }
let(:issue_references) { [issue_b.to_reference] }
it 'returns success JSON' do
post namespace_project_issue_links_path(issue_links_params(issue_references: issue_references))
list_service_response = IssueLinks::ListService.new(issue, user).execute
expect(response).to have_http_status(200)
expect(json_response).to eq('message' => nil,
'issues' => list_service_response.as_json)
end
end
context 'with failure' do
context 'when unauthorized' do
let(:user_role) { :guest }
let(:issue_references) { [issue_b.to_reference] }
it 'returns 403' do
post namespace_project_issue_links_path(issue_links_params(issue_references: issue_references))
expect(response).to have_http_status(403)
end
end
context 'when the service result is a failure' do
let(:user_role) { :developer }
let(:issue_references) { ['#999'] }
it 'returns failure JSON' do
post namespace_project_issue_links_path(issue_links_params(issue_references: issue_references))
list_service_response = IssueLinks::ListService.new(issue, user).execute
expect(response).to have_http_status(401)
expect(json_response).to eq('message' => 'No Issue found for given reference', 'issues' => list_service_response.as_json)
end
end
end
end
describe 'DELETE /*namespace_id/:project_id/issues/:issue_id/link/:id' do
let(:issue_link) { create :issue_link, target: referenced_issue }
before do
project.team << [user, user_role]
login_as user
end
context 'when unauthorized' do
context 'when no authorization on current project' do
let(:referenced_issue) { create :issue, project: project }
let(:user_role) { :guest }
it 'returns 403' do
delete namespace_project_issue_link_path(issue_links_params(id: issue_link.id))
expect(response).to have_http_status(403)
end
end
context 'when no authorization on the related issue project' do
# unauthorized project issue
let(:referenced_issue) { create :issue }
let(:user_role) { :developer }
it 'returns 403' do
delete namespace_project_issue_link_path(issue_links_params(id: issue_link.id))
expect(response).to have_http_status(403)
end
end
end
context 'when authorized' do
let(:referenced_issue) { create :issue, project: project }
let(:user_role) { :developer }
it 'returns success JSON' do
delete namespace_project_issue_link_path(issue_links_params(id: issue_link.id))
list_service_response = IssueLinks::ListService.new(issue, user).execute
expect(json_response).to eq('issues' => list_service_response.as_json)
end
end
end
def issue_links_params(opts = {})
opts.reverse_merge(namespace_id: issue.project.namespace,
project_id: issue.project,
issue_id: issue,
format: :json)
end
end
require 'spec_helper'
describe WikiPages::CreateService, services: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:opts) do
{
title: 'Title',
content: 'Content for wiki page',
format: 'markdown'
}
end
subject(:service) { described_class.new(project, user, opts) }
before do
project.add_master(user)
end
describe '#execute' do
context 'when running on a Geo primary node' do
before do
allow(Gitlab::Geo).to receive(:primary?) { true }
end
it 'triggers Geo::RepositoryUpdatedEventStore when Geo is enabled' do
expect(Geo::RepositoryUpdatedEventStore).to receive(:new).with(instance_of(Project), source: Geo::RepositoryUpdatedEvent::WIKI).and_call_original
expect_any_instance_of(Geo::RepositoryUpdatedEventStore).to receive(:create)
service.execute
end
it 'triggers wiki update on secondary nodes' do
expect(Gitlab::Geo).to receive(:notify_wiki_update).with(instance_of(Project))
service.execute
end
end
end
end
require 'spec_helper'
describe WikiPages::DestroyService, services: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
subject(:service) { described_class.new(project, user) }
before do
project.add_master(user)
end
describe '#execute' do
context 'when running on a Geo primary node' do
before do
allow(Gitlab::Geo).to receive(:primary?) { true }
end
it 'triggers Geo::RepositoryUpdatedEventStore when Geo is enabled' do
expect(Geo::RepositoryUpdatedEventStore).to receive(:new).with(instance_of(Project), source: Geo::RepositoryUpdatedEvent::WIKI).and_call_original
expect_any_instance_of(Geo::RepositoryUpdatedEventStore).to receive(:create)
service.execute(page)
end
it 'triggers wiki update on secondary nodes' do
expect(Gitlab::Geo).to receive(:notify_wiki_update).with(instance_of(Project))
service.execute(page)
end
end
end
end
require 'spec_helper'
describe WikiPages::UpdateService, services: true do
let(:project) { create(:empty_project) }
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
let(:opts) do
{
content: 'New content for wiki page',
format: 'markdown',
message: 'New wiki message'
}
end
subject(:service) { described_class.new(project, user, opts) }
before do
project.add_master(user)
end
describe '#execute' do
context 'when running on a Geo primary node' do
before do
allow(Gitlab::Geo).to receive(:primary?) { true }
end
it 'triggers Geo::RepositoryUpdatedEventStore when Geo is enabled' do
expect(Geo::RepositoryUpdatedEventStore).to receive(:new).with(instance_of(Project), source: Geo::RepositoryUpdatedEvent::WIKI).and_call_original
expect_any_instance_of(Geo::RepositoryUpdatedEventStore).to receive(:create)
service.execute(page)
end
it 'triggers wiki update on secondary nodes' do
expect(Gitlab::Geo).to receive(:notify_wiki_update).with(instance_of(Project))
service.execute(page)
end
end
end
end
require 'spec_helper'
describe Geo::RepositoryUpdatedEventStore, services: true do
let(:project) { create(:project) }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:refs) { ['refs/heads/tést', 'refs/tags/tag'] }
let(:changes) do
[
{ before: '123456', after: '789012', ref: 'refs/heads/tést' },
{ before: '654321', after: '210987', ref: 'refs/tags/tag' }
]
end
describe '#create' do
it 'does not create a push event when not running on a primary node' do
allow(Gitlab::Geo).to receive(:primary?) { false }
subject = described_class.new(project, refs: refs, changes: changes)
expect { subject.create }.not_to change(Geo::RepositoryUpdatedEvent, :count)
end
context 'when running on a primary node' do
before do
allow(Gitlab::Geo).to receive(:primary?) { true }
end
it 'creates a push event' do
subject = described_class.new(project, refs: refs, changes: changes)
expect { subject.create }.to change(Geo::RepositoryUpdatedEvent, :count).by(1)
end
context 'when repository is being updated' do
it 'does not track the ref name when the post-receive event affects multiple refs' do
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.ref).to be_nil
end
it 'tracks the ref name when the post-receive event affects a single ref' do
refs = ['refs/heads/tést']
changes = [{ before: '123456', after: blankrev, ref: 'refs/heads/tést' }]
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.ref).to eq 'refs/heads/tést'
end
it 'tracks the number of branches the post-receive event affects' do
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.branches_affected).to eq 1
end
it 'tracks the number of tags the post-receive event affects' do
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.tags_affected).to eq 1
end
it 'tracks when the post-receive event creates new branches' do
refs = ['refs/heads/tést', 'refs/heads/feature']
changes = [
{ before: '123456', after: '789012', ref: 'refs/heads/tést' },
{ before: blankrev, after: '210987', ref: 'refs/heads/feature' }
]
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.new_branch).to eq true
end
it 'tracks when the post-receive event removes branches' do
refs = ['refs/heads/tést', 'refs/heads/feature']
changes = [
{ before: '123456', after: '789012', ref: 'refs/heads/tést' },
{ before: '654321', after: blankrev, ref: 'refs/heads/feature' }
]
subject = described_class.new(project, refs: refs, changes: changes)
subject.create
expect(Geo::RepositoryUpdatedEvent.last.remove_branch).to eq true
end
end
context 'when wiki is being updated' do
it 'does not track any information' do
subject = described_class.new(project, source: Geo::RepositoryUpdatedEvent::WIKI)
subject.create
push_event = Geo::RepositoryUpdatedEvent.last
expect(push_event.ref).to be_nil
expect(push_event.branches_affected).to be_zero
expect(push_event.tags_affected).to be_zero
expect(push_event.new_branch).to eq false
expect(push_event.remove_branch).to eq false
end
end
end
end
end
require 'spec_helper'
describe IssueLinks::CreateService, service: true do
describe '#execute' do
let(:namespace) { create :namespace }
let(:project) { create :empty_project, namespace: namespace }
let(:issue) { create :issue, project: project }
let(:user) { create :user }
let(:params) do
{}
end
before do
allow_any_instance_of(License).to receive(:feature_available?) { false }
allow_any_instance_of(License).to receive(:feature_available?).with(:related_issues) { true }
project.team << [user, :developer]
end
subject { described_class.new(issue, user, params).execute }
context 'when the reference list is empty' do
let(:params) do
{ issue_references: [] }
end
it 'returns error' do
is_expected.to eq(message: 'No Issue found for given reference', status: :error, http_status: 401)
end
end
context 'when Issue not found' do
let(:params) do
{ issue_references: ['#999'] }
end
it 'returns error' do
is_expected.to eq(message: 'No Issue found for given reference', status: :error, http_status: 401)
end
it 'does not create a relationship' do
expect { subject }.not_to change(IssueLink, :count)
end
end
context 'when user has no permission to target project Issue' do
let(:target_issue) { create :issue }
let(:params) do
{ issue_references: [target_issue.to_reference(project)] }
end
it 'returns error' do
target_issue.project.add_guest(user)
is_expected.to eq(message: 'No Issue found for given reference', status: :error, http_status: 401)
end
it 'does not create a relationship' do
expect { subject }.not_to change(IssueLink, :count)
end
end
context 'when there is an issue to relate' do
let(:issue_a) { create :issue, project: project }
let(:another_project) { create :empty_project, namespace: project.namespace }
let(:another_project_issue) { create :issue, project: another_project }
let(:issue_a_ref) { issue_a.to_reference }
let(:another_project_issue_ref) { another_project_issue.to_reference(project) }
let(:params) do
{ issue_references: [issue_a_ref, another_project_issue_ref] }
end
before do
another_project.team << [user, :developer]
end
it 'creates relationships' do
expect { subject }.to change(IssueLink, :count).from(0).to(2)
expect(IssueLink.find_by!(target: issue_a)).to have_attributes(source: issue)
expect(IssueLink.find_by!(target: another_project_issue)).to have_attributes(source: issue)
end
it 'returns success status' do
is_expected.to eq(status: :success)
end
it 'creates notes' do
# First two-way relation notes
expect(SystemNoteService).to receive(:relate_issue)
.with(issue, issue_a, user)
expect(SystemNoteService).to receive(:relate_issue)
.with(issue_a, issue, user)
# Second two-way relation notes
expect(SystemNoteService).to receive(:relate_issue)
.with(issue, another_project_issue, user)
expect(SystemNoteService).to receive(:relate_issue)
.with(another_project_issue, issue, user)
subject
end
end
context 'when reference of any already related issue is present' do
let(:issue_a) { create :issue, project: project }
let(:issue_b) { create :issue, project: project }
before do
create :issue_link, source: issue, target: issue_a
end
let(:params) do
{ issue_references: [issue_b.to_reference, issue_a.to_reference] }
end
it 'returns success status' do
is_expected.to eq(status: :success)
end
it 'creates the valid relations' do
expect { subject }.to change(IssueLink, :count).from(1).to(2)
expect(IssueLink.find_by!(target: issue_b)).to have_attributes(source: issue)
end
end
end
end
require 'spec_helper'
describe IssueLinks::DestroyService, service: true do
describe '#execute' do
let(:user) { create :user }
let!(:issue_link) { create :issue_link }
subject { described_class.new(issue_link, user).execute }
it 'removes related issue' do
expect { subject }.to change(IssueLink, :count).from(1).to(0)
end
it 'creates notes' do
# Two-way notes creation
expect(SystemNoteService).to receive(:unrelate_issue)
.with(issue_link.source, issue_link.target, user)
expect(SystemNoteService).to receive(:unrelate_issue)
.with(issue_link.target, issue_link.source, user)
subject
end
it 'returns success message' do
is_expected.to eq(message: 'Relation was removed', status: :success)
end
end
end
require 'spec_helper'
describe IssueLinks::ListService, service: true do
let(:user) { create :user }
let(:project) { create(:project_empty_repo, :private) }
let(:issue) { create :issue, project: project }
let(:user_role) { :developer }
before do
allow_any_instance_of(License).to receive(:feature_available?) { false }
allow_any_instance_of(License).to receive(:feature_available?).with(:related_issues) { true }
project.team << [user, user_role]
end
describe '#execute' do
subject { described_class.new(issue, user).execute }
context 'user can see all issues' do
let(:issue_b) { create :issue, project: project }
let(:issue_c) { create :issue, project: project }
let(:issue_d) { create :issue, project: project }
let!(:issue_link_c) do
create(:issue_link, source: issue_d,
target: issue)
end
let!(:issue_link_b) do
create(:issue_link, source: issue,
target: issue_c)
end
let!(:issue_link_a) do
create(:issue_link, source: issue,
target: issue_b)
end
it 'ensures no N+1 queries are made' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
project = create :empty_project, :public
issue_x = create :issue, project: project
issue_y = create :issue, project: project
issue_z = create :issue, project: project
create :issue_link, source: issue_x, target: issue_y
create :issue_link, source: issue_x, target: issue_z
create :issue_link, source: issue_y, target: issue_z
expect { subject }.not_to exceed_query_limit(control_count)
end
it 'returns related issues JSON' do
expect(subject.size).to eq(3)
expect(subject).to include(include(id: issue_b.id,
iid: issue_b.iid,
title: issue_b.title,
state: issue_b.state,
path: "/#{project.full_path}/issues/#{issue_b.iid}",
project_path: issue_b.project.path,
namespace_full_path: issue_b.project.namespace.full_path,
destroy_relation_path: "/#{project.full_path}/issues/#{issue.iid}/links/#{issue_link_a.id}"))
expect(subject).to include(include(id: issue_c.id,
iid: issue_c.iid,
title: issue_c.title,
state: issue_c.state,
path: "/#{project.full_path}/issues/#{issue_c.iid}",
project_path: issue_c.project.path,
namespace_full_path: issue_c.project.namespace.full_path,
destroy_relation_path: "/#{project.full_path}/issues/#{issue.iid}/links/#{issue_link_b.id}"))
expect(subject).to include(include(id: issue_d.id,
iid: issue_d.iid,
title: issue_d.title,
state: issue_d.state,
path: "/#{project.full_path}/issues/#{issue_d.iid}",
project_path: issue_d.project.path,
namespace_full_path: issue_d.project.namespace.full_path,
destroy_relation_path: "/#{project.full_path}/issues/#{issue.iid}/links/#{issue_link_c.id}"))
end
end
context 'referencing a public project issue' do
let(:public_project) { create :empty_project, :public }
let(:issue_b) { create :issue, project: public_project }
let!(:issue_link) do
create(:issue_link, source: issue, target: issue_b)
end
it 'presents issue' do
expect(subject.size).to eq(1)
end
end
context 'referencing issue with removed relationships' do
context 'when the referenced issue has been deleted' do
let(:issue_b) { create :issue, project: project }
let!(:issue_link) do
create(:issue_link, source: issue, target: issue_b)
end
it 'ignores issue' do
issue_b.destroy!
is_expected.to eq([])
end
end
context 'when the referenced issue belongs to a deleted project' do
let(:issue_b) { create :issue, project: project }
let!(:issue_link) do
create(:issue_link, source: issue, target: issue_b)
end
it 'ignores issue' do
project.destroy!
is_expected.to eq([])
end
end
context 'when the referenced issue belongs to a deleted namespace' do
let(:issue_b) { create :issue, project: project }
let!(:issue_link) do
create(:issue_link, source: issue, target: issue_b)
end
it 'ignores issue' do
project.namespace.destroy!
is_expected.to eq([])
end
end
end
context 'user cannot see relations' do
context 'when user cannot see the referenced issue' do
let!(:issue_link) do
create(:issue_link, source: issue)
end
it 'returns an empty list' do
is_expected.to eq([])
end
end
context 'when the user cannot see the issue that referenced it' do
let!(:issue_link) do
create(:issue_link, target: issue)
end
it 'returns an empty list' do
is_expected.to eq([])
end
end
end
context 'remove relations' do
let!(:issue_link) do
create(:issue_link, source: issue, target: referenced_issue)
end
context 'user can admin related issues just on target project' do
let(:user_role) { :guest }
let(:target_project) { create :empty_project }
let(:referenced_issue) { create :issue, project: target_project }
it 'returns no destroy relation path' do
target_project.add_developer(user)
expect(subject.first[:destroy_relation_path]).to be_nil
end
end
context 'user can admin related issues just on source project' do
let(:user_role) { :developer }
let(:target_project) { create :empty_project }
let(:referenced_issue) { create :issue, project: target_project }
it 'returns no destroy relation path' do
target_project.add_guest(user)
expect(subject.first[:destroy_relation_path]).to be_nil
end
end
context 'when user can admin related issues on both projects' do
let(:referenced_issue) { create :issue, project: project }
it 'returns related issue destroy relation path' do
expect(subject.first[:destroy_relation_path])
.to eq("/#{project.full_path}/issues/#{issue.iid}/links/#{issue_link.id}")
end
end
end
end
end
...@@ -899,6 +899,38 @@ describe SystemNoteService, services: true do ...@@ -899,6 +899,38 @@ describe SystemNoteService, services: true do
end end
end end
describe '.relate_issue' do
let(:noteable_ref) { create(:issue) }
subject { described_class.relate_issue(noteable, noteable_ref, author) }
it_behaves_like 'a system note' do
let(:action) { 'relate' }
end
context 'when issue marks another as related' do
it 'sets the note text' do
expect(subject.note).to eq "marked this issue as related to #{noteable_ref.to_reference(project)}"
end
end
end
describe '.unrelate_issue' do
let(:noteable_ref) { create(:issue) }
subject { described_class.unrelate_issue(noteable, noteable_ref, author) }
it_behaves_like 'a system note' do
let(:action) { 'unrelate' }
end
context 'when issue relation is removed' do
it 'sets the note text' do
expect(subject.note).to eq "removed the relation with #{noteable_ref.to_reference(project)}"
end
end
end
describe '.approve_mr' do describe '.approve_mr' do
let(:noteable) { create(:merge_request, source_project: project) } let(:noteable) { create(:merge_request, source_project: project) }
subject { described_class.approve_mr(noteable, author) } subject { described_class.approve_mr(noteable, author) }
......
...@@ -2,7 +2,11 @@ require 'spec_helper' ...@@ -2,7 +2,11 @@ require 'spec_helper'
describe WikiPages::CreateService, services: true do describe WikiPages::CreateService, services: true do
let(:project) { create(:empty_project) } let(:project) { create(:empty_project) }
<<<<<<< HEAD
let(:user) { create(:user) } let(:user) { create(:user) }
=======
let(:user) { create(:user) }
>>>>>>> master
let(:opts) do let(:opts) do
{ {
...@@ -15,7 +19,11 @@ describe WikiPages::CreateService, services: true do ...@@ -15,7 +19,11 @@ describe WikiPages::CreateService, services: true do
subject(:service) { described_class.new(project, user, opts) } subject(:service) { described_class.new(project, user, opts) }
before do before do
<<<<<<< HEAD
project.add_developer(user) project.add_developer(user)
=======
project.add_master(user)
>>>>>>> master
end end
describe '#execute' do describe '#execute' do
...@@ -23,6 +31,7 @@ describe WikiPages::CreateService, services: true do ...@@ -23,6 +31,7 @@ describe WikiPages::CreateService, services: true do
page = service.execute page = service.execute
expect(page).to be_valid expect(page).to be_valid
<<<<<<< HEAD
expect(page.title).to eq(opts[:title]) expect(page.title).to eq(opts[:title])
expect(page.content).to eq(opts[:content]) expect(page.content).to eq(opts[:content])
expect(page.format).to eq(opts[:format].to_sym) expect(page.format).to eq(opts[:format].to_sym)
...@@ -31,6 +40,13 @@ describe WikiPages::CreateService, services: true do ...@@ -31,6 +40,13 @@ describe WikiPages::CreateService, services: true do
it 'executes webhooks' do it 'executes webhooks' do
expect(service).to receive(:execute_hooks).once expect(service).to receive(:execute_hooks).once
.with(instance_of(WikiPage), 'create') .with(instance_of(WikiPage), 'create')
=======
expect(page).to have_attributes(title: opts[:title], content: opts[:content], format: opts[:format].to_sym)
end
it 'executes webhooks' do
expect(service).to receive(:execute_hooks).once.with(instance_of(WikiPage), 'create')
>>>>>>> master
service.execute service.execute
end end
......
...@@ -2,19 +2,32 @@ require 'spec_helper'
describe WikiPages::DestroyService, services: true do
let(:project) { create(:empty_project) }
<<<<<<< HEAD
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
=======
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
>>>>>>> master
subject(:service) { described_class.new(project, user) }
before do
<<<<<<< HEAD
project.add_developer(user)
=======
project.add_master(user)
>>>>>>> master
end
describe '#execute' do
it 'executes webhooks' do
<<<<<<< HEAD
expect(service).to receive(:execute_hooks).once
.with(instance_of(WikiPage), 'delete')
=======
expect(service).to receive(:execute_hooks).once.with(instance_of(WikiPage), 'delete')
>>>>>>> master
service.execute(page)
end
...
...@@ -2,8 +2,13 @@ require 'spec_helper'
describe WikiPages::UpdateService, services: true do
let(:project) { create(:empty_project) }
<<<<<<< HEAD
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
=======
let(:user) { create(:user) }
let(:page) { create(:wiki_page) }
>>>>>>> master
let(:opts) do
{
...@@ -16,7 +21,11 @@ describe WikiPages::UpdateService, services: true do
subject(:service) { described_class.new(project, user, opts) }
before do
<<<<<<< HEAD
project.add_developer(user)
=======
project.add_master(user)
>>>>>>> master
end
describe '#execute' do
...@@ -24,6 +33,7 @@ describe WikiPages::UpdateService, services: true do
updated_page = service.execute(page)
expect(updated_page).to be_valid
<<<<<<< HEAD
expect(updated_page.message).to eq(opts[:message])
expect(updated_page.content).to eq(opts[:content])
expect(updated_page.format).to eq(opts[:format].to_sym)
...@@ -32,6 +42,13 @@ describe WikiPages::UpdateService, services: true do
it 'executes webhooks' do
expect(service).to receive(:execute_hooks).once
.with(instance_of(WikiPage), 'update')
=======
expect(updated_page).to have_attributes(message: opts[:message], content: opts[:content], format: opts[:format].to_sym)
end
it 'executes webhooks' do
expect(service).to receive(:execute_hooks).once.with(instance_of(WikiPage), 'update')
>>>>>>> master
service.execute(page)
end
...
require 'spec_helper'

describe PostReceive do
  let(:changes) { "123456 789012 refs/heads/tést\n654321 210987 refs/tags/tag" }
  let(:wrongly_encoded_changes) { changes.encode("ISO-8859-1").force_encoding("UTF-8") }
  let(:base64_changes) { Base64.encode64(wrongly_encoded_changes) }
  let(:project_identifier) { "project-#{project.id}" }
  let(:key) { create(:key, user: project.owner) }
  let(:key_id) { key.shell_id }
  let(:project) { create(:project, :repository) }

  describe "#process_project_changes" do
    before do
      allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner)
    end

    context 'after project changes hooks' do
      let(:fake_hook_data) { Hash.new(event_name: 'repository_update') }

      before do
        allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data)
        # silence hooks so we can isolate
        allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true)
        allow_any_instance_of(GitTagPushService).to receive(:execute).and_return(true)
        allow_any_instance_of(GitPushService).to receive(:execute).and_return(true)
      end

      it 'calls Geo::RepositoryUpdatedEventStore' do
        expect_any_instance_of(Geo::RepositoryUpdatedEventStore).to receive(:create)

        described_class.new.perform(project_identifier, key_id, base64_changes)
      end
    end
  end

  describe '#process_wiki_changes' do
    let(:project_identifier) { "#{pwd(project)}.wiki" }

    it 'triggers Geo::RepositoryUpdatedEventStore when Geo is enabled' do
      allow(Gitlab::Geo).to receive(:enabled?) { true }

      expect(Geo::RepositoryUpdatedEventStore).to receive(:new).with(instance_of(Project), source: Geo::RepositoryUpdatedEvent::WIKI).and_call_original
      expect_any_instance_of(Geo::RepositoryUpdatedEventStore).to receive(:create)

      described_class.new.perform(project_identifier, key_id, base64_changes)
    end

    it 'triggers wiki index update when ElasticSearch is enabled' do
      expect(Project).to receive(:find_by_full_path).with("#{project.full_path}.wiki").and_return(nil)
      expect(Project).to receive(:find_by_full_path).with(project.full_path).and_return(project)

      stub_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)

      expect_any_instance_of(ProjectWiki).to receive(:index_blobs)

      described_class.new.perform(project_identifier, key_id, base64_changes)
    end
  end

  def pwd(project)
    File.join(Gitlab.config.repositories.storages.default['path'], project.path_with_namespace)
  end
end
...@@ -4,9 +4,10 @@ describe 'Every Sidekiq worker' do
let(:workers) do
root = Rails.root.join('app', 'workers')
concerns = root.join('concerns').to_s
ee_modules = root.join('ee').to_s
workers = Dir[root.join('**', '*.rb')].
reject { |path| path.start_with?(concerns) } reject { |path| path.start_with?(concerns, ee_modules) }
workers.map do |path|
ns = Pathname.new(path).relative_path_from(root).to_s.gsub('.rb', '')
...
...@@ -94,26 +94,23 @@ describe PostReceive do
it { expect{ subject }.not_to change{ Ci::Pipeline.count } }
end
end
end
describe '#process_repository_update' do context 'after project changes hooks' do
let(:changes) {'123456 789012 refs/heads/tést'} let(:changes) { '123456 789012 refs/heads/tést' }
let(:fake_hook_data) do let(:fake_hook_data) { Hash.new(event_name: 'repository_update') }
{ event_name: 'repository_update' }
end
before do
allow_any_instance_of(Gitlab::GitPostReceive).to receive(:identify).and_return(project.owner) allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data)
allow_any_instance_of(Gitlab::DataBuilder::Repository).to receive(:update).and_return(fake_hook_data) # silence hooks so we can isolate
# silence hooks so we can isolate allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true)
allow_any_instance_of(Key).to receive(:post_create_hook).and_return(true) allow_any_instance_of(GitPushService).to receive(:execute).and_return(true)
allow(subject).to receive(:process_project_changes).and_return(true) end
end
it 'calls SystemHooksService' do
expect_any_instance_of(SystemHooksService).to receive(:execute_hooks).with(fake_hook_data, :repository_update_hooks).and_return(true)
subject.perform(pwd(project), key_id, base64_changes) described_class.new.perform(project_identifier, key_id, base64_changes)
end
end
end
...@@ -123,17 +120,6 @@ describe PostReceive do
described_class.new.perform(project_identifier, key_id, base64_changes)
end
it "triggers wiki index update" do
expect(Project).to receive(:find_by_full_path).with("#{project.full_path}.wiki").and_return(nil)
expect(Project).to receive(:find_by_full_path).with(project.full_path).and_return(project)
stub_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
expect_any_instance_of(ProjectWiki).to receive(:index_blobs)
repo_path = "#{pwd(project)}.wiki"
described_class.new.perform(repo_path, key_id, base64_changes)
end
it "does not run if the author is not in the project" do
allow_any_instance_of(Gitlab::GitPostReceive).
to receive(:identify_using_ssh_key).
...