Commit 160e5b5d authored by GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-08-06

# Conflicts:
#	config/application.rb
#	locale/gitlab.pot

[ci skip]
parents 00a1309e d737abc5
......@@ -36,6 +36,8 @@ class ImporterStatus {
const $targetField = $tr.find('.import-target');
const $namespaceInput = $targetField.find('.js-select-namespace option:selected');
const id = $tr.attr('id').replace('repo_', '');
const repoData = $tr.data();
let targetNamespace;
let newName;
if ($namespaceInput.length > 0) {
......@@ -45,12 +47,20 @@ class ImporterStatus {
}
$btn.disable().addClass('is-loading');
return axios.post(this.importUrl, {
this.id = id;
let attributes = {
repo_id: id,
target_namespace: targetNamespace,
new_name: newName,
ci_cd_only: this.ciCdOnly,
})
};
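// Forward any extra data attributes on the row (e.g. the Bitbucket
// Server project key and repository slug) to the import endpoint.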
if (repoData) {
attributes = Object.assign(repoData, attributes);
}
return axios.post(this.importUrl, attributes)
.then(({ data }) => {
const job = $(`tr#repo_${id}`);
job.attr('id', `project_${data.id}`);
......@@ -70,6 +80,9 @@ class ImporterStatus {
.catch((error) => {
let details = error;
const $statusField = $(`#repo_${this.id} .job-status`);
$statusField.text(__('Failed'));
if (error.response && error.response.data && error.response.data.errors) {
details = error.response.data.errors;
}
......
......@@ -165,6 +165,7 @@ export default {
}
this.showEmptyState = false;
})
.then(this.resize)
.catch(() => {
this.state = 'unableToConnect';
});
......
import initPipelineDetails from '~/pipelines/pipeline_details_bundle';
import initPipelines from '../init_pipelines';
document.addEventListener('DOMContentLoaded', () => {
initPipelines();
initPipelineDetails();
});
// /builds is an alias for show
import '../show/index';
import initPipelines from '../init_pipelines';
document.addEventListener('DOMContentLoaded', initPipelines);
// /failures is an alias for show
import '../show/index';
......@@ -17,7 +17,7 @@ document.addEventListener('DOMContentLoaded', () => {
new ShortcutsNavigation(); // eslint-disable-line no-new
new NotificationsForm(); // eslint-disable-line no-new
new UserCallout({ // eslint-disable-line no-new
setCalloutPerProject: true,
setCalloutPerProject: false,
className: 'js-autodevops-banner',
});
......
......@@ -36,11 +36,11 @@
:key="index"
class="row prepend-top-10 append-bottom-10"
>
<strong class="col-sm-2 text-right">
<strong class="col-sm-3 text-right">
{{ field.text }}:
</strong>
<div class="col-sm-10 text-secondary">
<div class="col-sm-9 text-secondary">
<code-block
v-if="field.type === $options.fieldTypes.codeBock"
:code="field.value"
......
......@@ -4,15 +4,20 @@ import Icon from '~/vue_shared/components/icon.vue';
import { inserted } from '~/feature_highlight/feature_highlight_helper';
import { mouseenter, debouncedMouseleave, togglePopover } from '~/shared/popover';
/**
 * Renders a button with a question mark icon.
 * On hover it shows a popover; the popover is dismissed on mouseleave.
*/
export default {
name: 'ReportsHelpPopover',
name: 'HelpPopover',
components: {
Icon,
},
props: {
options: {
type: Object,
required: true,
required: false,
default: () => ({}),
},
},
mounted() {
......
......@@ -2,7 +2,7 @@
import { __ } from '~/locale';
import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
import IssuesList from './issues_list.vue';
import Popover from './help_popover.vue';
import Popover from '../help_popover.vue';
const LOADING = 'LOADING';
const ERROR = 'ERROR';
......
<script>
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import LoadingIcon from '~/vue_shared/components/loading_icon.vue';
import Popover from './help_popover.vue';
import Popover from '../help_popover.vue';
/**
* Renders the summary row for each report
......
......@@ -35,6 +35,7 @@ class ApplicationController < ActionController::Base
:gitea_import_enabled?, :github_import_configured?,
:gitlab_import_enabled?, :gitlab_import_configured?,
:bitbucket_import_enabled?, :bitbucket_import_configured?,
:bitbucket_server_import_enabled?,
:google_code_import_enabled?, :fogbugz_import_enabled?,
:git_import_enabled?, :gitlab_project_import_enabled?,
:manifest_import_enabled?
......@@ -345,6 +346,10 @@ class ApplicationController < ActionController::Base
!Gitlab::CurrentSettings.import_sources.empty?
end
def bitbucket_server_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('bitbucket_server')
end
def github_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('github')
end
......
# frozen_string_literal: true
class Import::BitbucketServerController < Import::BaseController
before_action :verify_bitbucket_server_import_enabled
before_action :bitbucket_auth, except: [:new, :configure]
before_action :validate_import_params, only: [:create]
# As a basic sanity check to prevent URL injection, restrict project key
# and repository slug input to allowed characters. For Bitbucket:
#
# Project keys must start with a letter and may only consist of ASCII letters, numbers and underscores (A-Z, a-z, 0-9, _).
#
# Repository names are limited to 128 characters. They must start with a
# letter or number and may contain spaces, hyphens, underscores, and periods.
# (https://community.atlassian.com/t5/Answers-Developer-Questions/stash-repository-names/qaq-p/499054)
VALID_BITBUCKET_CHARS = /\A[\w\-_\.\s]+\z/
def new
end
def create
repo = bitbucket_client.repo(@project_key, @repo_slug)
unless repo
return render json: { errors: "Project #{@project_key}/#{@repo_slug} could not be found" }, status: :unprocessable_entity
end
project_name = params[:new_name].presence || repo.name
namespace_path = params[:new_namespace].presence || current_user.username
target_namespace = find_or_create_namespace(namespace_path, current_user)
if current_user.can?(:create_projects, target_namespace)
project = Gitlab::BitbucketServerImport::ProjectCreator.new(@project_key, @repo_slug, repo, project_name, target_namespace, current_user, credentials).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project)
else
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
end
rescue BitbucketServer::Client::ServerError => e
render json: { errors: "Unable to connect to server: #{e}" }, status: :unprocessable_entity
end
def configure
session[personal_access_token_key] = params[:personal_access_token]
session[bitbucket_server_username_key] = params[:bitbucket_username]
session[bitbucket_server_url_key] = params[:bitbucket_server_url]
redirect_to status_import_bitbucket_server_path
end
def status
repos = bitbucket_client.repos
@repos, @incompatible_repos = repos.partition { |repo| repo.valid? }
@already_added_projects = find_already_added_projects('bitbucket_server')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos = @repos.to_a.reject { |repo| already_added_projects_names.include?(repo.browse_url) }
rescue BitbucketServer::Connection::ConnectionError, BitbucketServer::Client::ServerError => e
flash[:alert] = "Unable to connect to server: #{e}"
clear_session_data
redirect_to new_import_bitbucket_server_path
end
def jobs
render json: find_jobs('bitbucket_server')
end
private
def bitbucket_client
@bitbucket_client ||= BitbucketServer::Client.new(credentials)
end
def validate_import_params
@project_key = params[:project]
@repo_slug = params[:repository]
return render_validation_error('Missing project key') unless @project_key.present?
return render_validation_error('Missing repository slug') unless @repo_slug.present?
return render_validation_error('Invalid project key') unless @project_key =~ VALID_BITBUCKET_CHARS
return render_validation_error('Invalid repository slug') unless @repo_slug =~ VALID_BITBUCKET_CHARS
end
def render_validation_error(message)
render json: { errors: message }, status: :unprocessable_entity
end
def bitbucket_auth
unless session[bitbucket_server_url_key].present? &&
session[bitbucket_server_username_key].present? &&
session[personal_access_token_key].present?
redirect_to new_import_bitbucket_server_path
end
end
def verify_bitbucket_server_import_enabled
render_404 unless bitbucket_server_import_enabled?
end
def bitbucket_server_url_key
:bitbucket_server_url
end
def bitbucket_server_username_key
:bitbucket_server_username
end
def personal_access_token_key
:bitbucket_server_personal_access_token
end
def clear_session_data
session[bitbucket_server_url_key] = nil
session[bitbucket_server_username_key] = nil
session[personal_access_token_key] = nil
end
def credentials
{
base_uri: session[bitbucket_server_url_key],
user: session[bitbucket_server_username_key],
password: session[personal_access_token_key]
}
end
end
......@@ -150,7 +150,7 @@ class ProjectsController < Projects::ApplicationController
def archive
return access_denied! unless can?(current_user, :archive_project, @project)
@project.archive!
::Projects::UpdateService.new(@project, current_user, archived: true).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }
......@@ -160,7 +160,7 @@ class ProjectsController < Projects::ApplicationController
def unarchive
return access_denied! unless can?(current_user, :archive_project, @project)
@project.unarchive!
::Projects::UpdateService.new(@project, current_user, archived: false).execute
respond_to do |format|
format.html { redirect_to project_path(@project) }
......
......@@ -11,13 +11,23 @@ module NamespacesHelper
.includes(:route)
.order('routes.path')
users = [current_user.namespace]
selected_id = selected
unless extra_group.nil? || extra_group.is_a?(Group)
extra_group = Group.find(extra_group) if Namespace.find(extra_group).kind == 'group'
end
if extra_group && extra_group.is_a?(Group) && (!Group.exists?(name: extra_group.name) || Ability.allowed?(current_user, :read_group, extra_group))
if extra_group && extra_group.is_a?(Group)
extra_group = dedup_extra_group(extra_group)
if Ability.allowed?(current_user, :read_group, extra_group)
# Assign the value to an invalid primary ID so that the select box works
extra_group.id = -1 unless extra_group.persisted?
selected_id = extra_group.id if selected == :extra_group
groups |= [extra_group]
else
selected_id = current_user.namespace.id
end
end
options = []
......@@ -27,11 +37,11 @@ module NamespacesHelper
options << options_for_group(users, display_path: display_path, type: 'user')
if selected == :current_user && current_user.namespace
selected = current_user.namespace.id
selected_id = current_user.namespace.id
end
end
grouped_options_for_select(options, selected)
grouped_options_for_select(options, selected_id)
end
def namespace_icon(namespace, size = 40)
......@@ -44,6 +54,17 @@ module NamespacesHelper
private
# Many importers create a temporary Group, so use the real
# group if one exists by that name to prevent duplicates.
def dedup_extra_group(extra_group)
unless extra_group.persisted?
existing_group = Group.find_by(name: extra_group.name)
extra_group = existing_group if existing_group&.persisted?
end
extra_group
end
def options_for_group(namespaces, display_path:, type:)
group_label = type.pluralize
elements = namespaces.sort_by(&:human_name).map! do |n|
......
......@@ -64,6 +64,8 @@ class MergeRequest < ActiveRecord::Base
class_name: 'MergeRequestsClosingIssues',
dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :cached_closes_issues, through: :merge_requests_closing_issues, source: :issue
belongs_to :assignee, class_name: "User"
serialize :merge_params, Hash # rubocop:disable Cop/ActiveRecordSerialize
......@@ -769,8 +771,9 @@ class MergeRequest < ActiveRecord::Base
# Calculating this information for a number of merge requests requires
# running `ReferenceExtractor` on each of them separately.
# This optimization does not apply to issues from external sources.
def cache_merge_request_closes_issues!(current_user)
def cache_merge_request_closes_issues!(current_user = self.author)
return unless project.issues_enabled?
return if closed? || merged?
transaction do
self.merge_requests_closing_issues.delete_all
......@@ -783,6 +786,18 @@ class MergeRequest < ActiveRecord::Base
end
end
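# Returns the closing issues that are visible to the given user. Projects
# using an external issue tracker are calculated fresh, since those issues
# are not cached; otherwise the cached list is filtered by the user's
# ability to read each issue.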
def visible_closing_issues_for(current_user = self.author)
strong_memoize(:visible_closing_issues_for) do
if self.target_project.has_external_issue_tracker?
closes_issues(current_user)
else
cached_closes_issues.select do |issue|
Ability.allowed?(current_user, :read_issue, issue)
end
end
end
end
# Return the set of issues that will be closed if this merge request is accepted.
def closes_issues(current_user = self.author)
if target_branch == project.default_branch
......@@ -802,7 +817,7 @@ class MergeRequest < ActiveRecord::Base
ext = Gitlab::ReferenceExtractor.new(project, current_user)
ext.analyze("#{title}\n#{description}")
ext.issues - closes_issues(current_user)
ext.issues - visible_closing_issues_for(current_user)
end
def target_project_path
......@@ -850,7 +865,7 @@ class MergeRequest < ActiveRecord::Base
end
def merge_commit_message(include_description: false)
closes_issues_references = closes_issues.map do |issue|
closes_issues_references = visible_closing_issues_for.map do |issue|
issue.to_reference(target_project)
end
......
# frozen_string_literal: true
module Postgresql
class ReplicationSlot < ActiveRecord::Base
self.table_name = 'pg_replication_slots'
# Returns true if the lag observed across all replication slots exceeds a
# given threshold.
#
# max - The maximum replication lag size, in bytes. Based on GitLab.com
# statistics it takes between 1 and 5 seconds to replicate around
# 100 MB of data.
def self.lag_too_great?(max = 100.megabytes)
lag_function = "#{Gitlab::Database.pg_wal_lsn_diff}" \
"(#{Gitlab::Database.pg_current_wal_insert_lsn}(), restart_lsn)::bigint"
# We force the use of a transaction here so the query always goes to the
# primary, even when using the EE DB load balancer.
sizes = transaction { pluck(lag_function) }
too_great = sizes.count { |size| size >= max }
# If too many replicas are falling behind too much, the availability of a
# GitLab instance might suffer. To prevent this from happening we require
# at least 1 replica to have data recent enough.
if sizes.any? && too_great.positive?
(sizes.length - too_great) <= 1
else
false
end
end
end
end
......@@ -671,6 +671,8 @@ class Project < ActiveRecord::Base
project_import_data.credentials ||= {}
project_import_data.credentials = project_import_data.credentials.merge(credentials)
end
project_import_data
end
def import?
......@@ -1338,14 +1340,6 @@ class Project < ActiveRecord::Base
:visibility_level
end
def archive!
update_attribute(:archived, true)
end
def unarchive!
update_attribute(:archived, false)
end
def change_head(branch)
if repository.branch_exists?(branch)
repository.before_change_head
......
......@@ -8,6 +8,10 @@ class JiraService < IssueTrackerService
validates :username, presence: true, if: :activated?
validates :password, presence: true, if: :activated?
validates :jira_issue_transition_id,
format: { with: Gitlab::Regex.jira_transition_id_regex, message: "transition ids can have only numbers which can be split with , or ;" },
allow_blank: true
prop_accessor :username, :password, :url, :api_url, :jira_issue_transition_id, :title, :description
before_update :reset_password
......@@ -91,7 +95,7 @@ class JiraService < IssueTrackerService
{ type: 'text', name: 'api_url', title: 'JIRA API URL', placeholder: 'If different from Web URL' },
{ type: 'text', name: 'username', placeholder: '', required: true },
{ type: 'password', name: 'password', placeholder: '', required: true },
{ type: 'text', name: 'jira_issue_transition_id', title: 'Transition ID', placeholder: '' }
{ type: 'text', name: 'jira_issue_transition_id', title: 'Transition ID(s)', placeholder: 'Use , or ; to separate multiple transition IDs' }
]
end
......@@ -191,8 +195,18 @@ class JiraService < IssueTrackerService
end
end
# jira_issue_transition_id can contain multiple values separated by , or ;
# the issue is transitioned in the order given by the user
# if any transition fails, the error is logged and the sequence stops
def transition_issue(issue)
issue.transitions.build.save(transition: { id: jira_issue_transition_id })
jira_issue_transition_id.scan(Gitlab::Regex.jira_transition_id_regex).each do |transition_id|
begin
issue.transitions.build.save!(transition: { id: transition_id })
rescue => error
Rails.logger.info "#{self.class.name} Issue Transition failed message ERROR: #{client_url} - #{error.message}"
return false
end
end
end
def add_issue_solved_comment(issue, commit_id, commit_url)
......
......@@ -208,7 +208,7 @@ class MergeRequestPresenter < Gitlab::View::Presenter::Delegated
end
def closing_issues
@closing_issues ||= closes_issues(current_user)
@closing_issues ||= visible_closing_issues_for(current_user)
end
def pipeline
......
......@@ -25,7 +25,7 @@ module MergeRequests
def close_issues(merge_request)
return unless merge_request.target_branch == project.default_branch
closed_issues = merge_request.closes_issues(current_user)
closed_issues = merge_request.visible_closing_issues_for(current_user)
closed_issues.each do |issue|
if can?(current_user, :update_issue, issue)
......
......@@ -14,6 +14,7 @@ module MergeRequests
merge_request.mark_as_unchecked
invalidate_cache_counts(merge_request, users: merge_request.assignees)
merge_request.update_project_counter_caches
merge_request.cache_merge_request_closes_issues!(current_user)
end
merge_request
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-abuse-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-account-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-background-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-ci-cd-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-email-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-gitaly-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-help-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-influx-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-ip-limits-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-koding-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-logging-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-outbound-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-pages-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-performance-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-performance-bar-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-plantuml-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-prometheus-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-realtime-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-registry-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-check-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-mirror-settings') do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-signin-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-signup-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-spam-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-terminal-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-terms-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
- application_setting = local_assigns.fetch(:application_setting)
= form_for application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for application_setting, url: admin_application_settings_path(anchor: 'js-third-party-offers-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-usage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
= form_for @application_setting, url: admin_application_settings_path, html: { class: 'fieldset-form' } do |f|
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-visibility-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
......
- title = _('Bitbucket Server Import')
- page_title title
- breadcrumb_title title
- header_title "Projects", root_path
%h3.page-title
= icon 'bitbucket-square', text: _('Import repositories from Bitbucket Server')
%p
= _('Enter your Bitbucket Server URL and personal access token below')
= form_tag configure_import_bitbucket_server_path, method: :post do
.form-group.row
= label_tag :bitbucket_server_url, 'Bitbucket Server URL', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_server_url, '', class: 'form-control append-right-8', placeholder: _('https://your-bitbucket-server'), size: 40
.form-group.row
= label_tag :bitbucket_username, 'Username', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_username, '', class: 'form-control append-right-8', placeholder: _('username'), size: 40
.form-group.row
= label_tag :personal_access_token, 'Password/Personal Access Token', class: 'col-form-label col-md-2'
.col-md-4
= password_field_tag :personal_access_token, '', class: 'form-control append-right-8', placeholder: _('Personal Access Token'), size: 40
.form-actions
= submit_tag _('List your Bitbucket Server repositories'), class: 'btn btn-success'
- page_title 'Bitbucket Server import'
- header_title 'Projects', root_path
%h3.page-title
%i.fa.fa-bitbucket-square
= _('Import projects from Bitbucket Server')
- if @repos.any?
%p.light
= _('Select projects you want to import.')
.btn-group
- if @incompatible_repos.any?
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all compatible projects')
= icon('spinner spin', class: 'loading-icon')
- else
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all projects')
= icon('spinner spin', class: 'loading-icon')
.btn-group
= link_to('Reconfigure', configure_import_bitbucket_server_path, class: 'btn btn-primary', method: :post)
.table-responsive.prepend-top-10
%table.table.import-jobs
%colgroup.import-jobs-from-col
%colgroup.import-jobs-to-col
%colgroup.import-jobs-status-col
%thead
%tr
%th= _('From Bitbucket Server')
%th= _('To GitLab')
%th= _('Status')
%tbody
- @already_added_projects.each do |project|
%tr{ id: "project_#{project.id}", class: "#{project_status_css_class(project.import_status)}" }
%td
= link_to project.import_source, project.import_source, target: '_blank', rel: 'noopener noreferrer'
%td
= link_to project.full_path, [project.namespace.becomes(Namespace), project]
%td.job-status
- if project.import_status == 'finished'
= icon('check', text: 'Done')
- elsif project.import_status == 'started'
= icon('spinner spin', text: 'Started')
- else
= project.human_import_status_name
- @repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}", data: { project: repo.project_key, repository: repo.slug } }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%fieldset.row
.input-group
.project-path.input-group-prepend
- if current_user.can_select_namespace?
- selected = params[:namespace_id] || :extra_group
- opts = current_user.can_create_group? ? { extra_group: Group.new(name: repo.project_key, path: repo.project_key) } : {}
= select_tag :namespace_id, namespaces_options(selected, opts.merge({ display_path: true })), { class: 'input-group-text select2 js-select-namespace', tabindex: 1 }
- else
= text_field_tag :path, current_user.namespace_path, class: "input-group-text input-large form-control", tabindex: 1, disabled: true
%span.input-group-prepend
.input-group-text /
= text_field_tag :path, repo.name, class: "input-mini form-control", tabindex: 2, autofocus: true, required: true
%td.import-actions.job-status
= button_tag class: 'btn btn-import js-add-to-import' do
Import
= icon('spinner spin', class: 'loading-icon')
- @incompatible_repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}" }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%td.import-actions-job-status
= label_tag 'Incompatible Project', nil, class: 'label badge-danger'
- if @incompatible_repos.any?
%p
One or more of your Bitbucket Server projects cannot be imported into GitLab
directly because they use Subversion or Mercurial for version control,
rather than Git. Please convert
= link_to 'them to Git,', 'https://www.atlassian.com/git/tutorials/migrating-overview'
and go through the
= link_to 'import flow', status_import_bitbucket_server_path
again.
.js-importer-status{ data: { jobs_import_path: "#{jobs_import_bitbucket_server_path}", import_path: "#{import_bitbucket_server_path}" } }
......@@ -18,10 +18,14 @@
- if bitbucket_import_enabled?
%div
= link_to status_import_bitbucket_path, class: "btn import_bitbucket #{'how_to_import_link' unless bitbucket_import_configured?}" do
= icon('bitbucket', text: 'Bitbucket')
= icon('bitbucket', text: 'Bitbucket Cloud')
- unless bitbucket_import_configured?
= render 'bitbucket_import_modal'
- if bitbucket_server_import_enabled?
%div
= link_to status_import_bitbucket_server_path, class: "btn import_bitbucket" do
= icon('bitbucket-square', text: 'Bitbucket Server')
- if gitlab_import_enabled?
%div
= link_to status_import_gitlab_path, class: "btn import_gitlab #{'how_to_import_link' unless gitlab_import_configured?}" do
......
......@@ -6,10 +6,22 @@ class BackgroundMigrationWorker
# The minimum amount of time between processing two jobs of the same migration
# class.
#
# This interval is set to 5 minutes so autovacuuming and other maintenance
# related tasks have plenty of time to clean up after a migration has been
# performed.
MIN_INTERVAL = 5.minutes.to_i
# This interval is set to 2 or 5 minutes so autovacuuming and other
# maintenance related tasks have plenty of time to clean up after a migration
# has been performed.
def self.minimum_interval
if enable_health_check?
2.minutes.to_i
else
5.minutes.to_i
end
end
def self.enable_health_check?
Rails.env.development? ||
Rails.env.test? ||
Feature.enabled?('background_migration_health_check')
end
# Performs the background migration.
#
......@@ -27,7 +39,8 @@ class BackgroundMigrationWorker
# running a migration of this class or we ran one recently. In this case
# we'll reschedule the job in such a way that it is picked up again around
# the time the lease expires.
self.class.perform_in(ttl || MIN_INTERVAL, class_name, arguments)
self.class
.perform_in(ttl || self.class.minimum_interval, class_name, arguments)
end
end
......@@ -39,17 +52,51 @@ class BackgroundMigrationWorker
[true, nil]
else
lease = lease_for(class_name)
perform = !!lease.try_obtain
# If we managed to acquire the lease but the DB is not healthy, then we
# want to simply reschedule our job and try again _after_ the lease
# expires.
if perform && !healthy_database?
database_unhealthy_counter.increment
[lease.try_obtain, lease.ttl]
perform = false
end
[perform, lease.ttl]
end
end
def lease_for(class_name)
Gitlab::ExclusiveLease
.new("#{self.class.name}:#{class_name}", timeout: MIN_INTERVAL)
.new(lease_key_for(class_name), timeout: self.class.minimum_interval)
end
def lease_key_for(class_name)
"#{self.class.name}:#{class_name}"
end
def always_perform?
Rails.env.test?
end
# Returns true if the database is healthy enough to allow the migration to be
# performed.
#
# class_name - The name of the background migration that we might want to
# run.
def healthy_database?
return true unless self.class.enable_health_check?
return true unless Gitlab::Database.postgresql?
!Postgresql::ReplicationSlot.lag_too_great?
end
def database_unhealthy_counter
Gitlab::Metrics.counter(
:background_migration_database_health_reschedules,
'The number of times a background migration is rescheduled because the database is unhealthy.'
)
end
end
---
title: Keep admin settings sections open after submitting forms
merge_request: 21040
author:
type: other
---
title: Fix height of full-width Metrics charts on large screens
merge_request: 20866
author:
type: fixed
---
title: Persist 'Auto DevOps' banner dismissal globally
merge_request: 20540
author:
type: other
---
title: Moves help_popover component to a common location
merge_request:
author:
type: other
---
title: Increases title column on modal for reports
merge_request:
author:
type: other
---
title: Update design of project templates
merge_request: 21012
author:
type: changed
---
title: Allow multiple JIRA transition ids
merge_request: 20939
author:
type: changed
---
title: Retrieve merge request closing issues from database cache
merge_request: 20911
author:
type: fixed
---
title: Trigger system hooks when project is archived/unarchived
merge_request: 20995
author:
type: added
---
title: Fix rendering of pipeline failure view when directly navigating to it
merge_request: 21043
author:
type: fixed
......@@ -166,6 +166,7 @@ module Gitlab
config.assets.paths << "#{config.root}/node_modules/xterm/src/"
config.assets.precompile << "xterm.css"
<<<<<<< HEAD
## EE-specific assets config START
%w[images javascripts stylesheets].each do |path|
config.assets.paths << "#{config.root}/ee/app/assets/#{path}"
......@@ -180,6 +181,8 @@ module Gitlab
config.assets.precompile << LOOSE_EE_APP_ASSETS
## EE-specific assets config END
=======
>>>>>>> upstream/master
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
......
......@@ -24,6 +24,13 @@ namespace :import do
get :jobs
end
resource :bitbucket_server, only: [:create, :new], controller: :bitbucket_server do
post :configure
get :status
get :callback
get :jobs
end
resource :google_code, only: [:create, :new], controller: :google_code do
get :status
post :callback
......
......@@ -5,6 +5,9 @@ otherwise take a very long time (hours, days, years, etc) to complete. For
example, you can use background migrations to migrate data so that instead of
storing data in a single JSON column the data is stored in a separate table.
If the database cluster is considered to be in an unhealthy state, background
migrations automatically reschedule themselves for a later point in time.
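A minimal sketch of the scheduling side (the migration class name and ID range
here are hypothetical):

```ruby
# Enqueue one batch of a background migration. Before running it, the
# worker checks database health and reschedules itself if replication
# lag is too great.
BackgroundMigrationWorker.perform_async('ExtractDataToSeparateTable', [1, 10_000])
```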
## When To Use Background Migrations
>**Note:**
......
......@@ -115,7 +115,7 @@ in the table below.
| `JIRA API URL` | The base URL to the JIRA instance API. Web URL value will be used if not set. E.g., `https://jira-api.example.com`. |
| `Username` | The user name created in [configuring JIRA step](#configuring-jira). Using the email address will cause `401 unauthorized`. |
| `Password` | The password of the user created in [configuring JIRA step](#configuring-jira). |
| `Transition ID` | This is the ID of a transition that moves issues to the desired state. **Closing JIRA issues via commits or Merge Requests won't work if you don't set the ID correctly.** |
| `Transition ID` | This is the ID of a transition that moves issues to the desired state. You can list multiple transition IDs separated by `,` or `;`, in which case the issue is moved through each state, one after another, in the given order. **Closing JIRA issues via commits or Merge Requests won't work if you don't set the ID correctly.** |
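For example, a value of `141,142` (hypothetical IDs) first applies transition `141` and then `142`.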
### Getting a transition ID
......
......@@ -382,7 +382,7 @@ module API
end
get ':id/merge_requests/:merge_request_iid/closes_issues' do
merge_request = find_merge_request_with_access(params[:merge_request_iid])
issues = ::Kaminari.paginate_array(merge_request.closes_issues(current_user))
issues = ::Kaminari.paginate_array(merge_request.visible_closing_issues_for(current_user))
issues = paginate(issues)
external_issues, internal_issues = issues.partition { |issue| issue.is_a?(ExternalIssue) }
......
......@@ -324,7 +324,7 @@ module API
post ':id/archive' do
authorize!(:archive_project, user_project)
user_project.archive!
::Projects::UpdateService.new(user_project, current_user, archived: true).execute
present user_project, with: Entities::Project
end
......@@ -335,7 +335,7 @@ module API
post ':id/unarchive' do
authorize!(:archive_project, user_project)
user_project.unarchive!
::Projects::UpdateService.new(@project, current_user, archived: false).execute
present user_project, with: Entities::Project
end
......
# frozen_string_literal: true
module BitbucketServer
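# Thin wrapper around the Bitbucket Server REST API. A usage sketch, with a
# placeholder server URL and credentials:
#
#   client = BitbucketServer::Client.new(base_uri: 'https://bitbucket.example.com',
#                                        user: 'admin',
#                                        password: 'personal-access-token')
#
#   # Collections are lazy, so only as many pages as needed are fetched:
#   client.repos.first(5).each { |repo| puts repo.full_name }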
class Client
attr_reader :connection
ServerError = Class.new(StandardError)
SERVER_ERRORS = [SocketError,
OpenSSL::SSL::SSLError,
Errno::ECONNRESET,
Errno::ECONNREFUSED,
Errno::EHOSTUNREACH,
Net::OpenTimeout,
Net::ReadTimeout,
Gitlab::HTTP::BlockedUrlError,
BitbucketServer::Connection::ConnectionError].freeze
def initialize(options = {})
@connection = Connection.new(options)
end
def pull_requests(project_key, repo)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests?state=ALL"
get_collection(path, :pull_request)
end
def activities(project_key, repo, pull_request_id)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests/#{pull_request_id}/activities"
get_collection(path, :activity)
end
def repo(project, repo_name)
parsed_response = connection.get("/projects/#{project}/repos/#{repo_name}")
BitbucketServer::Representation::Repo.new(parsed_response)
end
def repos
path = "/repos"
get_collection(path, :repo)
end
def create_branch(project_key, repo, branch_name, sha)
payload = {
name: branch_name,
startPoint: sha,
message: 'GitLab temporary branch for import'
}
connection.post("/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
def delete_branch(project_key, repo, branch_name, sha)
payload = {
name: Gitlab::Git::BRANCH_REF_PREFIX + branch_name,
dryRun: false
}
connection.delete(:branches, "/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
private
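# Fetches a paginated collection of the given type (:pull_request,
# :activity, or :repo), wrapping transport-level failures in ServerError.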
def get_collection(path, type)
paginator = BitbucketServer::Paginator.new(connection, Addressable::URI.escape(path), type)
BitbucketServer::Collection.new(paginator)
rescue *SERVER_ERRORS => e
raise ServerError, e
end
end
end
# frozen_string_literal: true
module BitbucketServer
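# Lazily enumerates a paginated resource. The inner loop keeps asking the
# paginator for its next batch of items and terminates when Paginator#items
# raises StopIteration.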
class Collection < Enumerator
def initialize(paginator)
super() do |yielder|
loop do
paginator.items.each { |item| yielder << item }
end
end
lazy
end
def method_missing(method, *args)
return super unless self.respond_to?(method)
self.__send__(method, *args) do |item| # rubocop:disable GitlabSecurity/PublicSend
block_given? ? yield(item) : item
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Connection
include ActionView::Helpers::SanitizeHelper
DEFAULT_API_VERSION = '1.0'
SEPARATOR = '/'
attr_reader :api_version, :base_uri, :username, :token
ConnectionError = Class.new(StandardError)
def initialize(options = {})
@api_version = options.fetch(:api_version, DEFAULT_API_VERSION)
@base_uri = options[:base_uri]
@username = options[:user]
@token = options[:password]
end
def get(path, extra_query = {})
response = Gitlab::HTTP.get(build_url(path),
basic_auth: auth,
headers: accept_headers,
query: extra_query)
check_errors!(response)
response.parsed_response
end
def post(path, body)
response = Gitlab::HTTP.post(build_url(path),
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
# We need to support two different APIs for deletion:
#
# /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches/default
# /rest/branch-utils/1.0/projects/{projectKey}/repos/{repositorySlug}/branches
def delete(resource, path, body)
url = delete_url(resource, path)
response = Gitlab::HTTP.delete(url,
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
private
def check_errors!(response)
raise ConnectionError, "Response is not valid JSON" unless response.parsed_response.is_a?(Hash)
return if response.code >= 200 && response.code < 300
details = sanitize(response.parsed_response.dig('errors', 0, 'message'))
message = "Error #{response.code}"
message += ": #{details}" if details
raise ConnectionError, message
rescue JSON::ParserError
raise ConnectionError, "Unable to parse the server response as JSON"
end
def auth
@auth ||= { username: username, password: token }
end
def accept_headers
@accept_headers ||= { 'Accept' => 'application/json' }
end
def post_headers
@post_headers ||= accept_headers.merge({ 'Content-Type' => 'application/json' })
end
def build_url(path)
return path if path.starts_with?(root_url)
url_join_paths(root_url, path)
end
def root_url
url_join_paths(base_uri, "/rest/api/#{api_version}")
end
def delete_url(resource, path)
if resource == :branches
url_join_paths(base_uri, "/rest/branch-utils/#{api_version}#{path}")
else
build_url(path)
end
end
# URI.join is stupid in that slashes are important:
#
# # URI.join('http://example.com/subpath', 'hello')
# => http://example.com/hello
#
# We really want http://example.com/subpath/hello
#
def url_join_paths(*paths)
paths.map { |path| strip_slashes(path) }.join(SEPARATOR)
end
def strip_slashes(path)
path = path[1..-1] if path.starts_with?(SEPARATOR)
path.chomp(SEPARATOR)
end
end
end
# frozen_string_literal: true
module BitbucketServer
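# Wraps a single page of a paginated Bitbucket Server REST response,
# exposing the decorated items plus the offset of the next page.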
class Page
attr_reader :attrs, :items
def initialize(raw, type)
@attrs = parse_attrs(raw)
@items = parse_values(raw, representation_class(type))
end
def next?
!attrs.fetch(:isLastPage, true)
end
def next
attrs.fetch(:nextPageStart)
end
private
def parse_attrs(raw)
raw.slice('size', 'nextPageStart', 'isLastPage').symbolize_keys
end
def parse_values(raw, bitbucket_rep_class)
return [] unless raw['values'] && raw['values'].is_a?(Array)
bitbucket_rep_class.decorate(raw['values'])
end
def representation_class(type)
BitbucketServer::Representation.const_get(type.to_s.camelize)
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Paginator
PAGE_LENGTH = 25
def initialize(connection, url, type)
@connection = connection
@type = type
@url = url
@page = nil
end
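# Returns the items on the next page, raising StopIteration once the
# last page has been consumed.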
def items
raise StopIteration unless has_next_page?
@page = fetch_next_page
@page.items
end
private
attr_reader :connection, :page, :url, :type
def has_next_page?
page.nil? || page.next?
end
def next_offset
page.nil? ? 0 : page.next
end
def fetch_next_page
parsed_response = connection.get(@url, start: next_offset, limit: PAGE_LENGTH)
Page.new(parsed_response, type)
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Activity < Representation::Base
def comment?
action == 'COMMENTED'
end
def inline_comment?
!!(comment? && comment_anchor)
end
def comment
return unless comment?
@comment ||=
if inline_comment?
PullRequestComment.new(raw)
else
Comment.new(raw)
end
end
# TODO Move this into MergeEvent
def merge_event?
action == 'MERGED'
end
def committer_user
commit.dig('committer', 'displayName')
end
def committer_email
commit.dig('committer', 'emailAddress')
end
def merge_timestamp
timestamp = commit['committerTimestamp']
self.class.convert_timestamp(timestamp)
end
def merge_commit
commit['id']
end
def created_at
self.class.convert_timestamp(created_date)
end
private
def commit
raw.fetch('commit', {})
end
def action
raw['action']
end
def comment_anchor
raw['commentAnchor']
end
def created_date
raw['createdDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Base
attr_reader :raw
def initialize(raw)
@raw = raw
end
def self.decorate(entries)
entries.map { |entry| new(entry) }
end
def self.convert_timestamp(time_usec)
Time.at(time_usec / 1000) if time_usec.is_a?(Integer)
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# A general comment with the structure:
# "comment": {
# "author": {
# "active": true,
# "displayName": "root",
# "emailAddress": "stanhu+bitbucket@gitlab.com",
# "id": 1,
# "links": {
# "self": [
# {
# "href": "http://localhost:7990/users/root"
# }
# ]
# },
# "name": "root",
# "slug": "root",
# "type": "NORMAL"
# }
# }
# }
class Comment < Representation::Base
attr_reader :parent_comment
CommentNode = Struct.new(:raw_comments, :parent)
def initialize(raw, parent_comment: nil)
super(raw)
@parent_comment = parent_comment
end
def id
raw_comment['id']
end
def author_username
author['displayName']
end
def author_email
author['emailAddress']
end
def note
raw_comment['text']
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(created_date)
end
# Bitbucket Server supports the ability to reply to any comment
# and create multiple threads. It represents these as a linked list
# of comments within comments. For example:
#
# "comments": [
# {
# "author" : ...
# "comments": [
# {
# "author": ...
#
# Since GitLab only supports a single thread, we flatten all these
# comments into a single discussion.
def comments
@comments ||= flatten_comments
end
private
# In order to provide context for each reply, we need to track
# the parent of each comment. This method works as follows:
#
# 1. Insert the root comment into the workset. The root element is the current note.
# 2. For each node in the workset:
# a. Examine if it has replies to that comment. If it does,
# insert that node into the workset.
# b. Parse that note into a Comment structure and add it to a flat list.
def flatten_comments
comments = raw_comment['comments']
workset =
if comments
[CommentNode.new(comments, self)]
else
[]
end
all_comments = []
until workset.empty?
node = workset.pop
parent = node.parent
node.raw_comments.each do |comment|
new_comments = comment.delete('comments')
current_comment = Comment.new({ 'comment' => comment }, parent_comment: parent)
all_comments << current_comment
workset << CommentNode.new(new_comments, current_comment) if new_comments
end
end
all_comments
end
def raw_comment
raw.fetch('comment', {})
end
def author
raw_comment['author']
end
def created_date
raw_comment['createdDate']
end
def updated_date
raw_comment['updatedDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class PullRequest < Representation::Base
def author
raw.dig('author', 'user', 'name')
end
def author_email
raw.dig('author', 'user', 'emailAddress')
end
def description
raw['description']
end
def iid
raw['id']
end
def state
case raw['state']
when 'MERGED'
'merged'
when 'DECLINED'
'closed'
else
'opened'
end
end
def merged?
state == 'merged'
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(updated_date)
end
def title
raw['title']
end
def source_branch_name
raw.dig('fromRef', 'id')
end
def source_branch_sha
raw.dig('fromRef', 'latestCommit')
end
def target_branch_name
raw.dig('toRef', 'id')
end
def target_branch_sha
raw.dig('toRef', 'latestCommit')
end
private
def created_date
raw['createdDate']
end
def updated_date
raw['updatedDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# An inline comment with the following structure that identifies
# the part of the diff:
#
# "commentAnchor": {
# "diffType": "EFFECTIVE",
# "fileType": "TO",
# "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
# "line": 1,
# "lineType": "ADDED",
# "orphaned": false,
# "path": "CHANGELOG.md",
# "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
# }
#
# More details in https://docs.atlassian.com/bitbucket-server/rest/5.12.0/bitbucket-rest.html.
class PullRequestComment < Comment
def from_sha
comment_anchor['fromHash']
end
def to_sha
comment_anchor['toHash']
end
def to?
file_type == 'TO'
end
def from?
file_type == 'FROM'
end
def added?
line_type == 'ADDED'
end
def removed?
line_type == 'REMOVED'
end
# There are three line comment types: added, removed, or context.
#
# 1. An added type means a new line was inserted, so there is no old position.
# 2. A removed type means a line was removed, so there is no new position.
# 3. A context type means the line was unmodified, so there is both a
# old and new position.
def new_pos
return if removed?
return unless line_position
line_position[1]
end
def old_pos
return if added?
return unless line_position
line_position[0]
end
def file_path
comment_anchor.fetch('path')
end
private
def file_type
comment_anchor['fileType']
end
def line_type
comment_anchor['lineType']
end
# Each comment contains the following information about the diff:
#
# hunks: [
# {
# segments: [
# {
# "lines": [
# {
# "commentIds": [ N ],
# "source": X,
# "destination": Y
# }, ...
# ] ....
#
# To determine the line position of a comment, we search all the lines
# entries until we find this comment ID.
def line_position
@line_position ||= diff_hunks.each do |hunk|
segments = hunk.fetch('segments', [])
segments.each do |segment|
lines = segment.fetch('lines', [])
lines.each do |line|
if line['commentIds']&.include?(id)
return [line['source'], line['destination']]
end
end
end
end
end
def comment_anchor
raw.fetch('commentAnchor', {})
end
def diff
raw.fetch('diff', {})
end
def diff_hunks
diff.fetch('hunks', [])
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Repo < Representation::Base
def initialize(raw)
super(raw)
end
def project_key
raw.dig('project', 'key')
end
def project_name
raw.dig('project', 'name')
end
def slug
raw['slug']
end
def browse_url
# The JSON response contains an array of 1 element. Not sure if there
# are cases where multiple links would be provided.
raw.dig('links', 'self').first.fetch('href')
end
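# Returns the HTTP(S) clone link; other protocols (e.g. SSH) are ignored.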
def clone_url
raw['links']['clone'].find { |link| link['name'].starts_with?('http') }.fetch('href')
end
def description
project['description']
end
def full_name
"#{project_name}/#{name}"
end
def issues_enabled?
true
end
def name
raw['name']
end
def valid?
raw['scmId'] == 'git'
end
def visibility_level
if project['public']
Gitlab::VisibilityLevel::PUBLIC
else
Gitlab::VisibilityLevel::PRIVATE
end
end
def project
raw['project']
end
def to_s
full_name
end
end
end
end
......@@ -46,7 +46,11 @@ module Gitlab
# arguments - The arguments to pass to the background migration's "perform"
# method.
def self.perform(class_name, arguments)
const_get(class_name).new.perform(*arguments)
migration_class_for(class_name).new.perform(*arguments)
end
def self.migration_class_for(class_name)
const_get(class_name)
end
end
end
module Gitlab
module BitbucketServerImport
class Importer
include Gitlab::ShellAdapter
attr_reader :recover_missing_commits
attr_reader :project, :project_key, :repository_slug, :client, :errors, :users
REMOTE_NAME = 'bitbucket_server'.freeze
BATCH_SIZE = 100
TempBranch = Struct.new(:name, :sha)
def self.imports_repository?
true
end
def self.refmap
[:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head']
end
# Unlike GitHub, you can't grab the commit SHAs for pull requests that
# have been closed but not merged even though Bitbucket has these
# commits internally. We can recover these pull requests by creating a
# branch with the Bitbucket REST API, but by default we turn this
# behavior off.
def initialize(project, recover_missing_commits: false)
@project = project
@recover_missing_commits = recover_missing_commits
@project_key = project.import_data.data['project_key']
@repository_slug = project.import_data.data['repo_slug']
@client = BitbucketServer::Client.new(project.import_data.credentials)
@formatter = Gitlab::ImportFormatter.new
@errors = []
@users = {}
@temp_branches = []
end
def execute
import_repository
import_pull_requests
delete_temp_branches
handle_errors
true
end
private
def handle_errors
return unless errors.any?
project.update_column(:import_error, {
message: 'The remote data could not be fully imported.',
errors: errors
}.to_json)
end
def gitlab_user_id(email)
find_user_id(email) || project.creator_id
end
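# Caches lookups, including misses, so each e-mail address only triggers
# one database query.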
def find_user_id(email)
return nil unless email
return users[email] if users.key?(email)
user = User.find_by_any_email(email, confirmed: true)
users[email] = user&.id
user&.id
end
def repo
@repo ||= client.repo(project_key, repository_slug)
end
def sha_exists?(sha)
project.repository.commit(sha)
end
def temp_branch_name(pull_request, suffix)
"gitlab/import/pull-request/#{pull_request.iid}/#{suffix}"
end
# This method restores required SHAs that GitLab needs to create diffs
# into branch names as the following:
#
# gitlab/import/pull-request/N/{to,from}
def restore_branches(pull_requests)
shas_to_restore = []
pull_requests.each do |pull_request|
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :from),
pull_request.source_branch_sha)
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :to),
pull_request.target_branch_sha)
end
# Create the branches on the Bitbucket Server first
created_branches = restore_branch_shas(shas_to_restore)
@temp_branches += created_branches
# Now sync the repository so we get the new branches
import_repository unless created_branches.empty?
end
def restore_branch_shas(shas_to_restore)
shas_to_restore.each_with_object([]) do |temp_branch, branches_created|
branch_name = temp_branch.name
sha = temp_branch.sha
next if sha_exists?(sha)
begin
client.create_branch(project_key, repository_slug, branch_name, sha)
branches_created << temp_branch
rescue BitbucketServer::Connection::ConnectionError => e
Rails.logger.warn("BitbucketServerImporter: Unable to recreate branch for SHA #{sha}: #{e}")
end
end
end
def import_repository
project.ensure_repository
project.repository.fetch_as_mirror(project.import_url, refmap: self.class.refmap, remote_name: REMOTE_NAME)
rescue Gitlab::Shell::Error, Gitlab::Git::RepositoryMirroring::RemoteError => e
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true
project.repository.expire_content_cache if project.repository_exists?
raise e.message
end
# Bitbucket Server keeps track of references for open pull requests in
# refs/heads/pull-requests, but closed and merged requests get moved
# into hidden internal refs under stash-refs/pull-requests. Unless the
# SHAs involved are at the tip of a branch or tag, there is no way to
# retrieve those commits from the server.
#
# To avoid losing history, we use the Bitbucket API to re-create the branch
# on the remote server. Then we have to issue a `git fetch` to download these
# branches.
def import_pull_requests
pull_requests = client.pull_requests(project_key, repository_slug).to_a
# Creating branches on the server and fetching the newly-created branches
# may take a number of network round-trips. Do this in batches so that we can
# avoid doing a git fetch for every new branch.
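# Illustrative sizing (assuming a BATCH_SIZE of 100; the constant is
# defined elsewhere): 250 pull requests need at most 3 restore-and-fetch
# rounds instead of up to 500 per-branch fetches.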
pull_requests.each_slice(BATCH_SIZE) do |batch|
restore_branches(batch) if recover_missing_commits
batch.each do |pull_request|
begin
import_bitbucket_pull_request(pull_request)
rescue StandardError => e
errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
end
end
end
end
def delete_temp_branches
@temp_branches.each do |branch|
begin
client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
project.repository.delete_branch(branch.name)
rescue BitbucketServer::Connection::ConnectionError => e
@errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
end
end
end
def import_bitbucket_pull_request(pull_request)
description = ''
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author_email)
description += pull_request.description if pull_request.description
source_branch_sha = pull_request.source_branch_sha
target_branch_sha = pull_request.target_branch_sha
author_id = gitlab_user_id(pull_request.author_email)
attributes = {
iid: pull_request.iid,
title: pull_request.title,
description: description,
source_project: project,
source_branch: Gitlab::Git.ref_name(pull_request.source_branch_name),
source_branch_sha: source_branch_sha,
target_project: project,
target_branch: Gitlab::Git.ref_name(pull_request.target_branch_name),
target_branch_sha: target_branch_sha,
state: pull_request.state,
author_id: author_id,
assignee_id: nil,
created_at: pull_request.created_at,
updated_at: pull_request.updated_at
}
merge_request = project.merge_requests.create!(attributes)
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
end
def import_pull_request_comments(pull_request, merge_request)
comments, other_activities = client.activities(project_key, repository_slug, pull_request.iid).partition(&:comment?)
merge_event = other_activities.find(&:merge_event?)
import_merge_event(merge_request, merge_event) if merge_event
inline_comments, pr_comments = comments.partition(&:inline_comment?)
import_inline_comments(inline_comments.map(&:comment), merge_request)
import_standalone_pr_comments(pr_comments.map(&:comment), merge_request)
end
def import_merge_event(merge_request, merge_event)
committer = merge_event.committer_email
user_id = gitlab_user_id(committer)
timestamp = merge_event.merge_timestamp
merge_request.update({ merge_commit_sha: merge_event.merge_commit })
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request)
metric.update(merged_by_id: user_id, merged_at: timestamp)
end
def import_inline_comments(inline_comments, merge_request)
inline_comments.each do |comment|
position = build_position(merge_request, comment)
parent = create_diff_note(merge_request, comment, position)
next unless parent&.persisted?
discussion_id = parent.discussion_id
comment.comments.each do |reply|
create_diff_note(merge_request, reply, position, discussion_id)
end
end
end
def create_diff_note(merge_request, comment, position, discussion_id = nil)
attributes = pull_request_comment_attributes(comment)
attributes.merge!(position: position, type: 'DiffNote')
attributes[:discussion_id] = discussion_id if discussion_id
note = merge_request.notes.build(attributes)
if note.valid?
note.save
return note
end
# Bitbucket Server lets users comment on any line, not just the lines
# shown in the diff. If we can't add the note as a DiffNote, fall back to
# creating a regular note.
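#
# For example (anchor values borrowed from the spec fixture), a comment
# on CHANGELOG.md old line 9 / new line 11 becomes a plain note starting:
#
#   *Comment on CHANGELOG.md:9 --> CHANGELOG.md:11*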
create_fallback_diff_note(merge_request, comment, position)
rescue StandardError => e
errors << { type: :pull_request, id: comment.id, errors: e.message }
nil
end
def create_fallback_diff_note(merge_request, comment, position)
attributes = pull_request_comment_attributes(comment)
note = "*Comment on"
note += " #{position.old_path}:#{position.old_line} -->" if position.old_line
note += " #{position.new_path}:#{position.new_line}" if position.new_line
note += "*\n\n#{comment.note}"
attributes[:note] = note
merge_request.notes.create!(attributes)
end
def build_position(merge_request, pr_comment)
params = {
diff_refs: merge_request.diff_refs,
old_path: pr_comment.file_path,
new_path: pr_comment.file_path,
old_line: pr_comment.old_pos,
new_line: pr_comment.new_pos
}
Gitlab::Diff::Position.new(params)
end
def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment|
begin
merge_request.notes.create!(pull_request_comment_attributes(comment))
comment.comments.each do |reply|
merge_request.notes.create!(pull_request_comment_attributes(reply))
end
rescue StandardError => e
errors << { type: :pull_request, iid: comment.id, errors: e.message }
end
end
end
def pull_request_comment_attributes(comment)
author = find_user_id(comment.author_email)
note = ''
unless author
author = project.creator_id
note = "*By #{comment.author_username} (#{comment.author_email})*\n\n"
end
note +=
# Provide some context for replying
if comment.parent_comment
"> #{comment.parent_comment.note.truncate(80)}\n\n#{comment.note}"
else
comment.note
end
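# For example, a reply to a parent comment "Does this line make sense?"
# (taken from the spec fixture) yields a note whose quoted parent is
# truncated to 80 characters:
#
#   > Does this line make sense?
#
#   It does.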
{
project: project,
note: note,
author_id: author,
created_at: comment.created_at,
updated_at: comment.updated_at
}
end
end
end
end
module Gitlab
module BitbucketServerImport
class ProjectCreator
attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data
def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data)
@project_key = project_key
@repo_slug = repo_slug
@repo = repo
@name = name
@namespace = namespace
@current_user = current_user
@session_data = session_data
end
def execute
::Projects::CreateService.new(
current_user,
name: name,
path: name,
description: repo.description,
namespace_id: namespace.id,
visibility_level: repo.visibility_level,
import_type: 'bitbucket_server',
import_source: repo.browse_url,
import_url: repo.clone_url,
import_data: {
credentials: session_data,
data: { project_key: project_key, repo_slug: repo_slug }
},
skip_wiki: true
).execute
end
end
end
end
......@@ -979,8 +979,8 @@ into similar problems in the future (e.g. when new tables are created).
# To not overload the worker too much we enforce a minimum interval both
# when scheduling and performing jobs.
if delay_interval < BackgroundMigrationWorker::MIN_INTERVAL
delay_interval = BackgroundMigrationWorker::MIN_INTERVAL
if delay_interval < BackgroundMigrationWorker.minimum_interval
delay_interval = BackgroundMigrationWorker.minimum_interval
end
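# Illustrative: if the configured minimum were 2 minutes (an assumed
# value for BackgroundMigrationWorker.minimum_interval), a requested
# delay_interval of 10.seconds would be raised to 2 minutes here.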
model_class.each_batch(of: batch_size) do |relation, index|
......
......@@ -10,7 +10,8 @@ module Gitlab
# We exclude `bare_repository` here as it has no associated import class
ImportTable = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
ImportSource.new('bitbucket', 'Bitbucket', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
......
......@@ -100,5 +100,9 @@ module Gitlab
)
}mx
end
def jira_transition_id_regex
@jira_transition_id_regex ||= /\d+/
end
end
end
......@@ -16,6 +16,7 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
<<<<<<< HEAD
msgid " and"
msgstr ""
......@@ -29,6 +30,11 @@ msgid_plural " improved on %d points"
msgstr[0] ""
msgstr[1] ""
=======
msgid " Status"
msgstr ""
>>>>>>> upstream/master
msgid "%d changed file"
msgid_plural "%d changed files"
msgstr[0] ""
......@@ -921,6 +927,7 @@ msgstr ""
msgid "Below you will find all the groups that are public."
msgstr ""
<<<<<<< HEAD
msgid "Billing"
msgstr ""
......@@ -985,6 +992,9 @@ msgid "BillingPlans|paid annually at %{price_per_year}"
msgstr ""
msgid "BillingPlans|per user"
=======
msgid "Bitbucket Server Import"
>>>>>>> upstream/master
msgstr ""
msgid "Bitbucket import"
......@@ -1253,12 +1263,15 @@ msgstr ""
msgid "Cannot modify managed Kubernetes cluster"
msgstr ""
<<<<<<< HEAD
msgid "Certificate fingerprint"
msgstr ""
msgid "Change Weight"
msgstr ""
=======
>>>>>>> upstream/master
msgid "Change template"
msgstr ""
......@@ -2666,6 +2679,9 @@ msgstr ""
msgid "Ends at (UTC)"
msgstr ""
msgid "Enter in your Bitbucket Server URL and personal access token below"
msgstr ""
msgid "Environments"
msgstr ""
......@@ -3022,6 +3038,9 @@ msgstr ""
msgid "From Bitbucket"
msgstr ""
msgid "From Bitbucket Server"
msgstr ""
msgid "From FogBugz"
msgstr ""
......@@ -3630,6 +3649,9 @@ msgstr ""
msgid "Import projects from Bitbucket"
msgstr ""
msgid "Import projects from Bitbucket Server"
msgstr ""
msgid "Import projects from FogBugz"
msgstr ""
......@@ -3639,6 +3661,9 @@ msgstr ""
msgid "Import projects from Google Code"
msgstr ""
msgid "Import repositories from Bitbucket Server"
msgstr ""
msgid "Import repositories from GitHub"
msgstr ""
......@@ -3958,6 +3983,9 @@ msgstr ""
msgid "List available repositories"
msgstr ""
msgid "List your Bitbucket Server repositories"
msgstr ""
msgid "List your GitHub repositories"
msgstr ""
......@@ -4935,9 +4963,12 @@ msgstr ""
msgid "Preview"
msgstr ""
<<<<<<< HEAD
msgid "Primary"
msgstr ""
=======
>>>>>>> upstream/master
msgid "Prioritize"
msgstr ""
......@@ -7647,6 +7678,9 @@ msgstr ""
msgid "here"
msgstr ""
msgid "https://your-bitbucket-server"
msgstr ""
msgid "import flow"
msgstr ""
......
require 'spec_helper'
describe Import::BitbucketServerController do
let(:user) { create(:user) }
let(:project_key) { 'test-project' }
let(:repo_slug) { 'some-repo' }
let(:client) { instance_double(BitbucketServer::Client) }
def assign_session_tokens
session[:bitbucket_server_url] = 'http://localhost:7990'
session[:bitbucket_server_username] = 'bitbucket'
session[:bitbucket_server_personal_access_token] = 'some-token'
end
before do
sign_in(user)
allow(controller).to receive(:bitbucket_server_import_enabled?).and_return(true)
end
describe 'GET new' do
render_views
it 'shows the input form' do
get :new
expect(response.body).to have_text('Bitbucket Server URL')
end
end
describe 'POST create' do
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
repo = double(name: 'my-project')
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(repo)
assign_session_tokens
end
set(:project) { create(:project) }
it 'returns the new project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: project))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(200)
end
it 'returns an error when an invalid project key is used' do
post :create, project: 'some&project'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when an invalid repository slug is used' do
post :create, project: 'some-project', repository: 'try*this'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be found' do
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(nil)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be saved' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: build(:project)))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it "returns an error when the server can't be contacted" do
expect(client).to receive(:repo).with(project_key, repo_slug).and_raise(BitbucketServer::Client::ServerError)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
end
describe 'POST configure' do
let(:token) { 'token' }
let(:username) { 'bitbucket-user' }
let(:url) { 'http://localhost:7990/bitbucket' }
it 'clears out existing session' do
post :configure
expect(session[:bitbucket_server_url]).to be_nil
expect(session[:bitbucket_server_username]).to be_nil
expect(session[:bitbucket_server_personal_access_token]).to be_nil
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
it 'sets the session variables' do
post :configure, personal_access_token: token, bitbucket_username: username, bitbucket_server_url: url
expect(session[:bitbucket_server_url]).to eq(url)
expect(session[:bitbucket_server_username]).to eq(username)
expect(session[:bitbucket_server_personal_access_token]).to eq(token)
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
end
describe 'GET status' do
render_views
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
@repo = double(slug: 'vim', project_key: 'asd', full_name: 'asd/vim', "valid?" => true, project_name: 'asd', browse_url: 'http://test', name: 'vim')
@invalid_repo = double(slug: 'invalid', project_key: 'foobar', full_name: 'asd/foobar', "valid?" => false, browse_url: 'http://bad-repo')
assign_session_tokens
end
it 'assigns repository categories' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id, import_source: 'foo/bar', import_status: 'finished')
expect(client).to receive(:repos).and_return([@repo, @invalid_repo])
get :status
expect(assigns(:already_added_projects)).to eq([created_project])
expect(assigns(:repos)).to eq([@repo])
expect(assigns(:incompatible_repos)).to eq([@invalid_repo])
end
end
describe 'GET jobs' do
before do
assign_session_tokens
end
it 'returns a list of imported projects' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id)
get :jobs
expect(json_response.count).to eq(1)
expect(json_response.first['id']).to eq(created_project.id)
expect(json_response.first['import_status']).to eq('none')
end
end
end
......@@ -118,6 +118,10 @@ FactoryBot.define do
end
end
after(:create) do |merge_request, evaluator|
merge_request.cache_merge_request_closes_issues!
end
factory :merged_merge_request, traits: [:merged]
factory :closed_merge_request, traits: [:closed]
factory :reopened_merge_request, traits: [:opened]
......
......@@ -53,14 +53,14 @@ describe 'Explore Groups page', :js do
expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("1")
# Archive project
empty_project.archive!
::Projects::UpdateService.new(empty_project, user, archived: true).execute
visit explore_groups_path
# Check project count
expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("0")
# Unarchive project
empty_project.unarchive!
::Projects::UpdateService.new(empty_project, user, archived: false).execute
visit explore_groups_path
# Check project count
......
......@@ -52,6 +52,7 @@ describe 'Group issues page' do
context 'issues list', :nested_groups do
let(:subgroup) { create(:group, parent: group) }
let(:subgroup_project) { create(:project, :public, group: subgroup)}
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group).user }
let!(:issue) { create(:issue, project: project, title: 'root group issue') }
let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
......@@ -67,7 +68,7 @@ describe 'Group issues page' do
context 'when project is archived' do
before do
project.archive!
::Projects::UpdateService.new(project, user_in_group, archived: true).execute
end
it 'does not render issue' do
......
......@@ -55,7 +55,7 @@ describe LabelsFinder do
context 'filtering by group_id' do
it 'returns labels available for any non-archived project within the group' do
group_1.add_developer(user)
project_1.archive!
::Projects::UpdateService.new(project_1, user, archived: true).execute
finder = described_class.new(user, group_id: group_1.id)
expect(finder.execute).to eq [group_label_2, group_label_1, project_label_5]
......
......@@ -36,7 +36,7 @@ describe MoveToProjectFinder do
it 'does not return archived projects' do
reporter_project.add_reporter(user)
reporter_project.archive!
::Projects::UpdateService.new(reporter_project, user, archived: true).execute
other_reporter_project = create(:project)
other_reporter_project.add_reporter(user)
......
{
"isLastPage": true,
"limit": 25,
"size": 8,
"start": 0,
"values": [
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530164016725,
"id": 11,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [
{
"anchor": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"createdDate": 1530164016725,
"id": 11,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"text": "Ok",
"type": "COMMENT",
"updatedDate": 1530164016725,
"version": 0
},
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"createdDate": 1530164026000,
"id": 1,
"permittedOperations": {
"deletable": true,
"editable": true,
"transitionable": true
},
"state": "OPEN",
"text": "here's a task"
}
],
"text": "Ok",
"updatedDate": 1530164016725,
"version": 0
},
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530165543990,
"id": 12,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "hi",
"updatedDate": 1530165543990,
"version": 0
}
],
"createdDate": 1530164013718,
"id": 10,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Hello world",
"updatedDate": 1530164013718,
"version": 0
},
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530165549932,
"id": 13,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "hello",
"updatedDate": 1530165549932,
"version": 0
}
],
"createdDate": 1530161499144,
"id": 9,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "is this a new line?",
"updatedDate": 1530161499144,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "TO",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 1,
"lineType": "ADDED",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530161499144,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 11,
"segments": [
{
"lines": [
{
"commentIds": [
9
],
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 9,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 19,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530053198463,
"id": 7,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "What about this line?",
"updatedDate": 1530053198463,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "FROM",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 9,
"lineType": "CONTEXT",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530053198463,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 12,
"segments": [
{
"lines": [
{
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"commentIds": [
7
],
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
},
{
"destination": 12,
"line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
"source": 10,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 10,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 14,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530143330513,
"id": 8,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "How about this?",
"updatedDate": 1530143330513,
"version": 0
}
],
"createdDate": 1530053193795,
"id": 6,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "It does.",
"updatedDate": 1530053193795,
"version": 0
}
],
"createdDate": 1530053187904,
"id": 5,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Does this line make sense?",
"updatedDate": 1530053187904,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "FROM",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 3,
"lineType": "CONTEXT",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530053187904,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 12,
"segments": [
{
"lines": [
{
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"commentIds": [
5
],
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
},
{
"destination": 12,
"line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
"source": 10,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 10,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 12,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529813304164,
"id": 4,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Hello world",
"updatedDate": 1529813304164,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529813304164,
"id": 11,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "MERGED",
"commit": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"authorTimestamp": 1529727872000,
"committer": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"committerTimestamp": 1529727872000,
"displayId": "839fa9a2d43",
"id": "839fa9a2d434eb697815b8fcafaecc51accfdbbc",
"message": "Merge pull request #1 in TEST/rouge from root/CHANGELOGmd-1529725646923 to master\n\n* commit '66fbe6a097803f0acb7342b19563f710657ce5a2':\n CHANGELOG.md edited online with Bitbucket",
"parents": [
{
"author": {
"emailAddress": "dblessing@users.noreply.github.com",
"name": "Drew Blessing"
},
"authorTimestamp": 1529604583000,
"committer": {
"emailAddress": "noreply@github.com",
"name": "GitHub"
},
"committerTimestamp": 1529604583000,
"displayId": "c5f4288162e",
"id": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"message": "Merge pull request #949 from jneen/dblessing-patch-1\n\nAdd 'obj-c', 'obj_c' as ObjectiveC aliases",
"parents": [
{
"displayId": "ea7675f741e",
"id": "ea7675f741ee28f3f177ff32a9bde192742ffc59"
},
{
"displayId": "386b95a977b",
"id": "386b95a977b331e267497aa5206861774656f0c5"
}
]
},
{
"author": {
"emailAddress": "test.user@example.com",
"name": "root"
},
"authorTimestamp": 1529725651000,
"committer": {
"emailAddress": "test.user@example.com",
"name": "root"
},
"committerTimestamp": 1529725651000,
"displayId": "66fbe6a0978",
"id": "66fbe6a097803f0acb7342b19563f710657ce5a2",
"message": "CHANGELOG.md edited online with Bitbucket",
"parents": [
{
"displayId": "c5f4288162e",
"id": "c5f4288162e2e6218180779c7f6ac1735bb56eab"
}
]
}
]
},
"createdDate": 1529727872302,
"id": 7,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529813297478,
"id": 3,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "This is a thread",
"updatedDate": 1529813297478,
"version": 0
}
],
"createdDate": 1529725692591,
"id": 2,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "What about this?",
"updatedDate": 1529725692591,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529725692591,
"id": 6,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529725685910,
"id": 1,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "This is a test.\n\n[analyze.json](attachment:1/1f32f09d97%2Fanalyze.json)\n",
"updatedDate": 1529725685910,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529725685910,
"id": 5,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "OPENED",
"createdDate": 1529725657542,
"id": 4,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
}
]
}
{
"author":{
"approved":false,
"role":"AUTHOR",
"status":"UNAPPROVED",
"user":{
"active":true,
"displayName":"root",
"emailAddress":"joe.montana@49ers.com",
"id":1,
"links":{
"self":[
{
"href":"http://localhost:7990/users/root"
}
]
},
"name":"root",
"slug":"root",
"type":"NORMAL"
}
},
"closed":true,
"closedDate":1530600648850,
"createdDate":1530600635690,
"description":"Test",
"fromRef":{
"displayId":"root/CODE_OF_CONDUCTmd-1530600625006",
"id":"refs/heads/root/CODE_OF_CONDUCTmd-1530600625006",
"latestCommit":"074e2b4dddc5b99df1bf9d4a3f66cfc15481fdc8",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"id":7,
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/pull-requests/7"
}
]
},
"locked":false,
"open":false,
"participants":[
],
"properties":{
"commentCount":1,
"openTaskCount":0,
"resolvedTaskCount":0
},
"reviewers":[
],
"state":"MERGED",
"title":"Added a new line",
"toRef":{
"displayId":"master",
"id":"refs/heads/master",
"latestCommit":"839fa9a2d434eb697815b8fcafaecc51accfdbbc",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"updatedDate":1530600648850,
"version":2
}
......@@ -31,6 +31,44 @@ describe NamespacesHelper do
expect(options).to include(user.name)
end
it 'avoids duplicate groups when extra_group is used' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(user_group.id, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options.scan("data-name=\"#{admin_group.name}\"").count).to eq(1)
expect(options).to include(admin_group.name)
end
it 'selects existing group' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: user_group)
expect(options).to include("selected=\"selected\" value=\"#{user_group.id}\"")
expect(options).to include(admin_group.name)
end
it 'selects the new group by default' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group'))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"-1\"")
end
it 'falls back to current user selection' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"#{user.namespace.id}\"")
end
it 'returns only groups if groups_only option is true' do
allow(helper).to receive(:current_user).and_return(user)
......
require 'spec_helper'
describe BitbucketServer::Client do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }
let(:repo_slug) { 'my-repo' }
let(:headers) { { "Content-Type" => "application/json" } }
subject { described_class.new(options) }
describe '#pull_requests' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests?state=ALL" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :pull_request)
subject.pull_requests(project, repo_slug)
end
it 'throws an exception when connection fails' do
allow(BitbucketServer::Collection).to receive(:new).and_raise(OpenSSL::SSL::SSLError)
expect { subject.pull_requests(project, repo_slug) }.to raise_error(described_class::ServerError)
end
end
describe '#activities' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests/1/activities" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :activity)
subject.activities(project, repo_slug, 1)
end
end
describe '#repo' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}" }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo" }
it 'requests a specific repository' do
stub_request(:get, url).to_return(status: 200, headers: headers, body: '{}')
subject.repo(project, repo_slug)
expect(WebMock).to have_requested(:get, url)
end
end
describe '#repos' do
let(:path) { "/repos" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :repo)
subject.repos
end
end
describe '#create_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to create a branch' do
stub_request(:post, url).to_return(status: 204, headers: headers, body: '{}')
subject.create_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:post, url)
end
end
describe '#delete_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/branch-utils/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to delete a branch' do
stub_request(:delete, url).to_return(status: 204, headers: headers, body: '{}')
subject.delete_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:delete, url)
end
end
end
require 'spec_helper'
describe BitbucketServer::Connection do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
let(:url) { 'https://test:7990/rest/api/1.0/test?something=1' }
subject { described_class.new(options) }
describe '#get' do
it 'returns JSON body' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.get(url, { something: 1 })).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception if the response is not JSON' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
it 'returns JSON body' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.post(url, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) { }
it 'returns JSON body' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
end
end
end
end
require 'spec_helper'
describe BitbucketServer::Page do
let(:response) { { 'values' => [{ 'description' => 'Test' }], 'isLastPage' => false, 'nextPageStart' => 2 } }
before do
# Autoloading hack
BitbucketServer::Representation::PullRequest.new({})
end
describe '#items' do
it 'returns collection of needed objects' do
page = described_class.new(response, :pull_request)
expect(page.items.first).to be_a(BitbucketServer::Representation::PullRequest)
expect(page.items.count).to eq(1)
end
end
describe '#attrs' do
it 'returns attributes' do
page = described_class.new(response, :pull_request)
expect(page.attrs.keys).to include(:isLastPage, :nextPageStart)
end
end
describe '#next?' do
it 'returns true' do
page = described_class.new(response, :pull_request)
expect(page.next?).to be_truthy
end
it 'returns false' do
response['isLastPage'] = true
response.delete('nextPageStart')
page = described_class.new(response, :pull_request)
expect(page.next?).to be_falsey
end
end
describe '#next' do
it 'returns next attribute' do
page = described_class.new(response, :pull_request)
expect(page.next).to eq(2)
end
end
end
require 'spec_helper'
describe BitbucketServer::Paginator do
let(:last_page) { double(:page, next?: false, items: ['item_2']) }
let(:first_page) { double(:page, next?: true, next: last_page, items: ['item_1']) }
let(:connection) { instance_double(BitbucketServer::Connection) }
describe '#items' do
let(:paginator) { described_class.new(connection, 'http://more-data', :pull_request) }
let(:page_attrs) { { 'isLastPage' => false, 'nextPageStart' => 1 } }
it 'returns items and raises StopIteration in the end' do
allow(paginator).to receive(:fetch_next_page).and_return(first_page)
expect(paginator.items).to match(['item_1'])
allow(paginator).to receive(:fetch_next_page).and_return(last_page)
expect(paginator.items).to match(['item_2'])
allow(paginator).to receive(:fetch_next_page).and_return(nil)
expect { paginator.items }.to raise_error(StopIteration)
end
it 'calls the connection with different offsets' do
expect(connection).to receive(:get).with('http://more-data', start: 0, limit: BitbucketServer::Paginator::PAGE_LENGTH).and_return(page_attrs)
expect(paginator.items).to eq([])
expect(connection).to receive(:get).with('http://more-data', start: 1, limit: BitbucketServer::Paginator::PAGE_LENGTH).and_return({})
expect(paginator.items).to eq([])
expect { paginator.items }.to raise_error(StopIteration)
end
end
end
require 'spec_helper'
describe BitbucketServer::Representation::Activity do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:inline_comment) { activities.first }
let(:comment) { activities[3] }
let(:merge_event) { activities[4] }
describe 'regular comment' do
subject { described_class.new(comment) }
it { expect(subject.comment?).to be_truthy }
it { expect(subject.inline_comment?).to be_falsey }
it { expect(subject.comment).to be_a(BitbucketServer::Representation::Comment) }
it { expect(subject.created_at).to be_a(Time) }
end
describe 'inline comment' do
subject { described_class.new(inline_comment) }
it { expect(subject.comment?).to be_truthy }
it { expect(subject.inline_comment?).to be_truthy }
it { expect(subject.comment).to be_a(BitbucketServer::Representation::PullRequestComment) }
it { expect(subject.created_at).to be_a(Time) }
end
describe 'merge event' do
subject { described_class.new(merge_event) }
it { expect(subject.comment?).to be_falsey }
it { expect(subject.inline_comment?).to be_falsey }
it { expect(subject.committer_user).to eq('root') }
it { expect(subject.committer_email).to eq('test.user@example.com') }
it { expect(subject.merge_timestamp).to be_a(Time) }
it { expect(subject.created_at).to be_a(Time) }
it { expect(subject.merge_commit).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
end
require 'spec_helper'
describe BitbucketServer::Representation::Comment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.first }
subject { described_class.new(comment) }
describe '#id' do
it { expect(subject.id).to eq(9) }
end
describe '#author_username' do
it { expect(subject.author_username).to eq('root') }
end
describe '#author_email' do
it { expect(subject.author_email).to eq('test.user@example.com') }
end
describe '#note' do
it { expect(subject.note).to eq('is this a new line?') }
end
describe '#created_at' do
it { expect(subject.created_at).to be_a(Time) }
end
describe '#updated_at' do
it { expect(subject.created_at).to be_a(Time) }
end
describe '#comments' do
it { expect(subject.comments.count).to eq(4) }
it { expect(subject.comments).to all(be_a(described_class)) }
it { expect(subject.comments.map(&:note)).to match_array(["Hello world", "Ok", "hello", "hi"]) }
# The thread should look like:
#
# is this a new line? (subject)
# -> Hello world (first)
#    -> Ok (third)
#    -> hi (fourth)
# -> hello (second)
it 'comments have the right parent' do
first, second, third, fourth = subject.comments[0..3]
expect(subject.parent_comment).to be_nil
expect(first.parent_comment).to eq(subject)
expect(second.parent_comment).to eq(subject)
expect(third.parent_comment).to eq(first)
expect(fourth.parent_comment).to eq(first)
end
end
end
require 'spec_helper'
describe BitbucketServer::Representation::PullRequestComment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.second }
subject { described_class.new(comment) }
describe '#id' do
it { expect(subject.id).to eq(7) }
end
describe '#from_sha' do
it { expect(subject.from_sha).to eq('c5f4288162e2e6218180779c7f6ac1735bb56eab') }
end
describe '#to_sha' do
it { expect(subject.to_sha).to eq('a4c2164330f2549f67c13f36a93884cf66e976be') }
end
describe '#to?' do
it { expect(subject.to?).to be_falsey }
end
describe '#from?' do
it { expect(subject.from?).to be_truthy }
end
describe '#added?' do
it { expect(subject.added?).to be_falsey }
end
describe '#removed?' do
it { expect(subject.removed?).to be_falsey }
end
describe '#new_pos' do
it { expect(subject.new_pos).to eq(11) }
end
describe '#old_pos' do
it { expect(subject.old_pos).to eq(9) }
end
describe '#file_path' do
it { expect(subject.file_path).to eq('CHANGELOG.md') }
end
end
require 'spec_helper'
describe BitbucketServer::Representation::PullRequest do
let(:sample_data) { JSON.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
subject { described_class.new(sample_data) }
describe '#author' do
it { expect(subject.author).to eq('root') }
end
describe '#author_email' do
it { expect(subject.author_email).to eq('joe.montana@49ers.com') }
end
describe '#description' do
it { expect(subject.description).to eq('Test') }
end
describe '#iid' do
it { expect(subject.iid).to eq(7) }
end
describe '#state' do
it { expect(subject.state).to eq('merged') }
context 'declined pull requests' do
before do
sample_data['state'] = 'DECLINED'
end
it 'returns closed' do
expect(subject.state).to eq('closed')
end
end
context 'open pull requests' do
before do
sample_data['state'] = 'OPEN'
end
it 'returns open' do
expect(subject.state).to eq('opened')
end
end
end
describe '#merged?' do
it { expect(subject.merged?).to be_truthy }
end
describe '#created_at' do
it { expect(subject.created_at.to_i).to eq(sample_data['createdDate'] / 1000) }
end
describe '#updated_at' do
it { expect(subject.updated_at.to_i).to eq(sample_data['updatedDate'] / 1000) }
end
describe '#title' do
it { expect(subject.title).to eq('Added a new line') }
end
describe '#source_branch_name' do
it { expect(subject.source_branch_name).to eq('refs/heads/root/CODE_OF_CONDUCTmd-1530600625006') }
end
describe '#source_branch_sha' do
it { expect(subject.source_branch_sha).to eq('074e2b4dddc5b99df1bf9d4a3f66cfc15481fdc8') }
end
describe '#target_branch_name' do
it { expect(subject.target_branch_name).to eq('refs/heads/master') }
end
describe '#target_branch_sha' do
it { expect(subject.target_branch_sha).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
end
require 'spec_helper'
describe BitbucketServer::Representation::Repo do
let(:sample_data) do
<<~DATA
{
"slug": "rouge",
"id": 1,
"name": "rouge",
"scmId": "git",
"state": "AVAILABLE",
"statusMessage": "Available",
"forkable": true,
"project": {
"key": "TEST",
"id": 1,
"name": "test",
"description": "Test",
"public": false,
"type": "NORMAL",
"links": {
"self": [
{
"href": "http://localhost:7990/projects/TEST"
}
]
}
},
"public": false,
"links": {
"clone": [
{
"href": "http://root@localhost:7990/scm/test/rouge.git",
"name": "http"
},
{
"href": "ssh://git@localhost:7999/test/rouge.git",
"name": "ssh"
}
],
"self": [
{
"href": "http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
}
}
DATA
end
subject { described_class.new(JSON.parse(sample_data)) }
describe '#project_key' do
it { expect(subject.project_key).to eq('TEST') }
end
describe '#project_name' do
it { expect(subject.project_name).to eq('test') }
end
describe '#slug' do
it { expect(subject.slug).to eq('rouge') }
end
describe '#browse_url' do
it { expect(subject.browse_url).to eq('http://localhost:7990/projects/TEST/repos/rouge/browse') }
end
describe '#clone_url' do
it { expect(subject.clone_url).to eq('http://root@localhost:7990/scm/test/rouge.git') }
end
describe '#description' do
it { expect(subject.description).to eq('Test') }
end
describe '#full_name' do
it { expect(subject.full_name).to eq('test/rouge') }
end
end
require 'spec_helper'
describe Gitlab::BitbucketServerImport::Importer do
include ImportSpecHelper
let(:project) { create(:project, :repository, import_url: 'http://my-bitbucket') }
let(:now) { Time.now.utc.change(usec: 0) }
let(:project_key) { 'TEST' }
let(:repo_slug) { 'rouge' }
let(:sample) { RepoHelpers.sample_compare }
subject { described_class.new(project, recover_missing_commits: true) }
before do
data = project.create_or_update_import_data(
data: { project_key: project_key, repo_slug: repo_slug },
credentials: { base_uri: 'http://my-bitbucket', user: 'bitbucket', password: 'test' }
)
data.save
project.save
end
describe '#import_repository' do
before do
expect(subject).to receive(:import_pull_requests)
expect(subject).to receive(:delete_temp_branches)
end
it 'adds a remote' do
expect(project.repository).to receive(:fetch_as_mirror)
.with('http://bitbucket:test@my-bitbucket',
refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head'],
remote_name: 'bitbucket_server')
subject.execute
end
end
describe '#import_pull_requests' do
before do
allow(subject).to receive(:import_repository)
allow(subject).to receive(:delete_temp_branches)
allow(subject).to receive(:restore_branches)
pull_request = instance_double(
BitbucketServer::Representation::PullRequest,
iid: 10,
source_branch_sha: sample.commits.last,
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: sample.commits.first,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
author: 'Test Author',
author_email: project.owner.email,
created_at: Time.now,
updated_at: Time.now,
merged?: true)
allow(subject.client).to receive(:pull_requests).and_return([pull_request])
@merge_event = instance_double(
BitbucketServer::Representation::Activity,
comment?: false,
merge_event?: true,
committer_email: project.owner.email,
merge_timestamp: now,
merge_commit: '12345678'
)
@pr_note = instance_double(
BitbucketServer::Representation::Comment,
note: 'Hello world',
author_email: 'unknown@gmail.com',
author_username: 'The Flash',
comments: [],
created_at: now,
updated_at: now,
parent_comment: nil)
@pr_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: false,
merge_event?: false,
comment: @pr_note)
end
it 'imports merge event' do
expect(subject.client).to receive(:activities).and_return([@merge_event])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.metrics.merged_by).to eq(project.owner)
expect(merge_request.metrics.merged_at).to eq(@merge_event.merge_timestamp)
expect(merge_request.merge_commit_sha).to eq('12345678')
end
it 'imports comments' do
expect(subject.client).to receive(:activities).and_return([@pr_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(1)
note = merge_request.notes.first
expect(note.note).to end_with(@pr_note.note)
expect(note.author).to eq(project.owner)
expect(note.created_at).to eq(@pr_note.created_at)
expect(note.updated_at).to eq(@pr_note.created_at)
end
it 'imports threaded discussions' do
reply = instance_double(
BitbucketServer::Representation::PullRequestComment,
author_email: 'someuser@gitlab.com',
author_username: 'Batman',
note: 'I agree',
created_at: now,
updated_at: now)
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
inline_note = instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'ADDED',
from_sha: sample.commits.first,
to_sha: sample.commits.last,
file_path: '.gitmodules',
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author_email: 'unknown@gmail.com',
author_username: 'Superman',
comments: [reply],
created_at: now,
updated_at: now,
parent_comment: nil)
allow(reply).to receive(:parent_comment).and_return(inline_note)
inline_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: true,
merge_event?: false,
comment: inline_note)
expect(subject.client).to receive(:activities).and_return([inline_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(2)
expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
notes = merge_request.notes.order(:id).to_a
start_note = notes.first
expect(start_note.type).to eq('DiffNote')
expect(start_note.note).to end_with(inline_note.note)
expect(start_note.created_at).to eq(inline_note.created_at)
expect(start_note.updated_at).to eq(inline_note.updated_at)
expect(start_note.position.base_sha).to eq(inline_note.from_sha)
expect(start_note.position.start_sha).to eq(inline_note.from_sha)
expect(start_note.position.head_sha).to eq(inline_note.to_sha)
expect(start_note.position.old_line).to be_nil
expect(start_note.position.new_line).to eq(inline_note.new_pos)
reply_note = notes.last
# Make sure author and reply context is included
expect(reply_note.note).to start_with("*By #{reply.author_username} (#{reply.author_email})*\n\n")
expect(reply_note.note).to end_with("> #{inline_note.note}\n\n#{reply.note}")
expect(reply_note.author).to eq(project.owner)
expect(reply_note.created_at).to eq(reply.created_at)
expect(reply_note.updated_at).to eq(reply.created_at)
expect(reply_note.position.base_sha).to eq(inline_note.from_sha)
expect(reply_note.position.start_sha).to eq(inline_note.from_sha)
expect(reply_note.position.head_sha).to eq(inline_note.to_sha)
expect(reply_note.position.old_line).to be_nil
expect(reply_note.position.new_line).to eq(inline_note.new_pos)
end
it 'falls back to comments if diff comments fail to validate' do
reply = instance_double(
BitbucketServer::Representation::Comment,
author_email: 'someuser@gitlab.com',
author_username: 'Aquaman',
note: 'I agree',
created_at: now,
updated_at: now)
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
inline_note = instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'REMOVED',
from_sha: sample.commits.first,
to_sha: sample.commits.last,
file_path: '.gitmodules',
old_pos: 8,
new_pos: 9,
note: 'This is a note with an invalid line position.',
author_email: project.owner.email,
author_username: 'Owner',
comments: [reply],
created_at: now,
updated_at: now,
parent_comment: nil)
inline_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: true,
merge_event?: false,
comment: inline_note)
allow(reply).to receive(:parent_comment).and_return(inline_note)
expect(subject.client).to receive(:activities).and_return([inline_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(2)
notes = merge_request.notes
expect(notes.first.note).to start_with('*Comment on .gitmodules')
expect(notes.second.note).to start_with('*Comment on .gitmodules')
end
end
describe 'inaccessible branches' do
let(:id) { 10 }
let(:temp_branch_from) { "gitlab/import/pull-request/#{id}/from" }
let(:temp_branch_to) { "gitlab/import/pull-request/#{id}/to" }
before do
pull_request = instance_double(
BitbucketServer::Representation::PullRequest,
iid: id,
source_branch_sha: '12345678',
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: '98765432',
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
author: 'Test Author',
author_email: project.owner.email,
created_at: Time.now,
updated_at: Time.now,
merged?: true)
expect(subject.client).to receive(:pull_requests).and_return([pull_request])
expect(subject.client).to receive(:activities).and_return([])
expect(subject).to receive(:import_repository).twice
end
it '#restore_branches' do
expect(subject).to receive(:restore_branches).and_call_original
expect(subject).to receive(:delete_temp_branches)
expect(subject.client).to receive(:create_branch)
.with(project_key, repo_slug,
temp_branch_from,
'12345678')
expect(subject.client).to receive(:create_branch)
.with(project_key, repo_slug,
temp_branch_to,
'98765432')
expect { subject.execute }.to change { MergeRequest.count }.by(1)
end
it '#delete_temp_branches' do
expect(subject.client).to receive(:create_branch).twice
expect(subject).to receive(:delete_temp_branches).and_call_original
expect(subject.client).to receive(:delete_branch)
.with(project_key, repo_slug,
temp_branch_from,
'12345678')
expect(subject.client).to receive(:delete_branch)
.with(project_key, repo_slug,
temp_branch_to,
'98765432')
expect(project.repository).to receive(:delete_branch).with(temp_branch_from)
expect(project.repository).to receive(:delete_branch).with(temp_branch_to)
expect { subject.execute }.to change { MergeRequest.count }.by(1)
end
end
end
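# Editor's sketch (not the shipped Gitlab::BitbucketServerImport::Importer):
# a minimal illustration of the temp-branch flow the 'inaccessible branches'
# specs above pin down. `client`, `project_key`, `repo_slug`, and the branch
# naming mirror the spec doubles; the class and helper names are assumptions.
class TempBranchHelper
  TempBranch = Struct.new(:name, :sha)

  def initialize(client, repository, project_key, repo_slug)
    @client = client
    @repository = repository
    @project_key = project_key
    @repo_slug = repo_slug
    @temp_branches = []
  end

  # Recreate short-lived branches on the Bitbucket server for SHAs that are
  # no longer reachable from any ref, so a fetch can still import them.
  def restore_branches(pull_requests)
    pull_requests.each do |pr|
      @temp_branches << create_branch("gitlab/import/pull-request/#{pr.iid}/from", pr.source_branch_sha)
      @temp_branches << create_branch("gitlab/import/pull-request/#{pr.iid}/to", pr.target_branch_sha)
    end
  end

  # Drop the scaffolding branches from both the Bitbucket server and the
  # imported repository once the merge requests exist.
  def delete_temp_branches
    @temp_branches.each do |branch|
      @client.delete_branch(@project_key, @repo_slug, branch.name, branch.sha)
      @repository.delete_branch(branch.name)
    end
  end

  private

  def create_branch(name, sha)
    @client.create_branch(@project_key, @repo_slug, name, sha)
    TempBranch.new(name, sha)
  end
end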
......@@ -92,6 +92,7 @@ merge_requests:
- merge_request_diff
- events
- merge_requests_closing_issues
- cached_closes_issues
- metrics
- approvals
- approvers
......
......@@ -6,7 +6,8 @@ describe Gitlab::ImportSources do
expected =
{
'GitHub' => 'github',
'Bitbucket' => 'bitbucket',
'Bitbucket Cloud' => 'bitbucket',
'Bitbucket Server' => 'bitbucket_server',
'GitLab.com' => 'gitlab',
'Google Code' => 'google_code',
'FogBugz' => 'fogbugz',
......@@ -26,6 +27,7 @@ describe Gitlab::ImportSources do
%w(
github
bitbucket
bitbucket_server
gitlab
google_code
fogbugz
......@@ -45,6 +47,7 @@ describe Gitlab::ImportSources do
%w(
github
bitbucket
bitbucket_server
gitlab
google_code
fogbugz
......@@ -60,6 +63,7 @@ describe Gitlab::ImportSources do
import_sources = {
'github' => Gitlab::GithubImport::ParallelImporter,
'bitbucket' => Gitlab::BitbucketImport::Importer,
'bitbucket_server' => Gitlab::BitbucketServerImport::Importer,
'gitlab' => Gitlab::GitlabImport::Importer,
'google_code' => Gitlab::GoogleCodeImport::Importer,
'fogbugz' => Gitlab::FogbugzImport::Importer,
......@@ -79,7 +83,8 @@ describe Gitlab::ImportSources do
describe '.title' do
import_sources = {
'github' => 'GitHub',
'bitbucket' => 'Bitbucket',
'bitbucket' => 'Bitbucket Cloud',
'bitbucket_server' => 'Bitbucket Server',
'gitlab' => 'GitLab.com',
'google_code' => 'Google Code',
'fogbugz' => 'FogBugz',
......@@ -97,7 +102,7 @@ describe Gitlab::ImportSources do
end
describe 'imports_repository? checker' do
let(:allowed_importers) { %w[github gitlab_project] }
let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
......
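# Editor's sketch, assuming a simple lookup table like the one these specs
# exercise; ImportSource and IMPORT_TABLE are illustrative names, not the
# real Gitlab::ImportSources internals.
ImportSource = Struct.new(:name, :title, :importer)

IMPORT_TABLE = [
  ImportSource.new('bitbucket', 'Bitbucket Cloud', 'Gitlab::BitbucketImport::Importer'),
  ImportSource.new('bitbucket_server', 'Bitbucket Server', 'Gitlab::BitbucketServerImport::Importer')
].freeze

def title_for(name)
  IMPORT_TABLE.find { |source| source.name == name }&.title
end

title_for('bitbucket_server') # => "Bitbucket Server"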
......@@ -27,11 +27,11 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do
migrate!
expect(BackgroundMigrationWorker.jobs[0]['args']).to eq([described_class::MIGRATION, [1, 2]])
expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(5.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs[0]['at']).to eq(2.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs[1]['args']).to eq([described_class::MIGRATION, [3, 4]])
expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(10.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs[1]['at']).to eq(4.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs[2]['args']).to eq([described_class::MIGRATION, [5, 5]])
expect(BackgroundMigrationWorker.jobs[2]['at']).to eq(15.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs[2]['at']).to eq(6.minutes.from_now.to_f)
expect(BackgroundMigrationWorker.jobs.size).to eq 3
end
end
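# Editor's sketch of the staggered scheduling these revised expectations
# encode: batch N is queued N * minimum_interval seconds out, and the
# interval dropped from 5 to 2 minutes. The 'Foo' migration name and batch
# ranges are illustrative, borrowed from the worker spec further down.
batches = [[1, 2], [3, 4], [5, 5]]

batches.each_with_index do |(start_id, end_id), index|
  delay = (index + 1) * BackgroundMigrationWorker.minimum_interval
  BackgroundMigrationWorker.perform_in(delay, 'Foo', [start_id, end_id])
end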
......
......@@ -311,6 +311,51 @@ describe MergeRequest do
end
end
describe '#visible_closing_issues_for' do
let(:guest) { create(:user) }
let(:developer) { create(:user) }
let(:issue_1) { create(:issue, project: subject.source_project) }
let(:issue_2) { create(:issue, project: subject.source_project) }
let(:confidential_issue) { create(:issue, :confidential, project: subject.source_project) }
before do
subject.project.add_developer(subject.author)
subject.target_branch = subject.project.default_branch
commit = double('commit1', safe_message: "Fixes #{issue_1.to_reference} #{issue_2.to_reference} #{confidential_issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
end
it 'shows only allowed issues to guest' do
subject.project.add_guest(guest)
subject.cache_merge_request_closes_issues!
expect(subject.visible_closing_issues_for(guest)).to match_array([issue_1, issue_2])
end
it 'shows only allowed issues to developer' do
subject.project.add_developer(developer)
subject.cache_merge_request_closes_issues!
expect(subject.visible_closing_issues_for(developer)).to match_array([issue_1, confidential_issue, issue_2])
end
context 'when external issue tracker is enabled' do
before do
subject.project.has_external_issue_tracker = true
subject.project.save!
end
it 'calls the non-cached #closes_issues to retrieve data' do
expect(subject).to receive(:closes_issues)
expect(subject).not_to receive(:cached_closes_issues)
subject.visible_closing_issues_for
end
end
end
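# Editor's sketch consistent with the expectations above: use the cached
# association for the internal tracker, fall back to a live #closes_issues
# call when an external tracker is configured, and filter by visibility.
# The Ability.allowed? filter is an assumption about how confidential
# issues are hidden from guests.
def visible_closing_issues_for(current_user = author)
  if project.has_external_issue_tracker?
    closes_issues(current_user)
  else
    cached_closes_issues.select do |issue|
      Ability.allowed?(current_user, :read_issue, issue)
    end
  end
end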
describe '#cache_merge_request_closes_issues!' do
before do
subject.project.add_developer(subject.author)
......@@ -325,6 +370,25 @@ describe MergeRequest do
expect { subject.cache_merge_request_closes_issues!(subject.author) }.to change(subject.merge_requests_closing_issues, :count).by(1)
end
it 'does not cache closed issues when merge request is closed' do
issue = create :issue, project: subject.project
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:state).and_return("closed")
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
it 'does not cache closed issues when merge request is merged' do
issue = create :issue, project: subject.project
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:state).and_return("merged")
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
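# Editor's sketch of the guard these two specs add: skip (re)building the
# cache once a merge request is closed or merged. The transaction body is
# an assumption about how the cached association gets refreshed.
def cache_merge_request_closes_issues!(current_user = author)
  return if closed? || merged?

  transaction do
    merge_requests_closing_issues.delete_all

    closes_issues(current_user).each do |issue|
      next if issue.is_a?(ExternalIssue)

      merge_requests_closing_issues.create!(issue: issue)
    end
  end
end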
context 'when both internal and external issue trackers are enabled' do
before do
subject.project.has_external_issue_tracker = true
......@@ -634,6 +698,7 @@ describe MergeRequest do
allow(subject).to receive(:commits).and_return([commit])
allow(subject.project).to receive(:default_branch)
.and_return(subject.target_branch)
subject.cache_merge_request_closes_issues!
expect(subject.issues_mentioned_but_not_closing(subject.author)).to match_array([mentioned_issue])
end
......@@ -651,6 +716,8 @@ describe MergeRequest do
end
it 'detects issues mentioned in description but not closed' do
subject.cache_merge_request_closes_issues!
expect(subject.issues_mentioned_but_not_closing(subject.author).map(&:to_s)).to match_array(['TEST-2'])
end
end
......@@ -996,9 +1063,8 @@ describe MergeRequest do
subject.project.add_developer(subject.author)
subject.description = "This issue Closes #{issue.to_reference}"
allow(subject.project).to receive(:default_branch)
.and_return(subject.target_branch)
allow(subject.project).to receive(:default_branch).and_return(subject.target_branch)
subject.cache_merge_request_closes_issues!
expect(subject.merge_commit_message)
.to match("Closes #{issue.to_reference}")
......
# frozen_string_literal: true
require 'spec_helper'
describe Postgresql::ReplicationSlot, :postgresql do
describe '.lag_too_great?' do
it 'returns true when replication lag is too great' do
expect(described_class)
.to receive(:pluck)
.and_return([125.megabytes])
expect(described_class.lag_too_great?).to eq(true)
end
it 'returns false when more than one replica is sufficiently up to date' do
expect(described_class)
.to receive(:pluck)
.and_return([125.megabytes, 0.megabytes, 0.megabytes])
expect(described_class.lag_too_great?).to eq(false)
end
it 'returns false when replication lag is not too great' do
expect(described_class)
.to receive(:pluck)
.and_return([0.megabytes])
expect(described_class.lag_too_great?).to eq(false)
end
end
end
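# Editor's sketch of a lag check consistent with the three expectations
# above: lag is "too great" only when at most one replica is within the
# threshold. The 100 MB threshold and method shape are assumptions.
def lag_too_great?(lag_sizes, max = 100 * 1024 * 1024)
  lagging = lag_sizes.compact.count { |size| size >= max }

  # All replicas within the threshold: nothing to worry about.
  return false if lagging.zero?

  # Too great unless more than one replica is still keeping up.
  (lag_sizes.length - lagging) <= 1
end

lag_too_great?([125 * 1024 * 1024])       # => true
lag_too_great?([125 * 1024 * 1024, 0, 0]) # => false
lag_too_great?([0])                       # => false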
......@@ -30,6 +30,10 @@ describe JiraService do
describe "Associations" do
it { is_expected.to belong_to :project }
it { is_expected.to have_one :service_hook }
it { is_expected.to allow_value(nil).for(:jira_issue_transition_id) }
it { is_expected.to allow_value("1,2,3").for(:jira_issue_transition_id) }
it { is_expected.to allow_value("1;2;3").for(:jira_issue_transition_id) }
it { is_expected.not_to allow_value("a,b,cd").for(:jira_issue_transition_id) }
end
describe 'Validations' do
......@@ -124,7 +128,7 @@ describe JiraService do
url: 'http://jira.example.com',
username: 'gitlab_jira_username',
password: 'gitlab_jira_password',
jira_issue_transition_id: "custom-id"
jira_issue_transition_id: "999"
)
# These stubs are needed to test JiraService#close_issue.
......@@ -226,15 +230,52 @@ describe JiraService do
).once
end
context '#close_issue' do
it "logs exception when transition id is not valid" do
allow(Rails.logger).to receive(:info)
WebMock.stub_request(:post, @transitions_url).with(basic_auth: %w(gitlab_jira_username gitlab_jira_password)).and_raise("Bad Request")
@jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project))
expect(Rails.logger).to have_received(:info).with("JiraService Issue Transition failed message ERROR: http://jira.example.com - Bad Request")
end
it "calls the api with jira_issue_transition_id" do
@jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project))
expect(WebMock).to have_requested(:post, @transitions_url).with(
body: /custom-id/
body: /999/
).once
end
context "when have multiple transition ids" do
it "calls the api with transition ids separated by comma" do
allow(@jira_service).to receive_messages(jira_issue_transition_id: "1,2,3")
@jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project))
1.upto(3) do |transition_id|
expect(WebMock).to have_requested(:post, @transitions_url).with(
body: /#{transition_id}/
).once
end
end
it "calls the api with transition ids separated by semicolon" do
allow(@jira_service).to receive_messages(jira_issue_transition_id: "1;2;3")
@jira_service.close_issue(merge_request, ExternalIssue.new("JIRA-123", project))
1.upto(3) do |transition_id|
expect(WebMock).to have_requested(:post, @transitions_url).with(
body: /#{transition_id}/
).once
end
end
end
end
end
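# Editor's sketch of the multi-id handling pinned down above: the stored
# jira_issue_transition_id may contain several ids separated by "," or ";",
# and each one triggers its own transition request. transition_issue and
# post_transition are illustrative names, not the service's real API.
def transition_issue(issue, jira_issue_transition_id)
  # scan(/\d+/) accepts both comma- and semicolon-separated lists.
  jira_issue_transition_id.scan(/\d+/).each do |transition_id|
    post_transition(issue, transition_id)
  end
end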
describe '#test_settings' do
let(:jira_service) do
described_class.new(
......
......@@ -117,9 +117,9 @@ describe MergeRequestPresenter do
before do
project.add_developer(user)
allow(resource.project).to receive(:default_branch)
.and_return(resource.target_branch)
resource.cache_merge_request_closes_issues!
end
describe '#closing_issues_links' do
......
......@@ -429,7 +429,7 @@ describe API::Internal do
context "archived project" do
before do
project.add_developer(user)
project.archive!
::Projects::UpdateService.new(project, user, archived: true).execute
end
context "git pull" do
......
......@@ -975,6 +975,7 @@ describe API::MergeRequests do
issue = create(:issue, project: project)
mr = merge_request.tap do |mr|
mr.update_attribute(:description, "Closes #{issue.to_reference(mr.project)}")
mr.cache_merge_request_closes_issues!
end
get api("/projects/#{project.id}/merge_requests/#{mr.iid}/closes_issues", user)
......
......@@ -1743,7 +1743,7 @@ describe API::Projects do
context 'on an archived project' do
before do
project.archive!
::Projects::UpdateService.new(project, user, archived: true).execute
end
it 'remains archived' do
......@@ -1779,7 +1779,7 @@ describe API::Projects do
context 'on an archived project' do
before do
project.archive!
::Projects::UpdateService.new(project, user, archived: true).execute
end
it 'unarchives the project' do
......
......@@ -20,7 +20,7 @@ describe Issues::ReopenService do
end
end
context 'when user is authrized to reopen issue' do
context 'when user is authorized to reopen issue' do
let(:user) { create(:user) }
before do
......
......@@ -49,6 +49,7 @@ describe MergeRequests::MergeService do
issue = create :issue, project: project
commit = double('commit', safe_message: "Fixes #{issue.to_reference}")
allow(merge_request).to receive(:commits).and_return([commit])
merge_request.cache_merge_request_closes_issues!
service.execute(merge_request)
......
......@@ -53,7 +53,7 @@ describe MergeRequests::PostMergeService do
allow(project).to receive(:default_branch).and_return('foo')
issue = create(:issue, project: project)
allow(merge_request).to receive(:closes_issues).and_return([issue])
allow(merge_request).to receive(:visible_closing_issues_for).and_return([issue])
allow_any_instance_of(Issues::CloseService).to receive(:execute).with(issue, commit: merge_request).and_raise
expect { described_class.new(project, user, {}).execute(merge_request) }.to raise_error
......
......@@ -47,6 +47,12 @@ describe MergeRequests::ReopenService do
end
end
it 'caches merge request closing issues' do
expect(merge_request).to receive(:cache_merge_request_closes_issues!)
described_class.new(project, user, {}).execute(merge_request)
end
it 'updates metrics' do
metrics = merge_request.metrics
service = double(MergeRequestMetricsService)
......
......@@ -114,6 +114,17 @@ describe Projects::CreateService, '#execute' do
end
end
context 'import data' do
it 'stores import data and URL' do
import_data = { data: { 'test' => 'some data' } }
project = create_project(user, { name: 'test', import_url: 'http://import-url', import_data: import_data })
expect(project.import_data).to be_persisted
expect(project.import_data.data).to eq(import_data[:data])
expect(project.import_url).to eq('http://import-url')
end
end
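# Editor's sketch of what this context checks, assuming the create service
# copies an :import_data hash from the params onto the project before
# saving; build_import_data is the Rails builder for project.import_data,
# and @params is an illustrative name.
if @params[:import_data]
  project.build_import_data(data: @params[:import_data][:data])
end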
context 'builds_enabled global setting' do
let(:project) { create_project(user, opts) }
......
......@@ -3,6 +3,12 @@ require 'spec_helper'
describe BackgroundMigrationWorker, :sidekiq, :clean_gitlab_redis_shared_state do
let(:worker) { described_class.new }
describe '.minimum_interval' do
it 'returns 2 minutes' do
expect(described_class.minimum_interval).to eq(2.minutes.to_i)
end
end
describe '.perform' do
it 'performs a background migration' do
expect(Gitlab::BackgroundMigration)
......@@ -28,5 +34,51 @@ describe BackgroundMigrationWorker, :sidekiq, :clean_gitlab_redis_shared_state d
worker.perform('Foo', [10, 20])
end
it 'reschedules a migration if the database is not healthy' do
allow(worker)
.to receive(:always_perform?)
.and_return(false)
allow(worker)
.to receive(:healthy_database?)
.and_return(false)
expect(described_class)
.to receive(:perform_in)
.with(a_kind_of(Numeric), 'Foo', [10, 20])
worker.perform('Foo', [10, 20])
end
end
describe '#healthy_database?' do
context 'using MySQL', :mysql do
it 'returns true' do
expect(worker.healthy_database?).to eq(true)
end
end
context 'using PostgreSQL', :postgresql do
context 'when replication lag is too great' do
it 'returns false' do
allow(Postgresql::ReplicationSlot)
.to receive(:lag_too_great?)
.and_return(true)
expect(worker.healthy_database?).to eq(false)
end
end
context 'when replication lag is small enough' do
it 'returns true' do
allow(Postgresql::ReplicationSlot)
.to receive(:lag_too_great?)
.and_return(false)
expect(worker.healthy_database?).to eq(true)
end
end
end
end
end
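# Editor's sketch of the perform/reschedule loop described above: run the
# migration when the database looks healthy, otherwise push the job back by
# minimum_interval. Structure is illustrative, not the exact worker.
def perform(class_name, arguments = [])
  if always_perform? || healthy_database?
    Gitlab::BackgroundMigration.perform(class_name, arguments)
  else
    self.class.perform_in(self.class.minimum_interval, class_name, arguments)
  end
end

# MySQL has no replication-lag probe here; on PostgreSQL, defer to the
# replication slot check sketched earlier.
def healthy_database?
  return true unless Gitlab::Database.postgresql?

  !Postgresql::ReplicationSlot.lag_too_great?
end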