Commit d737abc5 authored by Tim Zallmann

Merge branch 'sh-support-bitbucket-server-import' into 'master'

Add support for Bitbucket Server imports

Closes #25393

See merge request gitlab-org/gitlab-ce!20164
parents 0ffd7931 ee851e58
@@ -36,6 +36,8 @@ class ImporterStatus {
const $targetField = $tr.find('.import-target');
const $namespaceInput = $targetField.find('.js-select-namespace option:selected');
const id = $tr.attr('id').replace('repo_', '');
+const repoData = $tr.data();
let targetNamespace;
let newName;
if ($namespaceInput.length > 0) {
@@ -45,12 +47,20 @@ class ImporterStatus {
}
$btn.disable().addClass('is-loading');
-return axios.post(this.importUrl, {
+this.id = id;
+let attributes = {
repo_id: id,
target_namespace: targetNamespace,
new_name: newName,
ci_cd_only: this.ciCdOnly,
-})
+};
+if (repoData) {
+attributes = Object.assign(repoData, attributes);
+}
+return axios.post(this.importUrl, attributes)
.then(({ data }) => {
const job = $(`tr#repo_${id}`);
job.attr('id', `project_${data.id}`);
@@ -70,6 +80,9 @@ class ImporterStatus {
.catch((error) => {
let details = error;
+const $statusField = $(`#repo_${this.id} .job-status`);
+$statusField.text(__('Failed'));
if (error.response && error.response.data && error.response.data.errors) {
details = error.response.data.errors;
}
...
@@ -35,6 +35,7 @@ class ApplicationController < ActionController::Base
:gitea_import_enabled?, :github_import_configured?,
:gitlab_import_enabled?, :gitlab_import_configured?,
:bitbucket_import_enabled?, :bitbucket_import_configured?,
+:bitbucket_server_import_enabled?,
:google_code_import_enabled?, :fogbugz_import_enabled?,
:git_import_enabled?, :gitlab_project_import_enabled?,
:manifest_import_enabled?
@@ -337,6 +338,10 @@ class ApplicationController < ActionController::Base
!Gitlab::CurrentSettings.import_sources.empty?
end
+def bitbucket_server_import_enabled?
+Gitlab::CurrentSettings.import_sources.include?('bitbucket_server')
+end
def github_import_enabled?
Gitlab::CurrentSettings.import_sources.include?('github')
end
...
# frozen_string_literal: true
class Import::BitbucketServerController < Import::BaseController
before_action :verify_bitbucket_server_import_enabled
before_action :bitbucket_auth, except: [:new, :configure]
before_action :validate_import_params, only: [:create]
# As a basic sanity check to prevent URL injection, restrict project
# keys and repository slugs to allowed characters. For Bitbucket:
#
# Project keys must start with a letter and may only consist of ASCII letters, numbers and underscores (A-Z, a-z, 0-9, _).
#
# Repository names are limited to 128 characters. They must start with a
# letter or number and may contain spaces, hyphens, underscores, and periods.
# (https://community.atlassian.com/t5/Answers-Developer-Questions/stash-repository-names/qaq-p/499054)
VALID_BITBUCKET_CHARS = /\A[\w\-_\.\s]+\z/
def new
end
def create
repo = bitbucket_client.repo(@project_key, @repo_slug)
unless repo
return render json: { errors: "Project #{@project_key}/#{@repo_slug} could not be found" }, status: :unprocessable_entity
end
project_name = params[:new_name].presence || repo.name
namespace_path = params[:new_namespace].presence || current_user.username
target_namespace = find_or_create_namespace(namespace_path, current_user)
if current_user.can?(:create_projects, target_namespace)
project = Gitlab::BitbucketServerImport::ProjectCreator.new(@project_key, @repo_slug, repo, project_name, target_namespace, current_user, credentials).execute
if project.persisted?
render json: ProjectSerializer.new.represent(project)
else
render json: { errors: project_save_error(project) }, status: :unprocessable_entity
end
else
render json: { errors: 'This namespace has already been taken! Please choose another one.' }, status: :unprocessable_entity
end
rescue BitbucketServer::Client::ServerError => e
render json: { errors: "Unable to connect to server: #{e}" }, status: :unprocessable_entity
end
def configure
session[personal_access_token_key] = params[:personal_access_token]
session[bitbucket_server_username_key] = params[:bitbucket_username]
session[bitbucket_server_url_key] = params[:bitbucket_server_url]
redirect_to status_import_bitbucket_server_path
end
def status
repos = bitbucket_client.repos
@repos, @incompatible_repos = repos.partition { |repo| repo.valid? }
@already_added_projects = find_already_added_projects('bitbucket_server')
already_added_projects_names = @already_added_projects.pluck(:import_source)
@repos.to_a.reject! { |repo| already_added_projects_names.include?(repo.browse_url) }
rescue BitbucketServer::Connection::ConnectionError, BitbucketServer::Client::ServerError => e
flash[:alert] = "Unable to connect to server: #{e}"
clear_session_data
redirect_to new_import_bitbucket_server_path
end
def jobs
render json: find_jobs('bitbucket_server')
end
private
def bitbucket_client
@bitbucket_client ||= BitbucketServer::Client.new(credentials)
end
def validate_import_params
@project_key = params[:project]
@repo_slug = params[:repository]
return render_validation_error('Missing project key') unless @project_key.present?
return render_validation_error('Missing repository slug') unless @repo_slug.present?
return render_validation_error('Invalid project key') unless @project_key =~ VALID_BITBUCKET_CHARS
return render_validation_error('Invalid repository slug') unless @repo_slug =~ VALID_BITBUCKET_CHARS
end
def render_validation_error(message)
render json: { errors: message }, status: :unprocessable_entity
end
def bitbucket_auth
unless session[bitbucket_server_url_key].present? &&
session[bitbucket_server_username_key].present? &&
session[personal_access_token_key].present?
redirect_to new_import_bitbucket_server_path
end
end
def verify_bitbucket_server_import_enabled
render_404 unless bitbucket_server_import_enabled?
end
def bitbucket_server_url_key
:bitbucket_server_url
end
def bitbucket_server_username_key
:bitbucket_server_username
end
def personal_access_token_key
:bitbucket_server_personal_access_token
end
def clear_session_data
session[bitbucket_server_url_key] = nil
session[bitbucket_server_username_key] = nil
session[personal_access_token_key] = nil
end
def credentials
{
base_uri: session[bitbucket_server_url_key],
user: session[bitbucket_server_username_key],
password: session[personal_access_token_key]
}
end
end
@@ -9,13 +9,23 @@ module NamespacesHelper
.includes(:route)
.order('routes.path')
users = [current_user.namespace]
+selected_id = selected
unless extra_group.nil? || extra_group.is_a?(Group)
extra_group = Group.find(extra_group) if Namespace.find(extra_group).kind == 'group'
end
-if extra_group && extra_group.is_a?(Group) && (!Group.exists?(name: extra_group.name) || Ability.allowed?(current_user, :read_group, extra_group))
+if extra_group && extra_group.is_a?(Group)
+extra_group = dedup_extra_group(extra_group)
+if Ability.allowed?(current_user, :read_group, extra_group)
+# Assign the value to an invalid primary ID so that the select box works
+extra_group.id = -1 unless extra_group.persisted?
+selected_id = extra_group.id if selected == :extra_group
groups |= [extra_group]
+else
+selected_id = current_user.namespace.id
+end
end
options = []
@@ -25,11 +35,11 @@ module NamespacesHelper
options << options_for_group(users, display_path: display_path, type: 'user')
if selected == :current_user && current_user.namespace
-selected = current_user.namespace.id
+selected_id = current_user.namespace.id
end
end
-grouped_options_for_select(options, selected)
+grouped_options_for_select(options, selected_id)
end
def namespace_icon(namespace, size = 40)
@@ -42,6 +52,17 @@ module NamespacesHelper
private
+# Many importers create a temporary Group, so use the real
+# group if one exists by that name to prevent duplicates.
+def dedup_extra_group(extra_group)
+unless extra_group.persisted?
+existing_group = Group.find_by(name: extra_group.name)
+extra_group = existing_group if existing_group&.persisted?
+end
+extra_group
+end
def options_for_group(namespaces, display_path:, type:)
group_label = type.pluralize
elements = namespaces.sort_by(&:human_name).map! do |n|
...
@@ -654,6 +654,8 @@ class Project < ActiveRecord::Base
project_import_data.credentials ||= {}
project_import_data.credentials = project_import_data.credentials.merge(credentials)
end
+project_import_data
end
def import?
...
- title = _('Bitbucket Server Import')
- page_title title
- breadcrumb_title title
- header_title "Projects", root_path
%h3.page-title
= icon 'bitbucket-square', text: _('Import repositories from Bitbucket Server')
%p
= _('Enter in your Bitbucket Server URL and personal access token below')
= form_tag configure_import_bitbucket_server_path, method: :post do
.form-group.row
= label_tag :bitbucket_server_url, 'Bitbucket Server URL', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_server_url, '', class: 'form-control append-right-8', placeholder: _('https://your-bitbucket-server'), size: 40
.form-group.row
= label_tag :bitbucket_username, 'Username', class: 'col-form-label col-md-2'
.col-md-4
= text_field_tag :bitbucket_username, '', class: 'form-control append-right-8', placeholder: _('username'), size: 40
.form-group.row
= label_tag :personal_access_token, 'Password/Personal Access Token', class: 'col-form-label col-md-2'
.col-md-4
= password_field_tag :personal_access_token, '', class: 'form-control append-right-8', placeholder: _('Personal Access Token'), size: 40
.form-actions
= submit_tag _('List your Bitbucket Server repositories'), class: 'btn btn-success'
- page_title 'Bitbucket Server import'
- header_title 'Projects', root_path
%h3.page-title
%i.fa.fa-bitbucket-square
= _('Import projects from Bitbucket Server')
- if @repos.any?
%p.light
= _('Select projects you want to import.')
.btn-group
- if @incompatible_repos.any?
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all compatible projects')
= icon('spinner spin', class: 'loading-icon')
- else
= button_tag class: 'btn btn-import btn-success js-import-all' do
= _('Import all projects')
= icon('spinner spin', class: 'loading-icon')
.btn-group
= link_to('Reconfigure', configure_import_bitbucket_server_path, class: 'btn btn-primary', method: :post)
.table-responsive.prepend-top-10
%table.table.import-jobs
%colgroup.import-jobs-from-col
%colgroup.import-jobs-to-col
%colgroup.import-jobs-status-col
%thead
%tr
%th= _('From Bitbucket Server')
%th= _('To GitLab')
%th= _(' Status')
%tbody
- @already_added_projects.each do |project|
%tr{ id: "project_#{project.id}", class: "#{project_status_css_class(project.import_status)}" }
%td
= link_to project.import_source, project.import_source, target: '_blank', rel: 'noopener noreferrer'
%td
= link_to project.full_path, [project.namespace.becomes(Namespace), project]
%td.job-status
- if project.import_status == 'finished'
= icon('check', text: 'Done')
- elsif project.import_status == 'started'
= icon('spinner spin', text: 'started')
- else
= project.human_import_status_name
- @repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}", data: { project: repo.project_key, repository: repo.slug } }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%fieldset.row
.input-group
.project-path.input-group-prepend
- if current_user.can_select_namespace?
- selected = params[:namespace_id] || :extra_group
- opts = current_user.can_create_group? ? { extra_group: Group.new(name: repo.project_key, path: repo.project_key) } : {}
= select_tag :namespace_id, namespaces_options(selected, opts.merge({ display_path: true })), { class: 'input-group-text select2 js-select-namespace', tabindex: 1 }
- else
= text_field_tag :path, current_user.namespace_path, class: "input-group-text input-large form-control", tabindex: 1, disabled: true
%span.input-group-prepend
.input-group-text /
= text_field_tag :path, repo.name, class: "input-mini form-control", tabindex: 2, autofocus: true, required: true
%td.import-actions.job-status
= button_tag class: 'btn btn-import js-add-to-import' do
Import
= icon('spinner spin', class: 'loading-icon')
- @incompatible_repos.each do |repo|
%tr{ id: "repo_#{repo.project_key}___#{repo.slug}" }
%td
= link_to repo.browse_url, repo.browse_url, target: '_blank', rel: 'noopener noreferrer'
%td.import-target
%td.import-actions-job-status
= label_tag 'Incompatible Project', nil, class: 'label badge-danger'
- if @incompatible_repos.any?
%p
One or more of your Bitbucket Server projects cannot be imported into GitLab
directly because they use Subversion or Mercurial for version control,
rather than Git. Please convert
= link_to 'them to Git,', 'https://www.atlassian.com/git/tutorials/migrating-overview'
and go through the
= link_to 'import flow', status_import_bitbucket_server_path
again.
.js-importer-status{ data: { jobs_import_path: "#{jobs_import_bitbucket_server_path}", import_path: "#{import_bitbucket_server_path}" } }
@@ -18,10 +18,14 @@
- if bitbucket_import_enabled?
%div
= link_to status_import_bitbucket_path, class: "btn import_bitbucket #{'how_to_import_link' unless bitbucket_import_configured?}" do
-= icon('bitbucket', text: 'Bitbucket')
+= icon('bitbucket', text: 'Bitbucket Cloud')
- unless bitbucket_import_configured?
= render 'bitbucket_import_modal'
+- if bitbucket_server_import_enabled?
+%div
+= link_to status_import_bitbucket_server_path, class: "btn import_bitbucket" do
+= icon('bitbucket-square', text: 'Bitbucket Server')
%div
- if gitlab_import_enabled?
%div
= link_to status_import_gitlab_path, class: "btn import_gitlab #{'how_to_import_link' unless gitlab_import_configured?}" do
...
@@ -24,6 +24,13 @@ namespace :import do
get :jobs
end
+resource :bitbucket_server, only: [:create, :new], controller: :bitbucket_server do
+post :configure
+get :status
+get :callback
+get :jobs
+end
resource :google_code, only: [:create, :new], controller: :google_code do
get :status
post :callback
...
# frozen_string_literal: true
module BitbucketServer
class Client
attr_reader :connection
ServerError = Class.new(StandardError)
SERVER_ERRORS = [SocketError,
OpenSSL::SSL::SSLError,
Errno::ECONNRESET,
Errno::ECONNREFUSED,
Errno::EHOSTUNREACH,
Net::OpenTimeout,
Net::ReadTimeout,
Gitlab::HTTP::BlockedUrlError,
BitbucketServer::Connection::ConnectionError].freeze
def initialize(options = {})
@connection = Connection.new(options)
end
def pull_requests(project_key, repo)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests?state=ALL"
get_collection(path, :pull_request)
end
def activities(project_key, repo, pull_request_id)
path = "/projects/#{project_key}/repos/#{repo}/pull-requests/#{pull_request_id}/activities"
get_collection(path, :activity)
end
def repo(project, repo_name)
parsed_response = connection.get("/projects/#{project}/repos/#{repo_name}")
BitbucketServer::Representation::Repo.new(parsed_response)
end
def repos
path = "/repos"
get_collection(path, :repo)
end
def create_branch(project_key, repo, branch_name, sha)
payload = {
name: branch_name,
startPoint: sha,
message: 'GitLab temporary branch for import'
}
connection.post("/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
def delete_branch(project_key, repo, branch_name, sha)
payload = {
name: Gitlab::Git::BRANCH_REF_PREFIX + branch_name,
dryRun: false
}
connection.delete(:branches, "/projects/#{project_key}/repos/#{repo}/branches", payload.to_json)
end
private
def get_collection(path, type)
paginator = BitbucketServer::Paginator.new(connection, Addressable::URI.escape(path), type)
BitbucketServer::Collection.new(paginator)
rescue *SERVER_ERRORS => e
raise ServerError, e
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Collection < Enumerator
def initialize(paginator)
super() do |yielder|
loop do
paginator.items.each { |item| yielder << item }
end
end
lazy
end
def method_missing(method, *args)
return super unless self.respond_to?(method)
self.__send__(method, *args) do |item| # rubocop:disable GitlabSecurity/PublicSend
block_given? ? yield(item) : item
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Connection
include ActionView::Helpers::SanitizeHelper
DEFAULT_API_VERSION = '1.0'
SEPARATOR = '/'
attr_reader :api_version, :base_uri, :username, :token
ConnectionError = Class.new(StandardError)
def initialize(options = {})
@api_version = options.fetch(:api_version, DEFAULT_API_VERSION)
@base_uri = options[:base_uri]
@username = options[:user]
@token = options[:password]
end
def get(path, extra_query = {})
response = Gitlab::HTTP.get(build_url(path),
basic_auth: auth,
headers: accept_headers,
query: extra_query)
check_errors!(response)
response.parsed_response
end
def post(path, body)
response = Gitlab::HTTP.post(build_url(path),
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
# We need to support two different APIs for deletion:
#
# /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches/default
# /rest/branch-utils/1.0/projects/{projectKey}/repos/{repositorySlug}/branches
def delete(resource, path, body)
url = delete_url(resource, path)
response = Gitlab::HTTP.delete(url,
basic_auth: auth,
headers: post_headers,
body: body)
check_errors!(response)
response.parsed_response
end
private
def check_errors!(response)
raise ConnectionError, "Response is not valid JSON" unless response.parsed_response.is_a?(Hash)
return if response.code >= 200 && response.code < 300
details = sanitize(response.parsed_response.dig('errors', 0, 'message'))
message = "Error #{response.code}"
message += ": #{details}" if details
raise ConnectionError, message
rescue JSON::ParserError
raise ConnectionError, "Unable to parse the server response as JSON"
end
def auth
@auth ||= { username: username, password: token }
end
def accept_headers
@accept_headers ||= { 'Accept' => 'application/json' }
end
def post_headers
@post_headers ||= accept_headers.merge({ 'Content-Type' => 'application/json' })
end
def build_url(path)
return path if path.starts_with?(root_url)
url_join_paths(root_url, path)
end
def root_url
url_join_paths(base_uri, "/rest/api/#{api_version}")
end
def delete_url(resource, path)
if resource == :branches
url_join_paths(base_uri, "/rest/branch-utils/#{api_version}#{path}")
else
build_url(path)
end
end
# URI.join is stupid in that slashes are important:
#
# # URI.join('http://example.com/subpath', 'hello')
# => http://example.com/hello
#
# We really want http://example.com/subpath/hello
#
def url_join_paths(*paths)
paths.map { |path| strip_slashes(path) }.join(SEPARATOR)
end
def strip_slashes(path)
path = path[1..-1] if path.starts_with?(SEPARATOR)
path.chomp(SEPARATOR)
end
end
end
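The URI.join pitfall called out above is easy to demonstrate. Below is a standalone sketch (not part of the commit; the example.com base and FOO/bar repository are invented) contrasting URI.join with the strip-and-join approach that url_join_paths takes.

require 'uri'

base = 'http://example.com/subpath'
path = '/rest/api/1.0/projects/FOO/repos/bar'

# URI.join treats the absolute path as a new root and drops "/subpath":
puts URI.join(base, path)
# => http://example.com/rest/api/1.0/projects/FOO/repos/bar

# Stripping the surrounding slashes and joining keeps the prefix,
# which is what url_join_paths does for root_url and build_url above:
joined = [base, path].map { |p| p.sub(%r{\A/}, '').chomp('/') }.join('/')
puts joined
# => http://example.com/subpath/rest/api/1.0/projects/FOO/repos/bar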
# frozen_string_literal: true
module BitbucketServer
class Page
attr_reader :attrs, :items
def initialize(raw, type)
@attrs = parse_attrs(raw)
@items = parse_values(raw, representation_class(type))
end
def next?
!attrs.fetch(:isLastPage, true)
end
def next
attrs.fetch(:nextPageStart)
end
private
def parse_attrs(raw)
raw.slice('size', 'nextPageStart', 'isLastPage').symbolize_keys
end
def parse_values(raw, bitbucket_rep_class)
return [] unless raw['values'] && raw['values'].is_a?(Array)
bitbucket_rep_class.decorate(raw['values'])
end
def representation_class(type)
BitbucketServer::Representation.const_get(type.to_s.camelize)
end
end
end
# frozen_string_literal: true
module BitbucketServer
class Paginator
PAGE_LENGTH = 25
def initialize(connection, url, type)
@connection = connection
@type = type
@url = url
@page = nil
end
def items
raise StopIteration unless has_next_page?
@page = fetch_next_page
@page.items
end
private
attr_reader :connection, :page, :url, :type
def has_next_page?
page.nil? || page.next?
end
def next_offset
page.nil? ? 0 : page.next
end
def fetch_next_page
parsed_response = connection.get(@url, start: next_offset, limit: PAGE_LENGTH)
Page.new(parsed_response, type)
end
end
end
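Paginator raises StopIteration once the last page has been served, and Collection's loop turns that into the end of the enumeration, so callers only fetch as many pages as they consume. A standalone sketch of the same mechanism (not part of the commit; the two in-memory pages stand in for REST responses):

pages = [
  { 'values' => [1, 2, 3], 'isLastPage' => false, 'nextPageStart' => 3 },
  { 'values' => [4, 5], 'isLastPage' => true }
]

cursor = 0
collection = Enumerator.new do |yielder|
  loop do
    # Mirrors Paginator#items: stop when there is no next page to fetch.
    raise StopIteration if cursor >= pages.size

    page = pages[cursor]
    cursor += 1
    page['values'].each { |item| yielder << item }
  end
end

p collection.take(3) # => [1, 2, 3]  (the second page is never requested)
p cursor             # => 1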
# frozen_string_literal: true
module BitbucketServer
module Representation
class Activity < Representation::Base
def comment?
action == 'COMMENTED'
end
def inline_comment?
!!(comment? && comment_anchor)
end
def comment
return unless comment?
@comment ||=
if inline_comment?
PullRequestComment.new(raw)
else
Comment.new(raw)
end
end
# TODO Move this into MergeEvent
def merge_event?
action == 'MERGED'
end
def committer_user
commit.dig('committer', 'displayName')
end
def committer_email
commit.dig('committer', 'emailAddress')
end
def merge_timestamp
timestamp = commit['committerTimestamp']
self.class.convert_timestamp(timestamp)
end
def merge_commit
commit['id']
end
def created_at
self.class.convert_timestamp(created_date)
end
private
def commit
raw.fetch('commit', {})
end
def action
raw['action']
end
def comment_anchor
raw['commentAnchor']
end
def created_date
raw['createdDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
class Base
attr_reader :raw
def initialize(raw)
@raw = raw
end
def self.decorate(entries)
entries.map { |entry| new(entry)}
end
def self.convert_timestamp(time_usec)
Time.at(time_usec / 1000) if time_usec.is_a?(Integer)
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# A general comment with the structure:
# "comment": {
# "author": {
# "active": true,
# "displayName": "root",
# "emailAddress": "stanhu+bitbucket@gitlab.com",
# "id": 1,
# "links": {
# "self": [
# {
# "href": "http://localhost:7990/users/root"
# }
# ]
# },
# "name": "root",
# "slug": "root",
# "type": "NORMAL"
# }
# }
# }
class Comment < Representation::Base
attr_reader :parent_comment
CommentNode = Struct.new(:raw_comments, :parent)
def initialize(raw, parent_comment: nil)
super(raw)
@parent_comment = parent_comment
end
def id
raw_comment['id']
end
def author_username
author['displayName']
end
def author_email
author['emailAddress']
end
def note
raw_comment['text']
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(updated_date)
end
# Bitbucket Server supports the ability to reply to any comment
# and create multiple threads. It represents these as a linked list
# of comments within comments. For example:
#
# "comments": [
# {
# "author" : ...
# "comments": [
# {
# "author": ...
#
# Since GitLab only supports a single thread, we flatten all these
# comments into a single discussion.
def comments
@comments ||= flatten_comments
end
private
# In order to provide context for each reply, we need to track
# the parent of each comment. This method works as follows:
#
# 1. Insert the root comment into the workset. The root element is the current note.
# 2. For each node in the workset:
# a. Examine if it has replies to that comment. If it does,
# insert that node into the workset.
# b. Parse that note into a Comment structure and add it to a flat list.
def flatten_comments
comments = raw_comment['comments']
workset =
if comments
[CommentNode.new(comments, self)]
else
[]
end
all_comments = []
until workset.empty?
node = workset.pop
parent = node.parent
node.raw_comments.each do |comment|
new_comments = comment.delete('comments')
current_comment = Comment.new({ 'comment' => comment }, parent_comment: parent)
all_comments << current_comment
workset << CommentNode.new(new_comments, current_comment) if new_comments
end
end
all_comments
end
def raw_comment
raw.fetch('comment', {})
end
def author
raw_comment['author']
end
def created_date
raw_comment['createdDate']
end
def updated_date
raw_comment['updatedDate']
end
end
end
end
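The worklist walk in flatten_comments is easier to see on a concrete nested structure. A standalone sketch (not part of the commit; the reply text below is invented) of the same flattening, tracking each reply's parent for context:

raw = {
  'text' => 'root note',
  'comments' => [
    { 'text' => 'first reply',
      'comments' => [{ 'text' => 'nested reply', 'comments' => [] }] },
    { 'text' => 'second reply', 'comments' => [] }
  ]
}

flat = []
workset = [[raw['comments'], raw['text']]] # pairs of [replies, parent text]

until workset.empty?
  replies, parent = workset.pop
  replies.each do |reply|
    flat << { 'note' => reply['text'], 'parent' => parent }
    workset << [reply['comments'], reply['text']] unless reply['comments'].empty?
  end
end

p flat.map { |c| c['note'] }
# => ["first reply", "second reply", "nested reply"]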
# frozen_string_literal: true
module BitbucketServer
module Representation
class PullRequest < Representation::Base
def author
raw.dig('author', 'user', 'name')
end
def author_email
raw.dig('author', 'user', 'emailAddress')
end
def description
raw['description']
end
def iid
raw['id']
end
def state
case raw['state']
when 'MERGED'
'merged'
when 'DECLINED'
'closed'
else
'opened'
end
end
def merged?
state == 'merged'
end
def created_at
self.class.convert_timestamp(created_date)
end
def updated_at
self.class.convert_timestamp(updated_date)
end
def title
raw['title']
end
def source_branch_name
raw.dig('fromRef', 'id')
end
def source_branch_sha
raw.dig('fromRef', 'latestCommit')
end
def target_branch_name
raw.dig('toRef', 'id')
end
def target_branch_sha
raw.dig('toRef', 'latestCommit')
end
private
def created_date
raw['createdDate']
end
def updated_date
raw['updatedDate']
end
end
end
end
# frozen_string_literal: true
module BitbucketServer
module Representation
# An inline comment with the following structure that identifies
# the part of the diff:
#
# "commentAnchor": {
# "diffType": "EFFECTIVE",
# "fileType": "TO",
# "fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
# "line": 1,
# "lineType": "ADDED",
# "orphaned": false,
# "path": "CHANGELOG.md",
# "toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
# }
#
# More details in https://docs.atlassian.com/bitbucket-server/rest/5.12.0/bitbucket-rest.html.
class PullRequestComment < Comment
def from_sha
comment_anchor['fromHash']
end
def to_sha
comment_anchor['toHash']
end
def to?
file_type == 'TO'
end
def from?
file_type == 'FROM'
end
def added?
line_type == 'ADDED'
end
def removed?
line_type == 'REMOVED'
end
# There are three line comment types: added, removed, or context.
#
# 1. An added type means a new line was inserted, so there is no old position.
# 2. A removed type means a line was removed, so there is no new position.
# 3. A context type means the line was unmodified, so there is both an
# old and new position.
def new_pos
return if removed?
return unless line_position
line_position[1]
end
def old_pos
return if added?
return unless line_position
line_position[0]
end
def file_path
comment_anchor.fetch('path')
end
private
def file_type
comment_anchor['fileType']
end
def line_type
comment_anchor['lineType']
end
# Each comment contains the following information about the diff:
#
# hunks: [
# {
# segments: [
# {
# "lines": [
# {
# "commentIds": [ N ],
# "source": X,
# "destination": Y
# }, ...
# ] ....
#
# To determine the line position of a comment, we search all the lines
# entries until we find this comment ID.
def line_position
@line_position ||= diff_hunks.each do |hunk|
segments = hunk.fetch('segments', [])
segments.each do |segment|
lines = segment.fetch('lines', [])
lines.each do |line|
if line['commentIds']&.include?(id)
return [line['source'], line['destination']]
end
end
end
end
end
def comment_anchor
raw.fetch('commentAnchor', {})
end
def diff
raw.fetch('diff', {})
end
def diff_hunks
diff.fetch('hunks', [])
end
end
end
end
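The hunk walk in line_position boils down to scanning nested arrays for a matching comment ID. A standalone sketch (not part of the commit; the single hunk below is trimmed to the shape of the activities fixture further down) of that lookup:

hunks = [
  { 'segments' => [
      { 'lines' => [
          { 'commentIds' => [9], 'source' => 1, 'destination' => 1 },
          { 'source' => 1, 'destination' => 2 }
        ] }
    ] }
]

# Walk hunks -> segments -> lines until a line references the comment ID,
# then report its old ("source") and new ("destination") positions.
def position_for(hunks, comment_id)
  hunks.each do |hunk|
    hunk.fetch('segments', []).each do |segment|
      segment.fetch('lines', []).each do |line|
        return [line['source'], line['destination']] if line['commentIds']&.include?(comment_id)
      end
    end
  end

  nil
end

p position_for(hunks, 9) # => [1, 1]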
# frozen_string_literal: true
module BitbucketServer
module Representation
class Repo < Representation::Base
def initialize(raw)
super(raw)
end
def project_key
raw.dig('project', 'key')
end
def project_name
raw.dig('project', 'name')
end
def slug
raw['slug']
end
def browse_url
# The JSON response contains an array of 1 element. Not sure if there
# are cases where multiple links would be provided.
raw.dig('links', 'self').first.fetch('href')
end
def clone_url
raw['links']['clone'].find { |link| link['name'].starts_with?('http') }.fetch('href')
end
def description
project['description']
end
def full_name
"#{project_name}/#{name}"
end
def issues_enabled?
true
end
def name
raw['name']
end
def valid?
raw['scmId'] == 'git'
end
def visibility_level
if project['public']
Gitlab::VisibilityLevel::PUBLIC
else
Gitlab::VisibilityLevel::PRIVATE
end
end
def project
raw['project']
end
def to_s
full_name
end
end
end
end
module Gitlab
module BitbucketServerImport
class Importer
include Gitlab::ShellAdapter
attr_reader :recover_missing_commits
attr_reader :project, :project_key, :repository_slug, :client, :errors, :users
REMOTE_NAME = 'bitbucket_server'.freeze
BATCH_SIZE = 100
TempBranch = Struct.new(:name, :sha)
def self.imports_repository?
true
end
def self.refmap
[:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head']
end
# Unlike GitHub, you can't grab the commit SHAs for pull requests that
# have been closed but not merged even though Bitbucket has these
# commits internally. We can recover these pull requests by creating a
# branch with the Bitbucket REST API, but by default we turn this
# behavior off.
def initialize(project, recover_missing_commits: false)
@project = project
@recover_missing_commits = recover_missing_commits
@project_key = project.import_data.data['project_key']
@repository_slug = project.import_data.data['repo_slug']
@client = BitbucketServer::Client.new(project.import_data.credentials)
@formatter = Gitlab::ImportFormatter.new
@errors = []
@users = {}
@temp_branches = []
end
def execute
import_repository
import_pull_requests
delete_temp_branches
handle_errors
true
end
private
def handle_errors
return unless errors.any?
project.update_column(:import_error, {
message: 'The remote data could not be fully imported.',
errors: errors
}.to_json)
end
def gitlab_user_id(email)
find_user_id(email) || project.creator_id
end
def find_user_id(email)
return nil unless email
return users[email] if users.key?(email)
user = User.find_by_any_email(email, confirmed: true)
users[email] = user&.id
user&.id
end
def repo
@repo ||= client.repo(project_key, repository_slug)
end
def sha_exists?(sha)
project.repository.commit(sha)
end
def temp_branch_name(pull_request, suffix)
"gitlab/import/pull-request/#{pull_request.iid}/#{suffix}"
end
# This method restores required SHAs that GitLab needs to create diffs
# into branch names as the following:
#
# gitlab/import/pull-request/N/{to,from}
def restore_branches(pull_requests)
shas_to_restore = []
pull_requests.each do |pull_request|
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :from),
pull_request.source_branch_sha)
shas_to_restore << TempBranch.new(temp_branch_name(pull_request, :to),
pull_request.target_branch_sha)
end
# Create the branches on the Bitbucket Server first
created_branches = restore_branch_shas(shas_to_restore)
@temp_branches += created_branches
# Now sync the repository so we get the new branches
import_repository unless created_branches.empty?
end
def restore_branch_shas(shas_to_restore)
shas_to_restore.each_with_object([]) do |temp_branch, branches_created|
branch_name = temp_branch.name
sha = temp_branch.sha
next if sha_exists?(sha)
begin
client.create_branch(project_key, repository_slug, branch_name, sha)
branches_created << temp_branch
rescue BitbucketServer::Connection::ConnectionError => e
Rails.logger.warn("BitbucketServerImporter: Unable to recreate branch for SHA #{sha}: #{e}")
end
end
end
def import_repository
project.ensure_repository
project.repository.fetch_as_mirror(project.import_url, refmap: self.class.refmap, remote_name: REMOTE_NAME)
rescue Gitlab::Shell::Error, Gitlab::Git::RepositoryMirroring::RemoteError => e
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true
project.repository.expire_content_cache if project.repository_exists?
raise e.message
end
# Bitbucket Server keeps track of references for open pull requests in
# refs/heads/pull-requests, but closed and merged requests get moved
# into hidden internal refs under stash-refs/pull-requests. Unless the
# SHAs involved are at the tip of a branch or tag, there is no way to
# retrieve those commits from the server.
#
# To avoid losing history, we use the Bitbucket API to re-create the branch
# on the remote server. Then we have to issue a `git fetch` to download these
# branches.
def import_pull_requests
pull_requests = client.pull_requests(project_key, repository_slug).to_a
# Creating branches on the server and fetching the newly-created branches
# may take a number of network round-trips. Do this in batches so that we can
# avoid doing a git fetch for every new branch.
pull_requests.each_slice(BATCH_SIZE) do |batch|
restore_branches(batch) if recover_missing_commits
batch.each do |pull_request|
begin
import_bitbucket_pull_request(pull_request)
rescue StandardError => e
errors << { type: :pull_request, iid: pull_request.iid, errors: e.message, trace: e.backtrace.join("\n"), raw_response: pull_request.raw }
end
end
end
end
def delete_temp_branches
@temp_branches.each do |branch|
begin
client.delete_branch(project_key, repository_slug, branch.name, branch.sha)
project.repository.delete_branch(branch.name)
rescue BitbucketServer::Connection::ConnectionError => e
@errors << { type: :delete_temp_branches, branch_name: branch.name, errors: e.message }
end
end
end
def import_bitbucket_pull_request(pull_request)
description = ''
description += @formatter.author_line(pull_request.author) unless find_user_id(pull_request.author_email)
description += pull_request.description if pull_request.description
source_branch_sha = pull_request.source_branch_sha
target_branch_sha = pull_request.target_branch_sha
author_id = gitlab_user_id(pull_request.author_email)
attributes = {
iid: pull_request.iid,
title: pull_request.title,
description: description,
source_project: project,
source_branch: Gitlab::Git.ref_name(pull_request.source_branch_name),
source_branch_sha: source_branch_sha,
target_project: project,
target_branch: Gitlab::Git.ref_name(pull_request.target_branch_name),
target_branch_sha: target_branch_sha,
state: pull_request.state,
author_id: author_id,
assignee_id: nil,
created_at: pull_request.created_at,
updated_at: pull_request.updated_at
}
merge_request = project.merge_requests.create!(attributes)
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
end
def import_pull_request_comments(pull_request, merge_request)
comments, other_activities = client.activities(project_key, repository_slug, pull_request.iid).partition(&:comment?)
merge_event = other_activities.find(&:merge_event?)
import_merge_event(merge_request, merge_event) if merge_event
inline_comments, pr_comments = comments.partition(&:inline_comment?)
import_inline_comments(inline_comments.map(&:comment), merge_request)
import_standalone_pr_comments(pr_comments.map(&:comment), merge_request)
end
def import_merge_event(merge_request, merge_event)
committer = merge_event.committer_email
user_id = gitlab_user_id(committer)
timestamp = merge_event.merge_timestamp
merge_request.update({ merge_commit_sha: merge_event.merge_commit })
metric = MergeRequest::Metrics.find_or_initialize_by(merge_request: merge_request)
metric.update(merged_by_id: user_id, merged_at: timestamp)
end
def import_inline_comments(inline_comments, merge_request)
inline_comments.each do |comment|
position = build_position(merge_request, comment)
parent = create_diff_note(merge_request, comment, position)
next unless parent&.persisted?
discussion_id = parent.discussion_id
comment.comments.each do |reply|
create_diff_note(merge_request, reply, position, discussion_id)
end
end
end
def create_diff_note(merge_request, comment, position, discussion_id = nil)
attributes = pull_request_comment_attributes(comment)
attributes.merge!(position: position, type: 'DiffNote')
attributes[:discussion_id] = discussion_id if discussion_id
note = merge_request.notes.build(attributes)
if note.valid?
note.save
return note
end
# Bitbucket Server supports the ability to comment on any line, not just the
# line in the diff. If we can't add the note as a DiffNote, fall back to creating
# a regular note.
create_fallback_diff_note(merge_request, comment, position)
rescue StandardError => e
errors << { type: :pull_request, id: comment.id, errors: e.message }
nil
end
def create_fallback_diff_note(merge_request, comment, position)
attributes = pull_request_comment_attributes(comment)
note = "*Comment on"
note += " #{position.old_path}:#{position.old_line} -->" if position.old_line
note += " #{position.new_path}:#{position.new_line}" if position.new_line
note += "*\n\n#{comment.note}"
attributes[:note] = note
merge_request.notes.create!(attributes)
end
def build_position(merge_request, pr_comment)
params = {
diff_refs: merge_request.diff_refs,
old_path: pr_comment.file_path,
new_path: pr_comment.file_path,
old_line: pr_comment.old_pos,
new_line: pr_comment.new_pos
}
Gitlab::Diff::Position.new(params)
end
def import_standalone_pr_comments(pr_comments, merge_request)
pr_comments.each do |comment|
begin
merge_request.notes.create!(pull_request_comment_attributes(comment))
comment.comments.each do |replies|
merge_request.notes.create!(pull_request_comment_attributes(replies))
end
rescue StandardError => e
errors << { type: :pull_request, iid: comment.id, errors: e.message }
end
end
end
def pull_request_comment_attributes(comment)
author = find_user_id(comment.author_email)
note = ''
unless author
author = project.creator_id
note = "*By #{comment.author_username} (#{comment.author_email})*\n\n"
end
note +=
# Provide some context for replying
if comment.parent_comment
"> #{comment.parent_comment.note.truncate(80)}\n\n#{comment.note}"
else
comment.note
end
{
project: project,
note: note,
author_id: author,
created_at: comment.created_at,
updated_at: comment.updated_at
}
end
end
end
end
module Gitlab
module BitbucketServerImport
class ProjectCreator
attr_reader :project_key, :repo_slug, :repo, :name, :namespace, :current_user, :session_data
def initialize(project_key, repo_slug, repo, name, namespace, current_user, session_data)
@project_key = project_key
@repo_slug = repo_slug
@repo = repo
@name = name
@namespace = namespace
@current_user = current_user
@session_data = session_data
end
def execute
::Projects::CreateService.new(
current_user,
name: name,
path: name,
description: repo.description,
namespace_id: namespace.id,
visibility_level: repo.visibility_level,
import_type: 'bitbucket_server',
import_source: repo.browse_url,
import_url: repo.clone_url,
import_data: {
credentials: session_data,
data: { project_key: project_key, repo_slug: repo_slug }
},
skip_wiki: true
).execute
end
end
end
end
@@ -10,7 +10,8 @@ module Gitlab
# We exclude `bare_repository` here as it has no import class associated
ImportTable = [
ImportSource.new('github', 'GitHub', Gitlab::GithubImport::ParallelImporter),
-ImportSource.new('bitbucket', 'Bitbucket', Gitlab::BitbucketImport::Importer),
+ImportSource.new('bitbucket', 'Bitbucket Cloud', Gitlab::BitbucketImport::Importer),
+ImportSource.new('bitbucket_server', 'Bitbucket Server', Gitlab::BitbucketServerImport::Importer),
ImportSource.new('gitlab', 'GitLab.com', Gitlab::GitlabImport::Importer),
ImportSource.new('google_code', 'Google Code', Gitlab::GoogleCodeImport::Importer),
ImportSource.new('fogbugz', 'FogBugz', Gitlab::FogbugzImport::Importer),
...
@@ -16,6 +16,9 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\n"
+msgid " Status"
+msgstr ""
msgid "%d changed file"
msgid_plural "%d changed files"
msgstr[0] ""
@@ -808,6 +811,9 @@ msgstr ""
msgid "Below you will find all the groups that are public."
msgstr ""
+msgid "Bitbucket Server Import"
+msgstr ""
msgid "Bitbucket import"
msgstr ""
@@ -2316,6 +2322,9 @@ msgstr ""
msgid "Ends at (UTC)"
msgstr ""
+msgid "Enter in your Bitbucket Server URL and personal access token below"
+msgstr ""
msgid "Environments"
msgstr ""
@@ -2618,6 +2627,9 @@ msgstr ""
msgid "From Bitbucket"
msgstr ""
+msgid "From Bitbucket Server"
+msgstr ""
msgid "From FogBugz"
msgstr ""
@@ -2974,6 +2986,9 @@ msgstr ""
msgid "Import projects from Bitbucket"
msgstr ""
+msgid "Import projects from Bitbucket Server"
+msgstr ""
msgid "Import projects from FogBugz"
msgstr ""
@@ -2983,6 +2998,9 @@ msgstr ""
msgid "Import projects from Google Code"
msgstr ""
+msgid "Import repositories from Bitbucket Server"
+msgstr ""
msgid "Import repositories from GitHub"
msgstr ""
@@ -3219,6 +3237,9 @@ msgstr ""
msgid "List available repositories"
msgstr ""
+msgid "List your Bitbucket Server repositories"
+msgstr ""
msgid "List your GitHub repositories"
msgstr ""
@@ -6131,6 +6152,9 @@ msgstr ""
msgid "here"
msgstr ""
+msgid "https://your-bitbucket-server"
+msgstr ""
msgid "import flow"
msgstr ""
...
require 'spec_helper'
describe Import::BitbucketServerController do
let(:user) { create(:user) }
let(:project_key) { 'test-project' }
let(:repo_slug) { 'some-repo' }
let(:client) { instance_double(BitbucketServer::Client) }
def assign_session_tokens
session[:bitbucket_server_url] = 'http://localhost:7990'
session[:bitbucket_server_username] = 'bitbucket'
session[:bitbucket_server_personal_access_token] = 'some-token'
end
before do
sign_in(user)
allow(controller).to receive(:bitbucket_server_import_enabled?).and_return(true)
end
describe 'GET new' do
render_views
it 'shows the input form' do
get :new
expect(response.body).to have_text('Bitbucket Server URL')
end
end
describe 'POST create' do
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
repo = double(name: 'my-project')
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(repo)
assign_session_tokens
end
set(:project) { create(:project) }
it 'returns the new project' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: project))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(200)
end
it 'returns an error when an invalid project key is used' do
post :create, project: 'some&project'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when an invalid repository slug is used' do
post :create, project: 'some-project', repository: 'try*this'
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be found' do
allow(client).to receive(:repo).with(project_key, repo_slug).and_return(nil)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it 'returns an error when the project cannot be saved' do
allow(Gitlab::BitbucketServerImport::ProjectCreator)
.to receive(:new).with(project_key, repo_slug, anything, 'my-project', user.namespace, user, anything)
.and_return(double(execute: build(:project)))
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
it "returns an error when the server can't be contacted" do
expect(client).to receive(:repo).with(project_key, repo_slug).and_raise(BitbucketServer::Client::ServerError)
post :create, project: project_key, repository: repo_slug, format: :json
expect(response).to have_gitlab_http_status(422)
end
end
describe 'POST configure' do
let(:token) { 'token' }
let(:username) { 'bitbucket-user' }
let(:url) { 'http://localhost:7990/bitbucket' }
it 'clears out existing session' do
post :configure
expect(session[:bitbucket_server_url]).to be_nil
expect(session[:bitbucket_server_username]).to be_nil
expect(session[:bitbucket_server_personal_access_token]).to be_nil
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
it 'sets the session variables' do
post :configure, personal_access_token: token, bitbucket_username: username, bitbucket_server_url: url
expect(session[:bitbucket_server_url]).to eq(url)
expect(session[:bitbucket_server_username]).to eq(username)
expect(session[:bitbucket_server_personal_access_token]).to eq(token)
expect(response).to have_gitlab_http_status(302)
expect(response).to redirect_to(status_import_bitbucket_server_path)
end
end
describe 'GET status' do
render_views
before do
allow(controller).to receive(:bitbucket_client).and_return(client)
@repo = double(slug: 'vim', project_key: 'asd', full_name: 'asd/vim', "valid?" => true, project_name: 'asd', browse_url: 'http://test', name: 'vim')
@invalid_repo = double(slug: 'invalid', project_key: 'foobar', full_name: 'asd/foobar', "valid?" => false, browse_url: 'http://bad-repo')
assign_session_tokens
end
it 'assigns repository categories' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id, import_source: 'foo/bar', import_status: 'finished')
expect(client).to receive(:repos).and_return([@repo, @invalid_repo])
get :status
expect(assigns(:already_added_projects)).to eq([created_project])
expect(assigns(:repos)).to eq([@repo])
expect(assigns(:incompatible_repos)).to eq([@invalid_repo])
end
end
describe 'GET jobs' do
before do
assign_session_tokens
end
it 'returns a list of imported projects' do
created_project = create(:project, import_type: 'bitbucket_server', creator_id: user.id)
get :jobs
expect(json_response.count).to eq(1)
expect(json_response.first['id']).to eq(created_project.id)
expect(json_response.first['import_status']).to eq('none')
end
end
end
{
"isLastPage": true,
"limit": 25,
"size": 8,
"start": 0,
"values": [
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530164016725,
"id": 11,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [
{
"anchor": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"createdDate": 1530164016725,
"id": 11,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"text": "Ok",
"type": "COMMENT",
"updatedDate": 1530164016725,
"version": 0
},
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"createdDate": 1530164026000,
"id": 1,
"permittedOperations": {
"deletable": true,
"editable": true,
"transitionable": true
},
"state": "OPEN",
"text": "here's a task"
}
],
"text": "Ok",
"updatedDate": 1530164016725,
"version": 0
},
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530165543990,
"id": 12,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "hi",
"updatedDate": 1530165543990,
"version": 0
}
],
"createdDate": 1530164013718,
"id": 10,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Hello world",
"updatedDate": 1530164013718,
"version": 0
},
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530165549932,
"id": 13,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "hello",
"updatedDate": 1530165549932,
"version": 0
}
],
"createdDate": 1530161499144,
"id": 9,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "is this a new line?",
"updatedDate": 1530161499144,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "TO",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 1,
"lineType": "ADDED",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530161499144,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 11,
"segments": [
{
"lines": [
{
"commentIds": [
9
],
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 9,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 19,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530053198463,
"id": 7,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "What about this line?",
"updatedDate": 1530053198463,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "FROM",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 9,
"lineType": "CONTEXT",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530053198463,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 12,
"segments": [
{
"lines": [
{
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"commentIds": [
7
],
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
},
{
"destination": 12,
"line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
"source": 10,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 10,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 14,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1530143330513,
"id": 8,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "How about this?",
"updatedDate": 1530143330513,
"version": 0
}
],
"createdDate": 1530053193795,
"id": 6,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "It does.",
"updatedDate": 1530053193795,
"version": 0
}
],
"createdDate": 1530053187904,
"id": 5,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Does this line make sense?",
"updatedDate": 1530053187904,
"version": 0
},
"commentAction": "ADDED",
"commentAnchor": {
"diffType": "EFFECTIVE",
"fileType": "FROM",
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"line": 3,
"lineType": "CONTEXT",
"orphaned": false,
"path": "CHANGELOG.md",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"createdDate": 1530053187904,
"diff": {
"destination": {
"components": [
"CHANGELOG.md"
],
"extension": "md",
"name": "CHANGELOG.md",
"parent": "",
"toString": "CHANGELOG.md"
},
"hunks": [
{
"destinationLine": 1,
"destinationSpan": 12,
"segments": [
{
"lines": [
{
"destination": 1,
"line": "# Edit 1",
"source": 1,
"truncated": false
},
{
"destination": 2,
"line": "",
"source": 1,
"truncated": false
}
],
"truncated": false,
"type": "ADDED"
},
{
"lines": [
{
"destination": 3,
"line": "# ChangeLog",
"source": 1,
"truncated": false
},
{
"destination": 4,
"line": "",
"source": 2,
"truncated": false
},
{
"commentIds": [
5
],
"destination": 5,
"line": "This log summarizes the changes in each released version of rouge. The versioning scheme",
"source": 3,
"truncated": false
},
{
"destination": 6,
"line": "we use is semver, although we will often release new lexers in minor versions, as a",
"source": 4,
"truncated": false
},
{
"destination": 7,
"line": "practical matter.",
"source": 5,
"truncated": false
},
{
"destination": 8,
"line": "",
"source": 6,
"truncated": false
},
{
"destination": 9,
"line": "## version TBD: (unreleased)",
"source": 7,
"truncated": false
},
{
"destination": 10,
"line": "",
"source": 8,
"truncated": false
},
{
"destination": 11,
"line": "* General",
"source": 9,
"truncated": false
},
{
"destination": 12,
"line": " * Load pastie theme ([#809](https://github.com/jneen/rouge/pull/809) by rramsden)",
"source": 10,
"truncated": false
}
],
"truncated": false,
"type": "CONTEXT"
}
],
"sourceLine": 1,
"sourceSpan": 10,
"truncated": false
}
],
"properties": {
"current": true,
"fromHash": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"toHash": "a4c2164330f2549f67c13f36a93884cf66e976be"
},
"source": null,
"truncated": false
},
"id": 12,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529813304164,
"id": 4,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "Hello world",
"updatedDate": 1529813304164,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529813304164,
"id": 11,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "MERGED",
"commit": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"authorTimestamp": 1529727872000,
"committer": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"committerTimestamp": 1529727872000,
"displayId": "839fa9a2d43",
"id": "839fa9a2d434eb697815b8fcafaecc51accfdbbc",
"message": "Merge pull request #1 in TEST/rouge from root/CHANGELOGmd-1529725646923 to master\n\n* commit '66fbe6a097803f0acb7342b19563f710657ce5a2':\n CHANGELOG.md edited online with Bitbucket",
"parents": [
{
"author": {
"emailAddress": "dblessing@users.noreply.github.com",
"name": "Drew Blessing"
},
"authorTimestamp": 1529604583000,
"committer": {
"emailAddress": "noreply@github.com",
"name": "GitHub"
},
"committerTimestamp": 1529604583000,
"displayId": "c5f4288162e",
"id": "c5f4288162e2e6218180779c7f6ac1735bb56eab",
"message": "Merge pull request #949 from jneen/dblessing-patch-1\n\nAdd 'obj-c', 'obj_c' as ObjectiveC aliases",
"parents": [
{
"displayId": "ea7675f741e",
"id": "ea7675f741ee28f3f177ff32a9bde192742ffc59"
},
{
"displayId": "386b95a977b",
"id": "386b95a977b331e267497aa5206861774656f0c5"
}
]
},
{
"author": {
"emailAddress": "test.user@example.com",
"name": "root"
},
"authorTimestamp": 1529725651000,
"committer": {
"emailAddress": "test.user@example.com",
"name": "root"
},
"committerTimestamp": 1529725651000,
"displayId": "66fbe6a0978",
"id": "66fbe6a097803f0acb7342b19563f710657ce5a2",
"message": "CHANGELOG.md edited online with Bitbucket",
"parents": [
{
"displayId": "c5f4288162e",
"id": "c5f4288162e2e6218180779c7f6ac1735bb56eab"
}
]
}
]
},
"createdDate": 1529727872302,
"id": 7,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [
{
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529813297478,
"id": 3,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "This is a thread",
"updatedDate": 1529813297478,
"version": 0
}
],
"createdDate": 1529725692591,
"id": 2,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "What about this?",
"updatedDate": 1529725692591,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529725692591,
"id": 6,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "COMMENTED",
"comment": {
"author": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
},
"comments": [],
"createdDate": 1529725685910,
"id": 1,
"permittedOperations": {
"deletable": true,
"editable": true
},
"properties": {
"repositoryId": 1
},
"tasks": [],
"text": "This is a test.\n\n[analyze.json](attachment:1/1f32f09d97%2Fanalyze.json)\n",
"updatedDate": 1529725685910,
"version": 0
},
"commentAction": "ADDED",
"createdDate": 1529725685910,
"id": 5,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
},
{
"action": "OPENED",
"createdDate": 1529725657542,
"id": 4,
"user": {
"active": true,
"displayName": "root",
"emailAddress": "test.user@example.com",
"id": 1,
"links": {
"self": [
{
"href": "http://localhost:7990/users/root"
}
]
},
"name": "root",
"slug": "root",
"type": "NORMAL"
}
}
]
}
{
"author":{
"approved":false,
"role":"AUTHOR",
"status":"UNAPPROVED",
"user":{
"active":true,
"displayName":"root",
"emailAddress":"joe.montana@49ers.com",
"id":1,
"links":{
"self":[
{
"href":"http://localhost:7990/users/root"
}
]
},
"name":"root",
"slug":"root",
"type":"NORMAL"
}
},
"closed":true,
"closedDate":1530600648850,
"createdDate":1530600635690,
"description":"Test",
"fromRef":{
"displayId":"root/CODE_OF_CONDUCTmd-1530600625006",
"id":"refs/heads/root/CODE_OF_CONDUCTmd-1530600625006",
"latestCommit":"074e2b4dddc5b99df1bf9d4a3f66cfc15481fdc8",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"id":7,
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/pull-requests/7"
}
]
},
"locked":false,
"open":false,
"participants":[
],
"properties":{
"commentCount":1,
"openTaskCount":0,
"resolvedTaskCount":0
},
"reviewers":[
],
"state":"MERGED",
"title":"Added a new line",
"toRef":{
"displayId":"master",
"id":"refs/heads/master",
"latestCommit":"839fa9a2d434eb697815b8fcafaecc51accfdbbc",
"repository":{
"forkable":true,
"id":1,
"links":{
"clone":[
{
"href":"http://root@localhost:7990/scm/test/rouge.git",
"name":"http"
},
{
"href":"ssh://git@localhost:7999/test/rouge.git",
"name":"ssh"
}
],
"self":[
{
"href":"http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
},
"name":"rouge",
"project":{
"description":"Test",
"id":1,
"key":"TEST",
"links":{
"self":[
{
"href":"http://localhost:7990/projects/TEST"
}
]
},
"name":"test",
"public":false,
"type":"NORMAL"
},
"public":false,
"scmId":"git",
"slug":"rouge",
"state":"AVAILABLE",
"statusMessage":"Available"
}
},
"updatedDate":1530600648850,
"version":2
}
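
The two JSON documents above are spec fixtures for the importer: a pull-request activity feed (inline comments, threaded replies, a merge event) and a single merged pull request. A minimal standalone sketch of reading the activity feed the way the representation specs below do, assuming the files sit under spec/fixtures/importers/bitbucket_server/ and the GitLab classes are loaded:

require 'json'

# Hypothetical path; the specs resolve it through fixture_file.
values = JSON.parse(File.read('spec/fixtures/importers/bitbucket_server/activities.json'))['values']

values.each do |raw|
  activity = BitbucketServer::Representation::Activity.new(raw)

  if activity.merge_event?
    # Merge events carry the merge commit SHA and timestamp.
    puts "merged #{activity.merge_commit} at #{activity.merge_timestamp}"
  elsif activity.comment?
    kind = activity.inline_comment? ? 'inline comment' : 'comment'
    puts "#{kind}: #{activity.comment.note}"
  end
end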
...@@ -31,6 +31,44 @@ describe NamespacesHelper do ...@@ -31,6 +31,44 @@ describe NamespacesHelper do
expect(options).to include(user.name) expect(options).to include(user.name)
end end
it 'avoids duplicate groups when extra_group is used' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(user_group.id, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options.scan("data-name=\"#{admin_group.name}\"").count).to eq(1)
expect(options).to include(admin_group.name)
end
it 'selects existing group' do
allow(helper).to receive(:current_user).and_return(admin)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: user_group)
expect(options).to include("selected=\"selected\" value=\"#{user_group.id}\"")
expect(options).to include(admin_group.name)
end
it 'selects the new group by default' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: 'new-group'))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"-1\"")
end
it 'falls back to current user selection' do
allow(helper).to receive(:current_user).and_return(user)
options = helper.namespaces_options(:extra_group, display_path: true, extra_group: build(:group, name: admin_group.name))
expect(options).to include(user_group.name)
expect(options).not_to include(admin_group.name)
expect(options).to include("selected=\"selected\" value=\"#{user.namespace.id}\"")
end
it 'returns only groups if groups_only option is true' do it 'returns only groups if groups_only option is true' do
allow(helper).to receive(:current_user).and_return(user) allow(helper).to receive(:current_user).and_return(user)
......
require 'spec_helper'
describe BitbucketServer::Client do
let(:base_uri) { 'https://test:7990/stash/' }
let(:options) { { base_uri: base_uri, user: 'bitbucket', password: 'mypassword' } }
let(:project) { 'SOME-PROJECT' }
let(:repo_slug) { 'my-repo' }
let(:headers) { { "Content-Type" => "application/json" } }
subject { described_class.new(options) }
describe '#pull_requests' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests?state=ALL" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :pull_request)
subject.pull_requests(project, repo_slug)
end
it 'throws an exception when connection fails' do
allow(BitbucketServer::Collection).to receive(:new).and_raise(OpenSSL::SSL::SSLError)
expect { subject.pull_requests(project, repo_slug) }.to raise_error(described_class::ServerError)
end
end
describe '#activities' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}/pull-requests/1/activities" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :activity)
subject.activities(project, repo_slug, 1)
end
end
describe '#repo' do
let(:path) { "/projects/#{project}/repos/#{repo_slug}" }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo" }
it 'requests a specific repository' do
stub_request(:get, url).to_return(status: 200, headers: headers, body: '{}')
subject.repo(project, repo_slug)
expect(WebMock).to have_requested(:get, url)
end
end
describe '#repos' do
let(:path) { "/repos" }
it 'requests a collection' do
expect(BitbucketServer::Paginator).to receive(:new).with(anything, path, :repo)
subject.repos
end
end
describe '#create_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/api/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to create a branch' do
stub_request(:post, url).to_return(status: 204, headers: headers, body: '{}')
subject.create_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:post, url)
end
end
describe '#delete_branch' do
let(:branch) { 'test-branch' }
let(:sha) { '12345678' }
let(:url) { "#{base_uri}rest/branch-utils/1.0/projects/SOME-PROJECT/repos/my-repo/branches" }
it 'requests Bitbucket to delete a branch' do
stub_request(:delete, url).to_return(status: 204, headers: headers, body: '{}')
subject.delete_branch(project, repo_slug, branch, sha)
expect(WebMock).to have_requested(:delete, url)
end
end
end
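
Taken together, the examples above document the client surface the importer relies on. A hedged usage sketch, with host and credentials as placeholders, the method signatures taken from the spec, and the returned collections assumed to be enumerable as in the Bitbucket Cloud importer:

client = BitbucketServer::Client.new(base_uri: 'https://bitbucket.example.com/',
                                     user: 'bitbucket',
                                     password: 'secret')

repo = client.repo('SOME-PROJECT', 'my-repo')                    # one repository
pull_requests = client.pull_requests('SOME-PROJECT', 'my-repo')  # paginated collection

pull_requests.each do |pr|
  # Comments, replies and merge events for one pull request.
  activities = client.activities('SOME-PROJECT', 'my-repo', pr.iid)
  puts "#{pr.title}: #{activities.count} activities"
end

# Temporary branches the importer creates for SHAs that are no longer reachable.
client.create_branch('SOME-PROJECT', 'my-repo', 'gitlab/import/pull-request/1/from', '12345678')
client.delete_branch('SOME-PROJECT', 'my-repo', 'gitlab/import/pull-request/1/from', '12345678')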
require 'spec_helper'
describe BitbucketServer::Connection do
let(:options) { { base_uri: 'https://test:7990', user: 'bitbucket', password: 'mypassword' } }
let(:payload) { { 'test' => 1 } }
let(:headers) { { "Content-Type" => "application/json" } }
let(:url) { 'https://test:7990/rest/api/1.0/test?something=1' }
subject { described_class.new(options) }
describe '#get' do
it 'returns JSON body' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.get(url, { something: 1 })).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
it 'throws an exception if the response is not JSON' do
WebMock.stub_request(:get, url).with(headers: { 'Accept' => 'application/json' }).to_return(body: 'bad data', status: 200, headers: headers)
expect { subject.get(url) }.to raise_error(described_class::ConnectionError)
end
end
describe '#post' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
it 'returns JSON body' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.post(url, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:post, url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.post(url, payload) }.to raise_error(described_class::ConnectionError)
end
end
describe '#delete' do
let(:headers) { { 'Accept' => 'application/json', 'Content-Type' => 'application/json' } }
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
let(:path) { }
it 'returns JSON body' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
expect(subject.delete(:branches, branch_path, payload)).to eq(payload)
end
it 'throws an exception if the response is not 200' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 500, headers: headers)
expect { subject.delete(:branches, branch_path, payload) }.to raise_error(described_class::ConnectionError)
end
end
end
end
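
The connection spec pins down the low-level behaviour: JSON request and response bodies, and a ConnectionError on non-200 statuses or unparsable bodies. A sketch mirroring those calls; the REST paths and payload keys here are illustrative only:

connection = BitbucketServer::Connection.new(base_uri: 'https://bitbucket.example.com',
                                             user: 'bitbucket',
                                             password: 'secret')

begin
  # GET with query parameters; returns the parsed JSON body as a Hash.
  prs = connection.get('/projects/TEST/repos/rouge/pull-requests', state: 'ALL')

  # POST takes a payload hash alongside the path.
  connection.post('/projects/TEST/repos/rouge/branches', name: 'tmp-branch', startPoint: '12345678')

  # DELETE is dispatched per API section: :branches is routed to the
  # branch-utils REST root, as stubbed in the spec above.
  connection.delete(:branches, '/projects/TEST/repos/rouge/branches', name: 'tmp-branch')
rescue BitbucketServer::Connection::ConnectionError => e
  warn "Bitbucket Server request failed: #{e.message}"
end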
require 'spec_helper'
describe BitbucketServer::Page do
let(:response) { { 'values' => [{ 'description' => 'Test' }], 'isLastPage' => false, 'nextPageStart' => 2 } }
before do
# Autoloading hack
BitbucketServer::Representation::PullRequest.new({})
end
describe '#items' do
it 'returns collection of needed objects' do
page = described_class.new(response, :pull_request)
expect(page.items.first).to be_a(BitbucketServer::Representation::PullRequest)
expect(page.items.count).to eq(1)
end
end
describe '#attrs' do
it 'returns attributes' do
page = described_class.new(response, :pull_request)
expect(page.attrs.keys).to include(:isLastPage, :nextPageStart)
end
end
describe '#next?' do
it 'returns true' do
page = described_class.new(response, :pull_request)
expect(page.next?).to be_truthy
end
it 'returns false' do
response['isLastPage'] = true
response.delete('nextPageStart')
page = described_class.new(response, :pull_request)
expect(page.next?).to be_falsey
end
end
describe '#next' do
it 'returns next attribute' do
page = described_class.new(response, :pull_request)
expect(page.next).to eq(2)
end
end
end
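
Page wraps a single paginated API response. A short sketch of the accessors exercised above, using the same response shape:

response = {
  'values'        => [{ 'description' => 'Test' }],
  'isLastPage'    => false,
  'nextPageStart' => 2
}

page = BitbucketServer::Page.new(response, :pull_request)

page.items.each { |pr| puts pr.description } # items are representation objects, here PullRequest
puts page.next?                              # => true, more pages are available
puts page.next                               # => 2, the offset for the next request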
require 'spec_helper'
describe BitbucketServer::Paginator do
let(:last_page) { double(:page, next?: false, items: ['item_2']) }
let(:first_page) { double(:page, next?: true, next: last_page, items: ['item_1']) }
let(:connection) { instance_double(BitbucketServer::Connection) }
describe '#items' do
let(:paginator) { described_class.new(connection, 'http://more-data', :pull_request) }
let(:page_attrs) { { 'isLastPage' => false, 'nextPageStart' => 1 } }
it 'returns items and raises StopIteration in the end' do
allow(paginator).to receive(:fetch_next_page).and_return(first_page)
expect(paginator.items).to match(['item_1'])
allow(paginator).to receive(:fetch_next_page).and_return(last_page)
expect(paginator.items).to match(['item_2'])
allow(paginator).to receive(:fetch_next_page).and_return(nil)
expect { paginator.items }.to raise_error(StopIteration)
end
it 'calls the connection with different offsets' do
expect(connection).to receive(:get).with('http://more-data', start: 0, limit: BitbucketServer::Paginator::PAGE_LENGTH).and_return(page_attrs)
expect(paginator.items).to eq([])
expect(connection).to receive(:get).with('http://more-data', start: 1, limit: BitbucketServer::Paginator::PAGE_LENGTH).and_return({})
expect(paginator.items).to eq([])
expect { paginator.items }.to raise_error(StopIteration)
end
end
end
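
Paginator hands back one page of items per call to #items and raises StopIteration once the server reports the last page, so Kernel#loop can drive it to exhaustion. A sketch, assuming a configured connection:

connection = BitbucketServer::Connection.new(base_uri: 'https://bitbucket.example.com',
                                             user: 'bitbucket',
                                             password: 'secret')
paginator = BitbucketServer::Paginator.new(connection,
                                           '/projects/TEST/repos/rouge/pull-requests?state=ALL',
                                           :pull_request)

all_items = []
loop do
  # Fetches the next PAGE_LENGTH entries; StopIteration ends the loop.
  all_items.concat(paginator.items)
end

puts all_items.size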
require 'spec_helper'
describe BitbucketServer::Representation::Activity do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:inline_comment) { activities.first }
let(:comment) { activities[3] }
let(:merge_event) { activities[4] }
describe 'regular comment' do
subject { described_class.new(comment) }
it { expect(subject.comment?).to be_truthy }
it { expect(subject.inline_comment?).to be_falsey }
it { expect(subject.comment).to be_a(BitbucketServer::Representation::Comment) }
it { expect(subject.created_at).to be_a(Time) }
end
describe 'inline comment' do
subject { described_class.new(inline_comment) }
it { expect(subject.comment?).to be_truthy }
it { expect(subject.inline_comment?).to be_truthy }
it { expect(subject.comment).to be_a(BitbucketServer::Representation::PullRequestComment) }
it { expect(subject.created_at).to be_a(Time) }
end
describe 'merge event' do
subject { described_class.new(merge_event) }
it { expect(subject.comment?).to be_falsey }
it { expect(subject.inline_comment?).to be_falsey }
it { expect(subject.committer_user).to eq('root') }
it { expect(subject.committer_email).to eq('test.user@example.com') }
it { expect(subject.merge_timestamp).to be_a(Time) }
it { expect(subject.created_at).to be_a(Time) }
it { expect(subject.merge_commit).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
end
require 'spec_helper'
describe BitbucketServer::Representation::Comment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.first }
subject { described_class.new(comment) }
describe '#id' do
it { expect(subject.id).to eq(9) }
end
describe '#author_username' do
it { expect(subject.author_username).to eq('root') }
end
describe '#author_email' do
it { expect(subject.author_email).to eq('test.user@example.com') }
end
describe '#note' do
it { expect(subject.note).to eq('is this a new line?') }
end
describe '#created_at' do
it { expect(subject.created_at).to be_a(Time) }
end
describe '#updated_at' do
it { expect(subject.updated_at).to be_a(Time) }
end
describe '#comments' do
it { expect(subject.comments.count).to eq(4) }
it { expect(subject.comments).to all( be_a(described_class) ) }
it { expect(subject.comments.map(&:note)).to match_array(["Hello world", "Ok", "hello", "hi"]) }
# The thread should look like:
#
# is this a new line? (subject)
#   -> Hello world (first)
#      -> Ok (third)
#      -> hi (fourth)
#   -> hello (second)
it 'comments have the right parent' do
first, second, third, fourth = subject.comments[0..4]
expect(subject.parent_comment).to be_nil
expect(first.parent_comment).to eq(subject)
expect(second.parent_comment).to eq(subject)
expect(third.parent_comment).to eq(first)
expect(fourth.parent_comment).to eq(first)
end
end
end
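
Replies are Comment objects too: #comments returns every note in the thread flattened, and #parent_comment points each one back at the note it answers. A sketch of rendering a thread from one COMMENTED activity entry (activity_hash is a placeholder for such an entry from the fixture):

root = BitbucketServer::Representation::Comment.new(activity_hash)

puts "#{root.author_username}: #{root.note}"
root.comments.each do |reply|
  # parent_comment is the root for top-level replies, or another reply for nested ones.
  puts "  #{reply.author_username}: #{reply.note} (re: #{reply.parent_comment.note})"
end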
require 'spec_helper'
describe BitbucketServer::Representation::PullRequestComment do
let(:activities) { JSON.parse(fixture_file('importers/bitbucket_server/activities.json'))['values'] }
let(:comment) { activities.second }
subject { described_class.new(comment) }
describe '#id' do
it { expect(subject.id).to eq(7) }
end
describe '#from_sha' do
it { expect(subject.from_sha).to eq('c5f4288162e2e6218180779c7f6ac1735bb56eab') }
end
describe '#to_sha' do
it { expect(subject.to_sha).to eq('a4c2164330f2549f67c13f36a93884cf66e976be') }
end
describe '#to?' do
it { expect(subject.to?).to be_falsey }
end
describe '#from?' do
it { expect(subject.from?).to be_truthy }
end
describe '#added?' do
it { expect(subject.added?).to be_falsey }
end
describe '#removed?' do
it { expect(subject.removed?).to be_falsey }
end
describe '#new_pos' do
it { expect(subject.new_pos).to eq(11) }
end
describe '#old_pos' do
it { expect(subject.old_pos).to eq(9) }
end
describe '#file_path' do
it { expect(subject.file_path).to eq('CHANGELOG.md') }
end
end
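
An inline comment carries its diff anchor: the file path, the FROM/TO side, old and new line numbers, and the two commit SHAs. The importer spec further down maps these onto a GitLab diff position; a hedged sketch of that mapping, with the Gitlab::Diff::Position keywords assumed here rather than confirmed by this diff:

def position_for(inline_comment)
  # For the ADDED-side comment in the importer spec, old_pos is nil and
  # new_pos becomes the new_line of the resulting position.
  Gitlab::Diff::Position.new(
    old_path: inline_comment.file_path,
    new_path: inline_comment.file_path,
    base_sha: inline_comment.from_sha,   # spec: position.base_sha  == from_sha
    start_sha: inline_comment.from_sha,  # spec: position.start_sha == from_sha
    head_sha: inline_comment.to_sha,     # spec: position.head_sha  == to_sha
    old_line: inline_comment.old_pos,
    new_line: inline_comment.new_pos
  )
end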
require 'spec_helper'
describe BitbucketServer::Representation::PullRequest do
let(:sample_data) { JSON.parse(fixture_file('importers/bitbucket_server/pull_request.json')) }
subject { described_class.new(sample_data) }
describe '#author' do
it { expect(subject.author).to eq('root') }
end
describe '#author_email' do
it { expect(subject.author_email).to eq('joe.montana@49ers.com') }
end
describe '#description' do
it { expect(subject.description).to eq('Test') }
end
describe '#iid' do
it { expect(subject.iid).to eq(7) }
end
describe '#state' do
it { expect(subject.state).to eq('merged') }
context 'declined pull requests' do
before do
sample_data['state'] = 'DECLINED'
end
it 'returns closed' do
expect(subject.state).to eq('closed')
end
end
context 'open pull requests' do
before do
sample_data['state'] = 'OPEN'
end
it 'returns open' do
expect(subject.state).to eq('opened')
end
end
end
describe '#merged?' do
it { expect(subject.merged?).to be_truthy }
end
describe '#created_at' do
it { expect(subject.created_at.to_i).to eq(sample_data['createdDate'] / 1000) }
end
describe '#updated_at' do
it { expect(subject.updated_at.to_i).to eq(sample_data['updatedDate'] / 1000) }
end
describe '#title' do
it { expect(subject.title).to eq('Added a new line') }
end
describe '#source_branch_name' do
it { expect(subject.source_branch_name).to eq('refs/heads/root/CODE_OF_CONDUCTmd-1530600625006') }
end
describe '#source_branch_sha' do
it { expect(subject.source_branch_sha).to eq('074e2b4dddc5b99df1bf9d4a3f66cfc15481fdc8') }
end
describe '#target_branch_name' do
it { expect(subject.target_branch_name).to eq('refs/heads/master') }
end
describe '#target_branch_sha' do
it { expect(subject.target_branch_sha).to eq('839fa9a2d434eb697815b8fcafaecc51accfdbbc') }
end
end
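
The representation normalizes the raw payload: state becomes a GitLab term (merged, closed, opened), millisecond epochs become Time objects, and branch names keep their refs/heads/ prefix. A quick sketch over the pull_request.json fixture, path assumed as before:

require 'json'

raw = JSON.parse(File.read('spec/fixtures/importers/bitbucket_server/pull_request.json'))
pr  = BitbucketServer::Representation::PullRequest.new(raw)

puts pr.iid                # => 7
puts pr.state              # => "merged" (DECLINED maps to "closed", OPEN to "opened")
puts pr.source_branch_name # => "refs/heads/root/CODE_OF_CONDUCTmd-1530600625006"
puts pr.created_at.class   # => Time, converted from the millisecond createdDate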
require 'spec_helper'
describe BitbucketServer::Representation::Repo do
let(:sample_data) do
<<~DATA
{
"slug": "rouge",
"id": 1,
"name": "rouge",
"scmId": "git",
"state": "AVAILABLE",
"statusMessage": "Available",
"forkable": true,
"project": {
"key": "TEST",
"id": 1,
"name": "test",
"description": "Test",
"public": false,
"type": "NORMAL",
"links": {
"self": [
{
"href": "http://localhost:7990/projects/TEST"
}
]
}
},
"public": false,
"links": {
"clone": [
{
"href": "http://root@localhost:7990/scm/test/rouge.git",
"name": "http"
},
{
"href": "ssh://git@localhost:7999/test/rouge.git",
"name": "ssh"
}
],
"self": [
{
"href": "http://localhost:7990/projects/TEST/repos/rouge/browse"
}
]
}
}
DATA
end
subject { described_class.new(JSON.parse(sample_data)) }
describe '#project_key' do
it { expect(subject.project_key).to eq('TEST') }
end
describe '#project_name' do
it { expect(subject.project_name).to eq('test') }
end
describe '#slug' do
it { expect(subject.slug).to eq('rouge') }
end
describe '#browse_url' do
it { expect(subject.browse_url).to eq('http://localhost:7990/projects/TEST/repos/rouge/browse') }
end
describe '#clone_url' do
it { expect(subject.clone_url).to eq('http://root@localhost:7990/scm/test/rouge.git') }
end
describe '#description' do
it { expect(subject.description).to eq('Test') }
end
describe '#full_name' do
it { expect(subject.full_name).to eq('test/rouge') }
end
end
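
Repo surfaces what the import page and the importer need: project key and slug for the REST calls, plus browse and clone URLs. A sketch against the inline sample_data above:

repo = BitbucketServer::Representation::Repo.new(JSON.parse(sample_data))

puts repo.full_name   # => "test/rouge" (project name + slug)
puts repo.project_key # => "TEST"
puts repo.clone_url   # => "http://root@localhost:7990/scm/test/rouge.git"
puts repo.browse_url  # => "http://localhost:7990/projects/TEST/repos/rouge/browse"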
require 'spec_helper'
describe Gitlab::BitbucketServerImport::Importer do
include ImportSpecHelper
let(:project) { create(:project, :repository, import_url: 'http://my-bitbucket') }
let(:now) { Time.now.utc.change(usec: 0) }
let(:project_key) { 'TEST' }
let(:repo_slug) { 'rouge' }
let(:sample) { RepoHelpers.sample_compare }
subject { described_class.new(project, recover_missing_commits: true) }
before do
data = project.create_or_update_import_data(
data: { project_key: project_key, repo_slug: repo_slug },
credentials: { base_uri: 'http://my-bitbucket', user: 'bitbucket', password: 'test' }
)
data.save
project.save
end
describe '#import_repository' do
before do
expect(subject).to receive(:import_pull_requests)
expect(subject).to receive(:delete_temp_branches)
end
it 'adds a remote' do
expect(project.repository).to receive(:fetch_as_mirror)
.with('http://bitbucket:test@my-bitbucket',
refmap: [:heads, :tags, '+refs/pull-requests/*/to:refs/merge-requests/*/head'],
remote_name: 'bitbucket_server')
subject.execute
end
end
describe '#import_pull_requests' do
before do
allow(subject).to receive(:import_repository)
allow(subject).to receive(:delete_temp_branches)
allow(subject).to receive(:restore_branches)
pull_request = instance_double(
BitbucketServer::Representation::PullRequest,
iid: 10,
source_branch_sha: sample.commits.last,
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: sample.commits.first,
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
author: 'Test Author',
author_email: project.owner.email,
created_at: Time.now,
updated_at: Time.now,
merged?: true)
allow(subject.client).to receive(:pull_requests).and_return([pull_request])
@merge_event = instance_double(
BitbucketServer::Representation::Activity,
comment?: false,
merge_event?: true,
committer_email: project.owner.email,
merge_timestamp: now,
merge_commit: '12345678'
)
@pr_note = instance_double(
BitbucketServer::Representation::Comment,
note: 'Hello world',
author_email: 'unknown@gmail.com',
author_username: 'The Flash',
comments: [],
created_at: now,
updated_at: now,
parent_comment: nil)
@pr_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: false,
merge_event?: false,
comment: @pr_note)
end
it 'imports merge event' do
expect(subject.client).to receive(:activities).and_return([@merge_event])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.metrics.merged_by).to eq(project.owner)
expect(merge_request.metrics.merged_at).to eq(@merge_event.merge_timestamp)
expect(merge_request.merge_commit_sha).to eq('12345678')
end
it 'imports comments' do
expect(subject.client).to receive(:activities).and_return([@pr_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(1)
note = merge_request.notes.first
expect(note.note).to end_with(@pr_note.note)
expect(note.author).to eq(project.owner)
expect(note.created_at).to eq(@pr_note.created_at)
expect(note.updated_at).to eq(@pr_note.created_at)
end
it 'imports threaded discussions' do
reply = instance_double(
BitbucketServer::Representation::PullRequestComment,
author_email: 'someuser@gitlab.com',
author_username: 'Batman',
note: 'I agree',
created_at: now,
updated_at: now)
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
inline_note = instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'ADDED',
from_sha: sample.commits.first,
to_sha: sample.commits.last,
file_path: '.gitmodules',
old_pos: nil,
new_pos: 4,
note: 'Hello world',
author_email: 'unknown@gmail.com',
author_username: 'Superman',
comments: [reply],
created_at: now,
updated_at: now,
parent_comment: nil)
allow(reply).to receive(:parent_comment).and_return(inline_note)
inline_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: true,
merge_event?: false,
comment: inline_note)
expect(subject.client).to receive(:activities).and_return([inline_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(2)
expect(merge_request.notes.map(&:discussion_id).uniq.count).to eq(1)
notes = merge_request.notes.order(:id).to_a
start_note = notes.first
expect(start_note.type).to eq('DiffNote')
expect(start_note.note).to end_with(inline_note.note)
expect(start_note.created_at).to eq(inline_note.created_at)
expect(start_note.updated_at).to eq(inline_note.updated_at)
expect(start_note.position.base_sha).to eq(inline_note.from_sha)
expect(start_note.position.start_sha).to eq(inline_note.from_sha)
expect(start_note.position.head_sha).to eq(inline_note.to_sha)
expect(start_note.position.old_line).to be_nil
expect(start_note.position.new_line).to eq(inline_note.new_pos)
reply_note = notes.last
# Make sure author and reply context is included
expect(reply_note.note).to start_with("*By #{reply.author_username} (#{reply.author_email})*\n\n")
expect(reply_note.note).to end_with("> #{inline_note.note}\n\n#{reply.note}")
expect(reply_note.author).to eq(project.owner)
expect(reply_note.created_at).to eq(reply.created_at)
expect(reply_note.updated_at).to eq(reply.created_at)
expect(reply_note.position.base_sha).to eq(inline_note.from_sha)
expect(reply_note.position.start_sha).to eq(inline_note.from_sha)
expect(reply_note.position.head_sha).to eq(inline_note.to_sha)
expect(reply_note.position.old_line).to be_nil
expect(reply_note.position.new_line).to eq(inline_note.new_pos)
end
it 'falls back to comments if diff comments fail to validate' do
reply = instance_double(
BitbucketServer::Representation::Comment,
author_email: 'someuser@gitlab.com',
author_username: 'Aquaman',
note: 'I agree',
created_at: now,
updated_at: now)
# https://gitlab.com/gitlab-org/gitlab-test/compare/c1acaa58bbcbc3eafe538cb8274ba387047b69f8...5937ac0a7beb003549fc5fd26fc247ad
inline_note = instance_double(
BitbucketServer::Representation::PullRequestComment,
file_type: 'REMOVED',
from_sha: sample.commits.first,
to_sha: sample.commits.last,
file_path: '.gitmodules',
old_pos: 8,
new_pos: 9,
note: 'This is a note with an invalid line position.',
author_email: project.owner.email,
author_username: 'Owner',
comments: [reply],
created_at: now,
updated_at: now,
parent_comment: nil)
inline_comment = instance_double(
BitbucketServer::Representation::Activity,
comment?: true,
inline_comment?: true,
merge_event?: false,
comment: inline_note)
allow(reply).to receive(:parent_comment).and_return(inline_note)
expect(subject.client).to receive(:activities).and_return([inline_comment])
expect { subject.execute }.to change { MergeRequest.count }.by(1)
merge_request = MergeRequest.first
expect(merge_request.notes.count).to eq(2)
notes = merge_request.notes
expect(notes.first.note).to start_with('*Comment on .gitmodules')
expect(notes.second.note).to start_with('*Comment on .gitmodules')
end
end
describe 'inaccessible branches' do
let(:id) { 10 }
let(:temp_branch_from) { "gitlab/import/pull-request/#{id}/from" }
let(:temp_branch_to) { "gitlab/import/pull-request/#{id}/to" }
before do
pull_request = instance_double(
BitbucketServer::Representation::PullRequest,
iid: id,
source_branch_sha: '12345678',
source_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.source_branch,
target_branch_sha: '98765432',
target_branch_name: Gitlab::Git::BRANCH_REF_PREFIX + sample.target_branch,
title: 'This is a title',
description: 'This is a test pull request',
state: 'merged',
author: 'Test Author',
author_email: project.owner.email,
created_at: Time.now,
updated_at: Time.now,
merged?: true)
expect(subject.client).to receive(:pull_requests).and_return([pull_request])
expect(subject.client).to receive(:activities).and_return([])
expect(subject).to receive(:import_repository).twice
end
it '#restore_branches' do
expect(subject).to receive(:restore_branches).and_call_original
expect(subject).to receive(:delete_temp_branches)
expect(subject.client).to receive(:create_branch)
.with(project_key, repo_slug,
temp_branch_from,
'12345678')
expect(subject.client).to receive(:create_branch)
.with(project_key, repo_slug,
temp_branch_to,
'98765432')
expect { subject.execute }.to change { MergeRequest.count }.by(1)
end
it '#delete_temp_branches' do
expect(subject.client).to receive(:create_branch).twice
expect(subject).to receive(:delete_temp_branches).and_call_original
expect(subject.client).to receive(:delete_branch)
.with(project_key, repo_slug,
temp_branch_from,
'12345678')
expect(subject.client).to receive(:delete_branch)
.with(project_key, repo_slug,
temp_branch_to,
'98765432')
expect(project.repository).to receive(:delete_branch).with(temp_branch_from)
expect(project.repository).to receive(:delete_branch).with(temp_branch_to)
expect { subject.execute }.to change { MergeRequest.count }.by(1)
end
end
end
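
The importer spec shows the two pieces #execute expects on the project: import_data holding project_key/repo_slug plus the Bitbucket credentials, and an import_url for the repository fetch. A hedged sketch of wiring that up outside the spec, assuming an existing Project record:

import_data = project.create_or_update_import_data(
  data: { project_key: 'TEST', repo_slug: 'rouge' },
  credentials: { base_uri: 'https://bitbucket.example.com', user: 'bitbucket', password: 'secret' }
)
import_data.save
project.update(import_url: 'https://bitbucket.example.com/scm/test/rouge.git')

# recover_missing_commits: true lets the importer create the temporary
# gitlab/import/pull-request/<id>/{from,to} branches for unreachable SHAs
# and delete them again once the merge requests are created.
Gitlab::BitbucketServerImport::Importer.new(project, recover_missing_commits: true).execute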
...@@ -6,7 +6,8 @@ describe Gitlab::ImportSources do ...@@ -6,7 +6,8 @@ describe Gitlab::ImportSources do
expected = expected =
{ {
'GitHub' => 'github', 'GitHub' => 'github',
'Bitbucket' => 'bitbucket', 'Bitbucket Cloud' => 'bitbucket',
'Bitbucket Server' => 'bitbucket_server',
'GitLab.com' => 'gitlab', 'GitLab.com' => 'gitlab',
'Google Code' => 'google_code', 'Google Code' => 'google_code',
'FogBugz' => 'fogbugz', 'FogBugz' => 'fogbugz',
...@@ -26,6 +27,7 @@ describe Gitlab::ImportSources do ...@@ -26,6 +27,7 @@ describe Gitlab::ImportSources do
%w( %w(
github github
bitbucket bitbucket
bitbucket_server
gitlab gitlab
google_code google_code
fogbugz fogbugz
...@@ -45,6 +47,7 @@ describe Gitlab::ImportSources do ...@@ -45,6 +47,7 @@ describe Gitlab::ImportSources do
%w( %w(
github github
bitbucket bitbucket
bitbucket_server
gitlab gitlab
google_code google_code
fogbugz fogbugz
...@@ -60,6 +63,7 @@ describe Gitlab::ImportSources do ...@@ -60,6 +63,7 @@ describe Gitlab::ImportSources do
import_sources = { import_sources = {
'github' => Gitlab::GithubImport::ParallelImporter, 'github' => Gitlab::GithubImport::ParallelImporter,
'bitbucket' => Gitlab::BitbucketImport::Importer, 'bitbucket' => Gitlab::BitbucketImport::Importer,
'bitbucket_server' => Gitlab::BitbucketServerImport::Importer,
'gitlab' => Gitlab::GitlabImport::Importer, 'gitlab' => Gitlab::GitlabImport::Importer,
'google_code' => Gitlab::GoogleCodeImport::Importer, 'google_code' => Gitlab::GoogleCodeImport::Importer,
'fogbugz' => Gitlab::FogbugzImport::Importer, 'fogbugz' => Gitlab::FogbugzImport::Importer,
...@@ -79,7 +83,8 @@ describe Gitlab::ImportSources do ...@@ -79,7 +83,8 @@ describe Gitlab::ImportSources do
describe '.title' do describe '.title' do
import_sources = { import_sources = {
'github' => 'GitHub', 'github' => 'GitHub',
'bitbucket' => 'Bitbucket', 'bitbucket' => 'Bitbucket Cloud',
'bitbucket_server' => 'Bitbucket Server',
'gitlab' => 'GitLab.com', 'gitlab' => 'GitLab.com',
'google_code' => 'Google Code', 'google_code' => 'Google Code',
'fogbugz' => 'FogBugz', 'fogbugz' => 'FogBugz',
...@@ -97,7 +102,7 @@ describe Gitlab::ImportSources do ...@@ -97,7 +102,7 @@ describe Gitlab::ImportSources do
end end
describe 'imports_repository? checker' do describe 'imports_repository? checker' do
let(:allowed_importers) { %w[github gitlab_project] } let(:allowed_importers) { %w[github gitlab_project bitbucket_server] }
it 'fails if any importer other than the allowed ones implements this method' do it 'fails if any importer other than the allowed ones implements this method' do
current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) } current_importers = described_class.values.select { |kind| described_class.importer(kind).try(:imports_repository?) }
......
...@@ -114,6 +114,17 @@ describe Projects::CreateService, '#execute' do ...@@ -114,6 +114,17 @@ describe Projects::CreateService, '#execute' do
end end
end end
context 'import data' do
it 'stores import data and URL' do
import_data = { data: { 'test' => 'some data' } }
project = create_project(user, { name: 'test', import_url: 'http://import-url', import_data: import_data })
expect(project.import_data).to be_persisted
expect(project.import_data.data).to eq(import_data[:data])
expect(project.import_url).to eq('http://import-url')
end
end
context 'builds_enabled global setting' do context 'builds_enabled global setting' do
let(:project) { create_project(user, opts) } let(:project) { create_project(user, opts) }
......