Commit 91e530df authored by Shinya Maeda's avatar Shinya Maeda

Merge branch 'master' into feature/sm/35954-create-kubernetes-cluster-on-gke-from-k8s-service

parents f2932885 9560d5b8
/* eslint-disable func-names, space-before-function-paren, wrap-iife, prefer-arrow-callback, no-unused-vars, no-return-assign, max-len */
import { visitUrl } from './lib/utils/url_utility';
import { convertPermissionToBoolean } from './lib/utils/common_utils';
window.BuildArtifacts = (function() {
function BuildArtifacts() {
this.disablePropagation();
this.setupEntryClick();
this.setupTooltips();
}
BuildArtifacts.prototype.disablePropagation = function() {
......@@ -17,7 +20,26 @@ window.BuildArtifacts = (function() {
BuildArtifacts.prototype.setupEntryClick = function() {
return $('.tree-holder').on('click', 'tr[data-link]', function(e) {
return window.location = this.dataset.link;
visitUrl(this.dataset.link, convertPermissionToBoolean(this.dataset.externalLink));
});
};
BuildArtifacts.prototype.setupTooltips = function() {
$('.js-artifact-tree-tooltip').tooltip({
placement: 'bottom',
// Stop the tooltip from hiding when we stop hovering the element directly
// We handle all the showing/hiding below
trigger: 'manual',
});
// We want the tooltip to show if you hover anywhere on the row
// But be placed below and in the middle of the file name
$('.js-artifact-tree-row')
.on('mouseenter', (e) => {
$(e.currentTarget).find('.js-artifact-tree-tooltip').tooltip('show');
})
.on('mouseleave', (e) => {
$(e.currentTarget).find('.js-artifact-tree-tooltip').tooltip('hide');
});
};
......
......@@ -738,7 +738,7 @@ GitLabDropdown = (function() {
: selectedObject.id;
if (isInput) {
field = $(this.el);
} else if (value) {
} else if (value != null) {
field = this.dropdown.parent().find("input[name='" + fieldName + "'][value='" + value.toString().replace(/'/g, '\\\'') + "']");
}
......@@ -746,7 +746,7 @@ GitLabDropdown = (function() {
return;
}
if (el.hasClass(ACTIVE_CLASS)) {
if (el.hasClass(ACTIVE_CLASS) && value !== 0) {
isMarking = false;
el.removeClass(ACTIVE_CLASS);
if (field && field.length) {
......@@ -852,7 +852,7 @@ GitLabDropdown = (function() {
if (href && href !== '#') {
gl.utils.visitUrl(href);
} else {
$el.first().trigger('click');
$el.trigger('click');
}
}
};
......
/* eslint-disable func-names, space-before-function-paren, wrap-iife, no-var, no-param-reassign, no-cond-assign, one-var, one-var-declaration-per-line, no-void, guard-for-in, no-restricted-syntax, prefer-template, quotes, max-len */
var base;
var w = window;
if (w.gl == null) {
......@@ -86,6 +87,21 @@ w.gl.utils.getLocationHash = function(url) {
w.gl.utils.refreshCurrentPage = () => gl.utils.visitUrl(document.location.href);
w.gl.utils.visitUrl = (url) => {
// eslint-disable-next-line import/prefer-default-export
export function visitUrl(url, external = false) {
if (external) {
// Simulate `target="_blank" rel="noopener noreferrer"`
// See https://mathiasbynens.github.io/rel-noopener/
const otherWindow = window.open();
otherWindow.opener = null;
otherWindow.location = url;
} else {
document.location.href = url;
}
}
window.gl = window.gl || {};
window.gl.utils = {
...(window.gl.utils || {}),
visitUrl,
};
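A minimal usage sketch of the new `visitUrl` helper follows; the URLs are illustrative placeholders, not values taken from this diff:
import { visitUrl } from './lib/utils/url_utility';
// Internal navigation: replaces the current document location.
visitUrl('/namespace/project/-/jobs/123/artifacts/browse');
// External link (for example an artifact served by GitLab Pages): opened in a
// new window with `opener` cleared first, so the target page cannot reach back
// into the GitLab tab through `window.opener`.
visitUrl('http://namespace.example.io/project/-/jobs/123/artifacts/file/index.html', true);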
import _ from 'underscore';
import ProtectedBranchAccessDropdown from './protected_branch_access_dropdown';
import ProtectedBranchDropdown from './protected_branch_dropdown';
import AccessorUtilities from '../lib/utils/accessor';
const PB_LOCAL_STORAGE_KEY = 'protected-branches-defaults';
export default class ProtectedBranchCreate {
constructor() {
this.$form = $('.js-new-protected-branch');
this.isLocalStorageAvailable = AccessorUtilities.isLocalStorageAccessSafe();
this.currentProjectUserDefaults = {};
this.buildDropdowns();
}
buildDropdowns() {
const $allowedToMergeDropdown = this.$form.find('.js-allowed-to-merge');
const $allowedToPushDropdown = this.$form.find('.js-allowed-to-push');
const $protectedBranchDropdown = this.$form.find('.js-protected-branch-select');
// Cache callback
this.onSelectCallback = this.onSelect.bind(this);
......@@ -28,15 +35,13 @@ export default class ProtectedBranchCreate {
onSelect: this.onSelectCallback,
});
// Select default
$allowedToPushDropdown.data('glDropdown').selectRowAtIndex(0);
$allowedToMergeDropdown.data('glDropdown').selectRowAtIndex(0);
// Protected branch dropdown
this.protectedBranchDropdown = new ProtectedBranchDropdown({
$dropdown: this.$form.find('.js-protected-branch-select'),
$dropdown: $protectedBranchDropdown,
onSelect: this.onSelectCallback,
});
this.loadPreviousSelection($allowedToMergeDropdown.data('glDropdown'), $allowedToPushDropdown.data('glDropdown'));
}
// This will run after clicked callback
......@@ -45,7 +50,41 @@ export default class ProtectedBranchCreate {
const $branchInput = this.$form.find('input[name="protected_branch[name]"]');
const $allowedToMergeInput = this.$form.find('input[name="protected_branch[merge_access_levels_attributes][0][access_level]"]');
const $allowedToPushInput = this.$form.find('input[name="protected_branch[push_access_levels_attributes][0][access_level]"]');
const completedForm = !(
$branchInput.val() &&
$allowedToMergeInput.length &&
$allowedToPushInput.length
);
this.savePreviousSelection($allowedToMergeInput.val(), $allowedToPushInput.val());
this.$form.find('input[type="submit"]').attr('disabled', completedForm);
}
loadPreviousSelection(mergeDropdown, pushDropdown) {
let mergeIndex = 0;
let pushIndex = 0;
if (this.isLocalStorageAvailable) {
const savedDefaults = JSON.parse(window.localStorage.getItem(PB_LOCAL_STORAGE_KEY));
if (savedDefaults != null) {
mergeIndex = _.findLastIndex(mergeDropdown.fullData.roles, {
id: parseInt(savedDefaults.mergeSelection, 0),
});
pushIndex = _.findLastIndex(pushDropdown.fullData.roles, {
id: parseInt(savedDefaults.pushSelection, 0),
});
}
}
mergeDropdown.selectRowAtIndex(mergeIndex);
pushDropdown.selectRowAtIndex(pushIndex);
}
this.$form.find('input[type="submit"]').attr('disabled', !($branchInput.val() && $allowedToMergeInput.length && $allowedToPushInput.length));
savePreviousSelection(mergeSelection, pushSelection) {
if (this.isLocalStorageAvailable) {
const branchDefaults = {
mergeSelection,
pushSelection,
};
window.localStorage.setItem(PB_LOCAL_STORAGE_KEY, JSON.stringify(branchDefaults));
}
}
}
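As a rough sketch of the persistence round trip added above (the storage key comes from the code; the access-level ids are placeholders only):
// savePreviousSelection() stores the last chosen access-level ids as strings.
window.localStorage.setItem(
  'protected-branches-defaults',
  JSON.stringify({ mergeSelection: '40', pushSelection: '30' }),
);
// loadPreviousSelection() parses them back and maps each id to a row index in
// the dropdown's fullData.roles, so the previous choice is preselected the
// next time the protected branches form is rendered.
const savedDefaults = JSON.parse(window.localStorage.getItem('protected-branches-defaults'));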
......@@ -67,7 +67,7 @@ export default {
return defaultClass;
},
iconClass() {
if (this.status === 'failed' || !this.commitMessage.length || !this.isMergeAllowed() || this.mr.preventMerge) {
if (this.status === 'failed' || !this.commitMessage.length || !this.mr.isMergeAllowed || this.mr.preventMerge) {
return 'failed';
}
return 'success';
......@@ -100,13 +100,8 @@ export default {
},
},
methods: {
isMergeAllowed() {
return !this.mr.onlyAllowMergeIfPipelineSucceeds ||
this.mr.isPipelinePassing ||
this.mr.isPipelineSkipped;
},
shouldShowMergeControls() {
return this.isMergeAllowed() || this.shouldShowMergeWhenPipelineSucceedsText;
return this.mr.isMergeAllowed || this.shouldShowMergeWhenPipelineSucceedsText;
},
updateCommitMessage() {
const cmwd = this.mr.commitMessageWithDescription;
......
......@@ -73,6 +73,7 @@ export default class MergeRequestStore {
this.canCancelAutomaticMerge = !!data.cancel_merge_when_pipeline_succeeds_path;
this.hasSHAChanged = this.sha !== data.diff_head_sha;
this.canBeMerged = data.can_be_merged || false;
this.isMergeAllowed = data.mergeable || false;
this.mergeOngoing = data.merge_ongoing;
// Cherry-pick and Revert actions related
......
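The pieces above fit together roughly as follows (hypothetical payload values): MergeRequestEntity now exposes `mergeable`, the store copies it into `isMergeAllowed`, and the widget reads that flag instead of re-deriving merge-ability from pipeline state on the client.
const data = { can_be_merged: true, mergeable: true, merge_ongoing: false };
const store = {
  canBeMerged: data.can_be_merged || false,
  isMergeAllowed: data.mergeable || false,
};
// shouldShowMergeControls() in the widget now reduces to:
const shouldShowMergeControls = (shouldShowMergeWhenPipelineSucceedsText = false) =>
  store.isMergeAllowed || shouldShowMergeWhenPipelineSucceedsText;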
......@@ -169,6 +169,14 @@
}
}
.tree-item-file-external-link {
margin-right: 4px;
span {
text-decoration: inherit;
}
}
.tree_commit {
max-width: 320px;
......
......@@ -7,9 +7,8 @@ class Dashboard::TodosController < Dashboard::ApplicationController
def index
@sort = params[:sort]
@todos = @todos.page(params[:page])
if @todos.out_of_range? && @todos.total_pages != 0
redirect_to url_for(params.merge(page: @todos.total_pages, only_path: true))
end
return if redirect_out_of_range(@todos)
end
def destroy
......@@ -60,7 +59,7 @@ class Dashboard::TodosController < Dashboard::ApplicationController
end
def find_todos
@todos ||= TodosFinder.new(current_user, params).execute
@todos ||= TodosFinder.new(current_user, todo_params).execute
end
def todos_counts
......@@ -69,4 +68,27 @@ class Dashboard::TodosController < Dashboard::ApplicationController
done_count: number_with_delimiter(current_user.todos_done_count)
}
end
def todo_params
params.permit(:action_id, :author_id, :project_id, :type, :sort, :state)
end
def redirect_out_of_range(todos)
total_pages =
if todo_params.except(:sort, :page).empty?
(current_user.todos_pending_count / todos.limit_value).ceil
else
todos.total_pages
end
return false if total_pages.zero?
out_of_range = todos.current_page > total_pages
if out_of_range
redirect_to url_for(params.merge(page: total_pages, only_path: true))
end
out_of_range
end
end
......@@ -29,6 +29,9 @@ class Projects::ArtifactsController < Projects::ApplicationController
blob = @entry.blob
conditionally_expand_blob(blob)
if blob.external_link?(build)
redirect_to blob.external_url(@project, build)
else
respond_to do |format|
format.html do
render 'file'
......@@ -39,6 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
end
end
end
end
def raw
path = Gitlab::Ci::Build::Artifacts::Path
......
......@@ -13,7 +13,7 @@ class SessionsController < Devise::SessionsController
before_action :auto_sign_in_with_provider, only: [:new]
before_action :load_recaptcha
after_action :log_failed_login, only: [:new]
after_action :log_failed_login, only: [:new], if: :failed_login?
def new
set_minimum_password_length
......@@ -46,8 +46,6 @@ class SessionsController < Devise::SessionsController
private
def log_failed_login
return unless failed_login?
Gitlab::AppLogger.info("Failed Login: username=#{user_params[:login]} ip=#{request.remote_ip}")
end
......
......@@ -2,6 +2,8 @@ module Ci
class ArtifactBlob
include BlobLike
EXTENTIONS_SERVED_BY_PAGES = %w[.html .htm .txt .json].freeze
attr_reader :entry
def initialize(entry)
......@@ -17,6 +19,7 @@ module Ci
def size
entry.metadata[:size]
end
alias_method :external_size, :size
def data
"Build artifact #{path}"
......@@ -30,6 +33,27 @@ module Ci
:build_artifact
end
alias_method :external_size, :size
def external_url(project, job)
return unless external_link?(job)
components = project.full_path_components
components << "-/jobs/#{job.id}/artifacts/file/#{path}"
artifact_path = components[1..-1].join('/')
"#{pages_config.protocol}://#{components[0]}.#{pages_config.host}/#{artifact_path}"
end
def external_link?(job)
pages_config.enabled &&
pages_config.artifacts_server &&
EXTENTIONS_SERVED_BY_PAGES.include?(File.extname(name)) &&
job.project.public?
end
private
def pages_config
Gitlab.config.pages
end
end
end
module RepositoryMirroring
def set_remote_as_mirror(name)
config = raw_repository.rugged.config
IMPORT_HEAD_REFS = '+refs/heads/*:refs/heads/*'.freeze
IMPORT_TAG_REFS = '+refs/tags/*:refs/tags/*'.freeze
def set_remote_as_mirror(name)
# This is used to define the repository as equivalent to "git clone --mirror"
config["remote.#{name}.fetch"] = 'refs/*:refs/*'
config["remote.#{name}.mirror"] = true
config["remote.#{name}.prune"] = true
raw_repository.rugged.config["remote.#{name}.fetch"] = 'refs/*:refs/*'
raw_repository.rugged.config["remote.#{name}.mirror"] = true
raw_repository.rugged.config["remote.#{name}.prune"] = true
end
def set_import_remote_as_mirror(remote_name)
# Add first fetch with Rugged so it does not create its own.
raw_repository.rugged.config["remote.#{remote_name}.fetch"] = IMPORT_HEAD_REFS
add_remote_fetch_config(remote_name, IMPORT_TAG_REFS)
raw_repository.rugged.config["remote.#{remote_name}.mirror"] = true
raw_repository.rugged.config["remote.#{remote_name}.prune"] = true
end
def add_remote_fetch_config(remote_name, refspec)
run_git(%W[config --add remote.#{remote_name}.fetch #{refspec}])
end
def fetch_mirror(remote, url)
......
......@@ -106,6 +106,10 @@ module Routable
RequestStore[full_path_key] ||= uncached_full_path
end
def full_path_components
full_path.split('/')
end
def expires_full_path_cache
RequestStore.delete(full_path_key) if RequestStore.active?
@full_path = nil
......
......@@ -415,6 +415,8 @@ class MergeRequest < ActiveRecord::Base
end
def create_merge_request_diff
fetch_ref
# n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37435
Gitlab::GitalyClient.allow_n_plus_1_calls do
merge_request_diffs.create
......@@ -462,6 +464,7 @@ class MergeRequest < ActiveRecord::Base
return unless open?
old_diff_refs = self.diff_refs
create_merge_request_diff
MergeRequests::MergeRequestDiffCacheService.new.execute(self)
new_diff_refs = self.diff_refs
......
......@@ -55,7 +55,6 @@ class MergeRequestDiff < ActiveRecord::Base
end
def ensure_commit_shas
merge_request.fetch_ref
self.start_commit_sha ||= merge_request.target_branch_sha
self.head_commit_sha ||= merge_request.source_branch_sha
self.base_commit_sha ||= find_base_sha
......
......@@ -1035,6 +1035,8 @@ class Project < ActiveRecord::Base
end
true
rescue GRPC::Internal # if the path is too long
false
end
def create_repository(force: false)
......
......@@ -42,6 +42,7 @@ class MergeRequestEntity < IssuableEntity
expose :commits_count
expose :cannot_be_merged?, as: :has_conflicts
expose :can_be_merged?, as: :can_be_merged
expose :mergeable?, as: :mergeable
expose :remove_source_branch?, as: :remove_source_branch
expose :project_archived do |merge_request|
......
- blob = file.blob
- path_to_file = file_project_job_artifacts_path(@project, @build, path: file.path)
- external_link = blob.external_link?(@build)
%tr.tree-item{ 'data-link' => path_to_file }
- blob = file.blob
%tr.tree-item.js-artifact-tree-row{ data: { link: path_to_file, external_link: "#{external_link}" } }
%td.tree-item-file-name
= tree_icon('file', blob.mode, blob.name)
- if external_link
= link_to path_to_file, class: 'tree-item-file-external-link js-artifact-tree-tooltip',
target: '_blank', rel: 'noopener noreferrer', title: _('Opens in a new window') do
%span.str-truncated>= blob.name
= icon('external-link', class: 'js-artifact-tree-external-icon')
- else
= link_to path_to_file do
%span.str-truncated= blob.name
%td
......
---
title: Added defaults for protected branches dropdowns on the repository settings
merge_request: 14278
author:
type: changed
---
title: Add online view of HTML artifacts for public projects
merge_request: 14399
author:
type: added
---
title: Allow merge in MR widget with no pipeline but using "Only allow merge requests
to be merged if the pipeline succeeds"
merge_request: 14633
author:
type: fixed
---
title: Improve GitHub import performance
merge_request: 14445
author:
type: other
---
title: Remove a SQL query from the todos index page
merge_request:
author:
type: other
......@@ -164,6 +164,7 @@ production: &base
host: example.com
port: 80 # Set to 443 if you serve the pages with HTTPS
https: false # Set to true if you serve the pages with HTTPS
artifacts_server: true
# external_http: ["1.1.1.1:80", "[2001::1]:80"] # If defined, enables custom domain support in GitLab Pages
# external_https: ["1.1.1.1:443", "[2001::1]:443"] # If defined, enables custom domain and certificate support in GitLab Pages
......
......@@ -325,6 +325,7 @@ Settings.pages['protocol'] ||= Settings.pages.https ? "https" : "http"
Settings.pages['url'] ||= Settings.__send__(:build_pages_url)
Settings.pages['external_http'] ||= false unless Settings.pages['external_http'].present?
Settings.pages['external_https'] ||= false unless Settings.pages['external_https'].present?
Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pages['artifacts_server'].nil?
#
# Git LFS
......
......@@ -42,6 +42,37 @@ By default, tooltips should be placed below the referring element. However, if t
---
## Popovers
Popovers provide additional, useful, unique information about the referring elements and can provide one or multiple actionable elements. They inform the user of additional information within the context of their original view, but without forcing the user to act upon it like a modal. Popovers are different from tooltips, which do not provide rich markup and actionable items. A popover can contain a header section with a different background color.
Popovers are summoned:
* Upon hover or touch on an element
### Usage
A popover should be used:
* When you don't want to let the user lose context, but still want to provide additional, useful, unique information about the referring elements
* When it isn’t critical for the user to act upon the information
* When you want to give a user a summary of extended information and the option to switch context if they want to dive in deeper.
### Styling
A popover can contain a header section with a different background color if that improves readability and separation of content within.
![Popover usage](img/popover-placement-below.png)
This example shows two sections, where each section includes an actionable element. The first section shows a summary of the content shown when clicking the "read more" link. With this information the user can decide to dive deeper or start their GitLab Enterprise Edition trial immediately.
### Placement
By default, popovers should be placed below the referring element. However, if there isn’t enough space in the viewport or it blocks related content, the popover should be moved to the side or above as needed.
![Tooltip placement location](img/popover-placement-above.png)
In this example we let the user know more about the setting they are deciding on, without losing context. If they want to know even more, they can do so, but with the expectation of opening that content in a new view.
---
## Anchor links
Anchor links are used for navigational actions and lone, secondary commands (such as 'Reset filters' on the Issues List) when deemed appropriate by the UX team.
......
......@@ -50,6 +50,10 @@ For more examples on artifacts, follow the [artifacts reference in
With GitLab 9.2, PDFs, images, videos and other formats can be previewed
directly in the job artifacts browser without the need to download them.
>**Note:**
With [GitLab 10.1][ce-14399], HTML files in a public project can be previewed
directly in a new tab without the need to download them.
After a job finishes, if you visit the job's specific page, there are three
buttons. You can download the artifacts archive or browse its contents, whereas
the **Keep** button appears only if you have set an [expiry date] to the
......@@ -64,7 +68,8 @@ archive. If your artifacts contained directories, then you are also able to
browse inside them.
Below you can see what browsing looks like. In this case we have browsed inside
the archive and at this point there is one directory and one HTML file.
the archive and at this point there is one directory, a couple of files, and
one HTML file that you can view directly online (opens in a new tab).
![Job artifacts browser](img/job_artifacts_browser.png)
......@@ -158,3 +163,4 @@ information in the UI.
[expiry date]: ../../../ci/yaml/README.md#artifacts-expire_in
[ce-14399]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/14399
......@@ -9,7 +9,7 @@ module Github
include Gitlab::ShellAdapter
attr_reader :project, :repository, :repo, :repo_url, :wiki_url,
:options, :errors, :cached, :verbose
:options, :errors, :cached, :verbose, :last_fetched_at
def initialize(project, options = {})
@project = project
......@@ -21,12 +21,13 @@ module Github
@verbose = options.fetch(:verbose, false)
@cached = Hash.new { |hash, key| hash[key] = Hash.new }
@errors = []
@last_fetched_at = nil
end
# rubocop: disable Rails/Output
def execute
puts 'Fetching repository...'.color(:aqua) if verbose
fetch_repository
setup_and_fetch_repository
puts 'Fetching labels...'.color(:aqua) if verbose
fetch_labels
puts 'Fetching milestones...'.color(:aqua) if verbose
......@@ -42,7 +43,7 @@ module Github
puts 'Expiring repository cache...'.color(:aqua) if verbose
expire_repository_cache
true
errors.empty?
rescue Github::RepositoryFetchError
expire_repository_cache
false
......@@ -52,18 +53,24 @@ module Github
private
def fetch_repository
def setup_and_fetch_repository
begin
project.ensure_repository
project.repository.add_remote('github', repo_url)
project.repository.set_remote_as_mirror('github')
project.repository.fetch_remote('github', forced: true)
project.repository.set_import_remote_as_mirror('github')
project.repository.add_remote_fetch_config('github', '+refs/pull/*/head:refs/merge-requests/*/head')
fetch_remote(forced: true)
rescue Gitlab::Git::Repository::NoRepository, Gitlab::Shell::Error => e
error(:project, repo_url, e.message)
raise Github::RepositoryFetchError
end
end
def fetch_remote(forced: false)
@last_fetched_at = Time.now
project.repository.fetch_remote('github', forced: forced)
end
def fetch_wiki_repository
return if project.wiki.repository_exists?
......@@ -92,7 +99,7 @@ module Github
label.color = representation.color
end
cached[:label_ids][label.title] = label.id
cached[:label_ids][representation.title] = label.id
rescue => e
error(:label, representation.url, e.message)
end
......@@ -143,7 +150,9 @@ module Github
next unless merge_request.new_record? && pull_request.valid?
begin
pull_request.restore_branches!
# If the PR has been created/updated after we last fetched the
# remote, we fetch again to get the up-to-date refs.
fetch_remote if pull_request.updated_at > last_fetched_at
author_id = user_id(pull_request.author, project.creator_id)
description = format_description(pull_request.description, pull_request.author)
......@@ -152,6 +161,7 @@ module Github
iid: pull_request.iid,
title: pull_request.title,
description: description,
ref_fetched: true,
source_project: pull_request.source_project,
source_branch: pull_request.source_branch_name,
source_branch_sha: pull_request.source_branch_sha,
......@@ -173,8 +183,6 @@ module Github
fetch_comments(merge_request, :review_comment, review_comments_url, LegacyDiffNote)
rescue => e
error(:pull_request, pull_request.url, e.message)
ensure
pull_request.remove_restored_branches!
end
end
......@@ -203,11 +211,11 @@ module Github
# for both features, like manipulating assignees, labels
# and milestones, are provided within the Issues API.
if representation.pull_request?
return unless representation.has_labels? || representation.has_comments?
return unless representation.labels? || representation.comments?
merge_request = MergeRequest.find_by!(target_project_id: project.id, iid: representation.iid)
if representation.has_labels?
if representation.labels?
merge_request.update_attribute(:label_ids, label_ids(representation.labels))
end
......@@ -222,14 +230,16 @@ module Github
issue.title = representation.title
issue.description = format_description(representation.description, representation.author)
issue.state = representation.state
issue.label_ids = label_ids(representation.labels)
issue.milestone_id = milestone_id(representation.milestone)
issue.author_id = author_id
issue.assignee_ids = [user_id(representation.assignee)]
issue.created_at = representation.created_at
issue.updated_at = representation.updated_at
issue.save!(validate: false)
issue.update(
label_ids: label_ids(representation.labels),
assignee_ids: assignee_ids(representation.assignees))
fetch_comments_conditionally(issue, representation)
end
rescue => e
......@@ -238,7 +248,7 @@ module Github
end
def fetch_comments_conditionally(issuable, representation)
if representation.has_comments?
if representation.comments?
comments_url = "/repos/#{repo}/issues/#{issuable.iid}/comments"
fetch_comments(issuable, :comment, comments_url)
end
......@@ -302,7 +312,11 @@ module Github
end
def label_ids(labels)
labels.map { |attrs| cached[:label_ids][attrs.fetch('name')] }.compact
labels.map { |label| cached[:label_ids][label.title] }.compact
end
def assignee_ids(assignees)
assignees.map { |assignee| user_id(assignee) }.compact
end
def milestone_id(milestone)
......
......@@ -7,10 +7,14 @@ module Github
raw.dig('user', 'login') || 'unknown'
end
def repo?
raw['repo'].present?
end
def repo
return @repo if defined?(@repo)
return unless repo?
@repo = Github::Representation::Repo.new(raw['repo']) if raw['repo'].present?
@repo ||= Github::Representation::Repo.new(raw['repo'])
end
def ref
......@@ -25,10 +29,6 @@ module Github
Commit.truncate_sha(sha)
end
def exists?
@exists ||= branch_exists? && commit_exists?
end
def valid?
sha.present? && ref.present?
end
......@@ -47,14 +47,6 @@ module Github
private
def branch_exists?
repository.branch_exists?(ref)
end
def commit_exists?
repository.branch_names_contains(sha).include?(ref)
end
def repository
@repository ||= options.fetch(:repository)
end
......
......@@ -23,14 +23,14 @@ module Github
@author ||= Github::Representation::User.new(raw['user'], options)
end
def assignee
return unless assigned?
@assignee ||= Github::Representation::User.new(raw['assignee'], options)
def labels?
raw['labels'].any?
end
def assigned?
raw['assignee'].present?
def labels
@labels ||= Array(raw['labels']).map do |label|
Github::Representation::Label.new(label, options)
end
end
end
end
......
module Github
module Representation
class Issue < Representation::Issuable
def labels
raw['labels']
end
def state
raw['state'] == 'closed' ? 'closed' : 'opened'
end
def has_comments?
def comments?
raw['comments'] > 0
end
def has_labels?
labels.count > 0
end
def pull_request?
raw['pull_request'].present?
end
def assigned?
raw['assignees'].present?
end
def assignees
@assignees ||= Array(raw['assignees']).map do |user|
Github::Representation::User.new(user, options)
end
end
end
end
end
module Github
module Representation
class PullRequest < Representation::Issuable
delegate :user, :repo, :ref, :sha, to: :source_branch, prefix: true
delegate :user, :exists?, :repo, :ref, :sha, :short_sha, to: :target_branch, prefix: true
delegate :sha, to: :source_branch, prefix: true
delegate :sha, to: :target_branch, prefix: true
def source_project
project
end
def source_branch_name
@source_branch_name ||=
if cross_project? || !source_branch_exists?
source_branch_name_prefixed
else
source_branch_ref
end
end
def source_branch_exists?
return @source_branch_exists if defined?(@source_branch_exists)
@source_branch_exists = !cross_project? && source_branch.exists?
# Mimic the "user:branch" displayed in the MR widget,
# i.e. "Request to merge rymai:add-external-mounts into master"
cross_project? ? "#{source_branch.user}:#{source_branch.ref}" : source_branch.ref
end
def target_project
......@@ -28,11 +19,7 @@ module Github
end
def target_branch_name
@target_branch_name ||= target_branch_exists? ? target_branch_ref : target_branch_name_prefixed
end
def target_branch_exists?
@target_branch_exists ||= target_branch.exists?
target_branch.ref
end
def state
......@@ -50,16 +37,14 @@ module Github
source_branch.valid? && target_branch.valid?
end
def restore_branches!
restore_source_branch!
restore_target_branch!
def assigned?
raw['assignee'].present?
end
def remove_restored_branches!
return if opened?
def assignee
return unless assigned?
remove_source_branch!
remove_target_branch!
@assignee ||= Github::Representation::User.new(raw['assignee'], options)
end
private
......@@ -72,48 +57,14 @@ module Github
@source_branch ||= Representation::Branch.new(raw['head'], repository: project.repository)
end
def source_branch_name_prefixed
"gh-#{target_branch_short_sha}/#{iid}/#{source_branch_user}/#{source_branch_ref}"
end
def target_branch
@target_branch ||= Representation::Branch.new(raw['base'], repository: project.repository)
end
def target_branch_name_prefixed
"gl-#{target_branch_short_sha}/#{iid}/#{target_branch_user}/#{target_branch_ref}"
end
def cross_project?
return true if source_branch_repo.nil?
source_branch_repo.id != target_branch_repo.id
end
def restore_source_branch!
return if source_branch_exists?
source_branch.restore!(source_branch_name)
end
def restore_target_branch!
return if target_branch_exists?
target_branch.restore!(target_branch_name)
end
def remove_source_branch!
# We should remove the source/target branches only if they were
# restored. Otherwise, we'll remove branches like 'master' that
# target_branch_exists? returns true. In other words, we need
# to clean up only the restored branches that (source|target)_branch_exists?
# returns false for the first time it has been called, because of
# this that is important to memoize these values.
source_branch.remove!(source_branch_name) unless source_branch_exists?
end
return true unless source_branch.repo?
def remove_target_branch!
target_branch.remove!(target_branch_name) unless target_branch_exists?
source_branch.repo.id != target_branch.repo.id
end
end
end
......
......@@ -86,7 +86,7 @@ module Gitlab
user_name: user.name,
user_username: user.username,
user_email: user.email,
user_avatar: user.avatar_url,
user_avatar: user.avatar_url(only_path: false),
project_id: project.id,
project: project.hook_attrs,
commits: commit_attrs,
......
......@@ -1112,6 +1112,8 @@ module Gitlab
raise NoRepository.new(e)
rescue GRPC::BadStatus => e
raise CommandError.new(e)
rescue GRPC::InvalidArgument => e
raise ArgumentError.new(e)
end
private
......
module Gitlab
module GitalyClient
class NamespaceService
def initialize(storage)
@storage = storage
end
def exists?(name)
request = Gitaly::NamespaceExistsRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:namespace_exists, request).exists
end
def add(name)
request = Gitaly::AddNamespaceRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:add_namespace, request)
end
def remove(name)
request = Gitaly::RemoveNamespaceRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:remove_namespace, request)
end
def rename(from, to)
request = Gitaly::RenameNamespaceRequest.new(storage_name: @storage, from: from, to: to)
gitaly_client_call(:rename_namespace, request)
end
private
def gitaly_client_call(type, request)
GitalyClient.call(@storage, :namespace_service, type, request)
end
end
end
end
......@@ -222,10 +222,18 @@ module Gitlab
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def add_namespace(storage, name)
Gitlab::GitalyClient.migrate(:add_namespace) do |enabled|
if enabled
gitaly_namespace_client(storage).add(name)
else
path = full_path(storage, name)
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
end
end
rescue Errno::EEXIST => e
Rails.logger.warn("Directory exists as a file: #{e} at: #{path}")
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end
# Remove directory from repositories storage
......@@ -236,8 +244,16 @@ module Gitlab
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def rm_namespace(storage, name)
Gitlab::GitalyClient.migrate(:remove_namespace) do |enabled|
if enabled
gitaly_namespace_client(storage).remove(name)
else
FileUtils.rm_r(full_path(storage, name), force: true)
end
end
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end
# Move namespace directory inside repositories storage
#
......@@ -246,10 +262,18 @@ module Gitlab
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def mv_namespace(storage, old_name, new_name)
Gitlab::GitalyClient.migrate(:rename_namespace) do |enabled|
if enabled
gitaly_namespace_client(storage).rename(old_name, new_name)
else
return false if exists?(storage, new_name) || !exists?(storage, old_name)
FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name))
end
end
rescue GRPC::InvalidArgument
false
end
def url_to_repo(path)
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{path}.git"
......@@ -272,8 +296,14 @@ module Gitlab
#
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def exists?(storage, dir_name)
Gitlab::GitalyClient.migrate(:namespace_exists) do |enabled|
if enabled
gitaly_namespace_client(storage).exists?(dir_name)
else
File.exist?(full_path(storage, dir_name))
end
end
end
protected
......@@ -349,6 +379,14 @@ module Gitlab
Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end
def gitaly_namespace_client(storage_path)
storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
value['path'] == storage_path
end
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def gitaly_migrate(method, &block)
Gitlab::GitalyClient.migrate(method, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e
......
......@@ -50,6 +50,8 @@ namespace :gitlab do
# only generate a configuration for the most common and simplest case: when
# we have exactly one Gitaly process and we are sure it is running locally
# because it uses a Unix socket.
# For development and testing purposes, an extra storage is added to gitaly,
# which is not known to Rails, but must be explicitly stubbed.
def gitaly_configuration_toml(gitaly_ruby: true)
storages = []
address = nil
......@@ -67,6 +69,11 @@ namespace :gitlab do
storages << { name: key, path: val['path'] }
end
if Rails.env.test?
storages << { name: 'test_second_storage', path: Rails.root.join('tmp', 'tests', 'second_storage').to_s }
end
config = { socket_path: address.sub(%r{\Aunix:}, ''), storage: storages }
config[:auth] = { token: 'secret' } if Rails.env.test?
config[:'gitaly-ruby'] = { dir: File.join(Dir.pwd, 'ruby') } if gitaly_ruby
......
......@@ -39,13 +39,19 @@ class GithubImport
def import!
@project.force_import_start
import_success = false
timings = Benchmark.measure do
Github::Import.new(@project, @options).execute
import_success = Github::Import.new(@project, @options).execute
end
puts "Import finished. Timings: #{timings}".color(:green)
if import_success
@project.import_finish
puts "Import finished. Timings: #{timings}".color(:green)
else
puts "Import was not successful. Errors were as follows:"
puts @project.import_error
end
end
def new_project
......@@ -53,18 +59,23 @@ class GithubImport
namespace_path, _sep, name = @project_path.rpartition('/')
namespace = find_or_create_namespace(namespace_path)
Projects::CreateService.new(
project = Projects::CreateService.new(
@current_user,
name: name,
path: name,
description: @repo['description'],
namespace_id: namespace.id,
visibility_level: visibility_level,
import_type: 'github',
import_source: @repo['full_name'],
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@"),
skip_wiki: @repo['has_wiki']
).execute
project.update!(
import_type: 'github',
import_source: @repo['full_name'],
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@")
)
project
end
end
......
......@@ -2,7 +2,7 @@ require 'spec_helper'
describe Admin::UsersController do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
set(:admin) { create(:admin) }
before do
sign_in(admin)
......
......@@ -57,7 +57,7 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end
it 'redirects to correspondent page' do
it 'goes to the correct page' do
get :index, page: last_page
expect(assigns(:todos).current_page).to eq(last_page)
......@@ -70,6 +70,30 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end
context 'when providing no filters' do
it 'does not perform a query to get the page count, but gets that from the user' do
allow(controller).to receive(:current_user).and_return(user)
expect(user).to receive(:todos_pending_count).and_call_original
get :index, page: (last_page + 1).to_param, sort: :created_asc
expect(response).to redirect_to(dashboard_todos_path(page: last_page, sort: :created_asc))
end
end
context 'when providing filters' do
it 'performs a query to get the correct page count' do
allow(controller).to receive(:current_user).and_return(user)
expect(user).not_to receive(:todos_pending_count)
get :index, page: (last_page + 1).to_param, project_id: project.id
expect(response).to redirect_to(dashboard_todos_path(page: last_page, project_id: project.id))
end
end
end
end
......
require 'spec_helper'
describe Projects::ArtifactsController do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
set(:user) { create(:user) }
set(:project) { create(:project, :repository, :public) }
let(:pipeline) do
create(:ci_pipeline,
......@@ -15,7 +15,7 @@ describe Projects::ArtifactsController do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do
project.team << [user, :developer]
project.add_developer(user)
sign_in(user)
end
......@@ -47,10 +47,38 @@ describe Projects::ArtifactsController do
end
describe 'GET file' do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
end
context 'when the file is served by GitLab Pages' do
before do
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
context 'when the file exists' do
it 'renders the file view' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to have_http_status(302)
end
end
context 'when the file does not exist' do
it 'responds Not Found' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
expect(response).to be_not_found
end
end
end
context 'when the file is served through Rails' do
context 'when the file exists' do
it 'renders the file view' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to have_http_status(:ok)
expect(response).to render_template('projects/artifacts/file')
end
end
......@@ -64,6 +92,26 @@ describe Projects::ArtifactsController do
end
end
context 'when the project is private' do
let(:private_project) { create(:project, :repository, :private) }
let(:pipeline) { create(:ci_pipeline, project: private_project) }
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do
private_project.add_developer(user)
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
it 'does not redirect the request' do
get :file, namespace_id: private_project.namespace, project_id: private_project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to have_http_status(:ok)
expect(response).to render_template('projects/artifacts/file')
end
end
end
describe 'GET raw' do
context 'when the file exists' do
it 'serves the file using workhorse' do
......
......@@ -140,7 +140,8 @@ describe ProjectsController do
end
context 'when the storage is not available', broken_storage: true do
let(:project) { create(:project, :broken_storage) }
set(:project) { create(:project, :broken_storage) }
before do
project.add_developer(user)
sign_in(user)
......
......@@ -3,10 +3,13 @@ require 'rails_helper'
describe 'Merge request', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:merge_request_in_only_mwps_project) { create(:merge_request, source_project: project_only_mwps) }
before do
project.team << [user, :master]
project.add_master(user)
project_only_mwps.add_master(user)
sign_in(user)
end
......@@ -160,6 +163,20 @@ describe 'Merge request', :js do
end
end
context 'view merge request in project with only-mwps setting enabled but no CI is setup' do
before do
visit project_merge_request_path(project_only_mwps, merge_request_in_only_mwps_project)
end
it 'should be allowed to merge' do
# Wait for the `ci_status` and `merge_check` requests
wait_for_requests
expect(page).to have_selector('.accept-merge-request')
expect(find('.accept-merge-request')['disabled']).not_to be(true)
end
end
context 'view merge request with MWPS enabled but automatically merge fails' do
before do
merge_request.update(
......
......@@ -4,16 +4,15 @@ feature 'Browse artifact', :js do
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
let(:browse_url) do
browse_path('other_artifacts_0.1.2')
end
def browse_path(path)
browse_project_job_artifacts_path(project, job, path)
end
context 'when visiting old URL' do
let(:browse_url) do
browse_path('other_artifacts_0.1.2')
end
before do
visit browse_url.sub('/-/jobs', '/builds')
end
......@@ -22,4 +21,47 @@ feature 'Browse artifact', :js do
expect(page.current_path).to eq(browse_url)
end
end
context 'when browsing a directory with a text file' do
let(:txt_entry) { job.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
context 'when the project is public' do
it "shows external link icon and styles" do
visit browse_url
link = first('.tree-item-file-external-link')
expect(page).to have_link('doc_sample.txt', href: file_project_job_artifacts_path(project, job, path: txt_entry.blob.path))
expect(link[:target]).to eq('_blank')
expect(link[:rel]).to include('noopener')
expect(link[:rel]).to include('noreferrer')
expect(page).to have_selector('.js-artifact-tree-external-icon')
end
end
context 'when the project is private' do
let!(:private_project) { create(:project, :private) }
let(:pipeline) { create(:ci_empty_pipeline, project: private_project) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
let(:user) { create(:user) }
before do
private_project.add_developer(user)
sign_in(user)
end
it 'shows internal link styles' do
visit browse_project_job_artifacts_path(private_project, job, 'other_artifacts_0.1.2')
expect(page).to have_link('doc_sample.txt')
expect(page).not_to have_selector('.js-artifact-tree-external-icon')
end
end
end
end
......@@ -60,6 +60,23 @@ feature 'Protected Branches', :js do
expect(page).to have_content('No branches to show')
end
end
describe "Saved defaults" do
it "keeps the allowed to merge and push dropdowns defaults based on the previous selection" do
visit project_protected_branches_path(project)
find(".js-allowed-to-merge").trigger('click')
click_link 'No one'
find(".js-allowed-to-push").trigger('click')
click_link 'Developers + Masters'
visit project_protected_branches_path(project)
page.within(".js-allowed-to-merge") do
expect(page.find(".dropdown-toggle-text")).to have_content("No one")
end
page.within(".js-allowed-to-push") do
expect(page.find(".dropdown-toggle-text")).to have_content("Developers + Masters")
end
end
end
end
context 'logged in as admin' do
......
......@@ -46,6 +46,7 @@
"branch_missing": { "type": "boolean" },
"has_conflicts": { "type": "boolean" },
"can_be_merged": { "type": "boolean" },
"mergeable": { "type": "boolean" },
"project_archived": { "type": "boolean" },
"only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
"has_ci": { "type": "boolean" },
......
......@@ -12,6 +12,7 @@ const createComponent = (customConfig = {}) => {
pipeline: null,
isPipelineFailed: false,
isPipelinePassing: false,
isMergeAllowed: true,
onlyAllowMergeIfPipelineSucceeds: false,
hasCI: false,
ciStatus: null,
......@@ -212,21 +213,24 @@ describe('MRWidgetReadyToMerge', () => {
describe('isMergeButtonDisabled', () => {
it('should return false with initial data', () => {
vm.mr.isMergeAllowed = true;
expect(vm.isMergeButtonDisabled).toBeFalsy();
});
it('should return true when there is no commit message', () => {
vm.mr.isMergeAllowed = true;
vm.commitMessage = '';
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
it('should return true if merge is not allowed', () => {
vm.mr.isMergeAllowed = false;
vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
vm.mr.isPipelineFailed = true;
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
it('should return true when the vm instance is making request', () => {
vm.mr.isMergeAllowed = true;
vm.isMakingRequest = true;
expect(vm.isMergeButtonDisabled).toBeTruthy();
});
......@@ -234,53 +238,27 @@ describe('MRWidgetReadyToMerge', () => {
});
describe('methods', () => {
describe('isMergeAllowed', () => {
it('should return true when no pipeline and not required to succeed', () => {
vm.mr.onlyAllowMergeIfPipelineSucceeds = false;
vm.mr.isPipelinePassing = false;
expect(vm.isMergeAllowed()).toBeTruthy();
});
it('should return true when pipeline failed and not required to succeed', () => {
vm.mr.onlyAllowMergeIfPipelineSucceeds = false;
vm.mr.isPipelinePassing = false;
expect(vm.isMergeAllowed()).toBeTruthy();
});
it('should return false when pipeline failed and required to succeed', () => {
vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
vm.mr.isPipelinePassing = false;
expect(vm.isMergeAllowed()).toBeFalsy();
});
it('should return true when pipeline succeeded and required to succeed', () => {
vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
vm.mr.isPipelinePassing = true;
expect(vm.isMergeAllowed()).toBeTruthy();
});
});
describe('shouldShowMergeControls', () => {
it('should return false when an external pipeline is running and required to succeed', () => {
spyOn(vm, 'isMergeAllowed').and.returnValue(false);
vm.mr.isMergeAllowed = false;
vm.mr.isPipelineActive = false;
expect(vm.shouldShowMergeControls()).toBeFalsy();
});
it('should return true when the build succeeded or build not required to succeed', () => {
spyOn(vm, 'isMergeAllowed').and.returnValue(true);
vm.mr.isMergeAllowed = true;
vm.mr.isPipelineActive = false;
expect(vm.shouldShowMergeControls()).toBeTruthy();
});
it('should return true when showing the MWPS button and a pipeline is running that needs to be successful', () => {
spyOn(vm, 'isMergeAllowed').and.returnValue(false);
vm.mr.isMergeAllowed = false;
vm.mr.isPipelineActive = true;
expect(vm.shouldShowMergeControls()).toBeTruthy();
});
it('should return true when showing the MWPS button but not required for the pipeline to succeed', () => {
spyOn(vm, 'isMergeAllowed').and.returnValue(true);
vm.mr.isMergeAllowed = true;
vm.mr.isPipelineActive = true;
expect(vm.shouldShowMergeControls()).toBeTruthy();
});
......
......@@ -15,10 +15,6 @@ describe Gitlab::Shell do
it { is_expected.to respond_to :add_repository }
it { is_expected.to respond_to :remove_repository }
it { is_expected.to respond_to :fork_repository }
it { is_expected.to respond_to :add_namespace }
it { is_expected.to respond_to :rm_namespace }
it { is_expected.to respond_to :mv_namespace }
it { is_expected.to respond_to :exists? }
it { expect(gitlab_shell.url_to_repo('diaspora')).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "diaspora.git") }
......@@ -363,4 +359,52 @@ describe Gitlab::Shell do
end
end
end
describe 'namespace actions' do
subject { described_class.new }
let(:storage_path) { Gitlab.config.repositories.storages.default.path }
describe '#add_namespace' do
it 'creates a namespace' do
subject.add_namespace(storage_path, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true)
end
end
describe '#exists?' do
context 'when the namespace does not exist' do
it 'returns false' do
expect(subject.exists?(storage_path, "non-existing")).to be(false)
end
end
context 'when the namespace exists' do
it 'returns true' do
subject.add_namespace(storage_path, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(true)
end
end
end
describe '#remove' do
it 'removes the namespace' do
subject.add_namespace(storage_path, "mepmep")
subject.rm_namespace(storage_path, "mepmep")
expect(subject.exists?(storage_path, "mepmep")).to be(false)
end
end
describe '#mv_namespace' do
it 'renames the namespace' do
subject.add_namespace(storage_path, "mepmep")
subject.mv_namespace(storage_path, "mepmep", "2mep")
expect(subject.exists?(storage_path, "mepmep")).to be(false)
expect(subject.exists?(storage_path, "2mep")).to be(true)
end
end
end
end
require 'spec_helper'
describe Ci::ArtifactBlob do
let(:build) { create(:ci_build, :artifacts) }
set(:project) { create(:project, :public) }
set(:build) { create(:ci_build, :artifacts, project: project) }
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }
subject { described_class.new(entry) }
......@@ -41,4 +42,51 @@ describe Ci::ArtifactBlob do
expect(subject.external_storage).to eq(:build_artifact)
end
end
describe '#external_url' do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
context '.gif extension' do
it 'returns nil' do
expect(subject.external_url(build.project, build)).to be_nil
end
end
context 'txt extensions' do
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
it 'returns a URL' do
url = subject.external_url(build.project, build)
expect(url).not_to be_nil
expect(url).to start_with("http")
expect(url).to match Gitlab.config.pages.host
expect(url).to end_with(entry.path)
end
end
end
describe '#external_link?' do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
context 'gif extensions' do
it 'returns false' do
expect(subject.external_link?(build)).to be false
end
end
context 'txt extensions' do
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
it 'returns true' do
expect(subject.external_link?(build)).to be true
end
end
end
end
......@@ -152,22 +152,24 @@ describe Namespace do
end
describe '#move_dir' do
let(:namespace) { create(:namespace) }
let!(:project) { create(:project_empty_repo, namespace: namespace) }
before do
@namespace = create :namespace
@project = create(:project_empty_repo, namespace: @namespace)
allow(@namespace).to receive(:path_changed?).and_return(true)
allow(namespace).to receive(:path_changed?).and_return(true)
end
it "raises error when directory exists" do
expect { @namespace.move_dir }.to raise_error("namespace directory cannot be moved")
expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
it "moves dir if path changed" do
new_path = @namespace.full_path + "_new"
allow(@namespace).to receive(:full_path_was).and_return(@namespace.full_path)
allow(@namespace).to receive(:full_path).and_return(new_path)
expect(@namespace).to receive(:remove_exports!)
expect(@namespace.move_dir).to be_truthy
new_path = namespace.full_path + "_new"
allow(namespace).to receive(:full_path_was).and_return(namespace.full_path)
allow(namespace).to receive(:full_path).and_return(new_path)
expect(namespace).to receive(:remove_exports!)
expect(namespace.move_dir).to be_truthy
end
context "when any project has container images" do
......@@ -177,14 +179,14 @@ describe Namespace do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: :any, tags: ['tag'])
create(:project, namespace: @namespace, container_repositories: [container_repository])
create(:project, namespace: namespace, container_repositories: [container_repository])
allow(@namespace).to receive(:path_was).and_return(@namespace.path)
allow(@namespace).to receive(:path).and_return('new_path')
allow(namespace).to receive(:path_was).and_return(namespace.path)
allow(namespace).to receive(:path).and_return('new_path')
end
it 'raises an error about not movable project' do
expect { @namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
end
end
......
......@@ -422,20 +422,10 @@ describe Project do
end
describe '#repository_storage_path' do
let(:project) { create(:project, repository_storage: 'custom') }
before do
FileUtils.mkdir('tmp/tests/custom_repositories')
storages = { 'custom' => { 'path' => 'tmp/tests/custom_repositories' } }
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
end
after do
FileUtils.rm_rf('tmp/tests/custom_repositories')
end
let(:project) { create(:project) }
it 'returns the repository storage path' do
expect(project.repository_storage_path).to eq('tmp/tests/custom_repositories')
expect(Dir.exist?(project.repository_storage_path)).to be(true)
end
end
......
......@@ -634,6 +634,10 @@ describe Repository do
end
describe '#fetch_ref' do
# Setting the var here sidesteps the stub that makes gitaly raise an error
# before the actual test call.
set(:broken_repository) { create(:project, :broken_storage).repository }
describe 'when storage is broken', broken_storage: true do
it 'should raise a storage error' do
expect_to_raise_storage_error do
......
......@@ -224,17 +224,20 @@ describe 'gitlab:app namespace rake task' do
end
context 'multiple repository storages' do
let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address }
let(:storages) do
{
'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
'test_second_storage' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
}
end
let(:project_a) { create(:project, :repository, repository_storage: 'default') }
let(:project_b) { create(:project, :repository, repository_storage: 'custom') }
let(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
before do
FileUtils.mkdir('tmp/tests/default_storage')
FileUtils.mkdir('tmp/tests/custom_storage')
gitaly_address = Gitlab.config.repositories.storages.default.gitaly_address
storages = {
'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
}
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
# Create the projects now, after mocking the settings but before doing the backup
......@@ -253,7 +256,7 @@ describe 'gitlab:app namespace rake task' do
after do
FileUtils.rm_rf('tmp/tests/default_storage')
FileUtils.rm_rf('tmp/tests/custom_storage')
FileUtils.rm(@backup_tar)
FileUtils.rm(@backup_tar) if @backup_tar
end
it 'includes repositories in all repository storages' do
......