Commit 91e530df authored by Shinya Maeda

Merge branch 'master' into feature/sm/35954-create-kubernetes-cluster-on-gke-from-k8s-service

parents f2932885 9560d5b8
 /* eslint-disable func-names, space-before-function-paren, wrap-iife, prefer-arrow-callback, no-unused-vars, no-return-assign, max-len */
+import { visitUrl } from './lib/utils/url_utility';
+import { convertPermissionToBoolean } from './lib/utils/common_utils';
 window.BuildArtifacts = (function() {
   function BuildArtifacts() {
     this.disablePropagation();
     this.setupEntryClick();
+    this.setupTooltips();
   }
   BuildArtifacts.prototype.disablePropagation = function() {
@@ -17,9 +20,28 @@ window.BuildArtifacts = (function() {
   BuildArtifacts.prototype.setupEntryClick = function() {
     return $('.tree-holder').on('click', 'tr[data-link]', function(e) {
-      return window.location = this.dataset.link;
+      visitUrl(this.dataset.link, convertPermissionToBoolean(this.dataset.externalLink));
     });
   };
+  BuildArtifacts.prototype.setupTooltips = function() {
+    $('.js-artifact-tree-tooltip').tooltip({
+      placement: 'bottom',
+      // Stop the tooltip from hiding when we stop hovering the element directly
+      // We handle all the showing/hiding below
+      trigger: 'manual',
+    });
+    // We want the tooltip to show if you hover anywhere on the row
+    // but be placed below and in the middle of the file name
+    $('.js-artifact-tree-row')
+      .on('mouseenter', (e) => {
+        $(e.currentTarget).find('.js-artifact-tree-tooltip').tooltip('show');
+      })
+      .on('mouseleave', (e) => {
+        $(e.currentTarget).find('.js-artifact-tree-tooltip').tooltip('hide');
+      });
+  };
   return BuildArtifacts;
 })();
@@ -738,7 +738,7 @@ GitLabDropdown = (function() {
         : selectedObject.id;
       if (isInput) {
         field = $(this.el);
-      } else if (value) {
+      } else if (value != null) {
        field = this.dropdown.parent().find("input[name='" + fieldName + "'][value='" + value.toString().replace(/'/g, '\\\'') + "']");
       }
@@ -746,7 +746,7 @@ GitLabDropdown = (function() {
         return;
       }
-      if (el.hasClass(ACTIVE_CLASS)) {
+      if (el.hasClass(ACTIVE_CLASS) && value !== 0) {
         isMarking = false;
         el.removeClass(ACTIVE_CLASS);
         if (field && field.length) {
@@ -852,7 +852,7 @@ GitLabDropdown = (function() {
       if (href && href !== '#') {
         gl.utils.visitUrl(href);
       } else {
-        $el.first().trigger('click');
+        $el.trigger('click');
       }
     }
   };
......
 /* eslint-disable func-names, space-before-function-paren, wrap-iife, no-var, no-param-reassign, no-cond-assign, one-var, one-var-declaration-per-line, no-void, guard-for-in, no-restricted-syntax, prefer-template, quotes, max-len */
 var base;
 var w = window;
 if (w.gl == null) {
@@ -86,6 +87,21 @@ w.gl.utils.getLocationHash = function(url) {
 w.gl.utils.refreshCurrentPage = () => gl.utils.visitUrl(document.location.href);
-w.gl.utils.visitUrl = (url) => {
-  document.location.href = url;
-};
+// eslint-disable-next-line import/prefer-default-export
+export function visitUrl(url, external = false) {
+  if (external) {
+    // Simulate `target="blank" rel="noopener noreferrer"`
+    // See https://mathiasbynens.github.io/rel-noopener/
+    const otherWindow = window.open();
+    otherWindow.opener = null;
+    otherWindow.location = url;
+  } else {
+    document.location.href = url;
+  }
+}
+
+window.gl = window.gl || {};
+window.gl.utils = {
+  ...(window.gl.utils || {}),
+  visitUrl,
+};
+import _ from 'underscore';
 import ProtectedBranchAccessDropdown from './protected_branch_access_dropdown';
 import ProtectedBranchDropdown from './protected_branch_dropdown';
+import AccessorUtilities from '../lib/utils/accessor';
+
+const PB_LOCAL_STORAGE_KEY = 'protected-branches-defaults';
 export default class ProtectedBranchCreate {
   constructor() {
     this.$form = $('.js-new-protected-branch');
+    this.isLocalStorageAvailable = AccessorUtilities.isLocalStorageAccessSafe();
+    this.currentProjectUserDefaults = {};
     this.buildDropdowns();
   }
   buildDropdowns() {
     const $allowedToMergeDropdown = this.$form.find('.js-allowed-to-merge');
     const $allowedToPushDropdown = this.$form.find('.js-allowed-to-push');
+    const $protectedBranchDropdown = this.$form.find('.js-protected-branch-select');
     // Cache callback
     this.onSelectCallback = this.onSelect.bind(this);
@@ -28,15 +35,13 @@ export default class ProtectedBranchCreate {
       onSelect: this.onSelectCallback,
     });
-    // Select default
-    $allowedToPushDropdown.data('glDropdown').selectRowAtIndex(0);
-    $allowedToMergeDropdown.data('glDropdown').selectRowAtIndex(0);
     // Protected branch dropdown
     this.protectedBranchDropdown = new ProtectedBranchDropdown({
-      $dropdown: this.$form.find('.js-protected-branch-select'),
+      $dropdown: $protectedBranchDropdown,
       onSelect: this.onSelectCallback,
     });
+    this.loadPreviousSelection($allowedToMergeDropdown.data('glDropdown'), $allowedToPushDropdown.data('glDropdown'));
   }
   // This will run after clicked callback
@@ -45,7 +50,41 @@ export default class ProtectedBranchCreate {
     const $branchInput = this.$form.find('input[name="protected_branch[name]"]');
     const $allowedToMergeInput = this.$form.find('input[name="protected_branch[merge_access_levels_attributes][0][access_level]"]');
     const $allowedToPushInput = this.$form.find('input[name="protected_branch[push_access_levels_attributes][0][access_level]"]');
-    this.$form.find('input[type="submit"]').attr('disabled', !($branchInput.val() && $allowedToMergeInput.length && $allowedToPushInput.length));
+    const completedForm = !(
+      $branchInput.val() &&
+      $allowedToMergeInput.length &&
+      $allowedToPushInput.length
+    );
+
+    this.savePreviousSelection($allowedToMergeInput.val(), $allowedToPushInput.val());
+    this.$form.find('input[type="submit"]').attr('disabled', completedForm);
   }
+
+  loadPreviousSelection(mergeDropdown, pushDropdown) {
+    let mergeIndex = 0;
+    let pushIndex = 0;
+    if (this.isLocalStorageAvailable) {
+      const savedDefaults = JSON.parse(window.localStorage.getItem(PB_LOCAL_STORAGE_KEY));
+      if (savedDefaults != null) {
+        mergeIndex = _.findLastIndex(mergeDropdown.fullData.roles, {
+          id: parseInt(savedDefaults.mergeSelection, 0),
+        });
+        pushIndex = _.findLastIndex(pushDropdown.fullData.roles, {
+          id: parseInt(savedDefaults.pushSelection, 0),
+        });
+      }
+    }
+    mergeDropdown.selectRowAtIndex(mergeIndex);
+    pushDropdown.selectRowAtIndex(pushIndex);
+  }
+
+  savePreviousSelection(mergeSelection, pushSelection) {
+    if (this.isLocalStorageAvailable) {
+      const branchDefaults = {
+        mergeSelection,
+        pushSelection,
+      };
+      window.localStorage.setItem(PB_LOCAL_STORAGE_KEY, JSON.stringify(branchDefaults));
+    }
+  }
 }
@@ -67,7 +67,7 @@ export default {
       return defaultClass;
     },
     iconClass() {
-      if (this.status === 'failed' || !this.commitMessage.length || !this.isMergeAllowed() || this.mr.preventMerge) {
+      if (this.status === 'failed' || !this.commitMessage.length || !this.mr.isMergeAllowed || this.mr.preventMerge) {
         return 'failed';
       }
       return 'success';
@@ -100,13 +100,8 @@ export default {
     },
   },
   methods: {
-    isMergeAllowed() {
-      return !this.mr.onlyAllowMergeIfPipelineSucceeds ||
-        this.mr.isPipelinePassing ||
-        this.mr.isPipelineSkipped;
-    },
     shouldShowMergeControls() {
-      return this.isMergeAllowed() || this.shouldShowMergeWhenPipelineSucceedsText;
+      return this.mr.isMergeAllowed || this.shouldShowMergeWhenPipelineSucceedsText;
     },
     updateCommitMessage() {
       const cmwd = this.mr.commitMessageWithDescription;
......
@@ -73,6 +73,7 @@ export default class MergeRequestStore {
     this.canCancelAutomaticMerge = !!data.cancel_merge_when_pipeline_succeeds_path;
     this.hasSHAChanged = this.sha !== data.diff_head_sha;
     this.canBeMerged = data.can_be_merged || false;
+    this.isMergeAllowed = data.mergeable || false;
     this.mergeOngoing = data.merge_ongoing;
     // Cherry-pick and Revert actions related
......
@@ -169,6 +169,14 @@
   }
 }
+.tree-item-file-external-link {
+  margin-right: 4px;
+
+  span {
+    text-decoration: inherit;
+  }
+}
 .tree_commit {
   max-width: 320px;
......
@@ -7,9 +7,8 @@ class Dashboard::TodosController < Dashboard::ApplicationController
   def index
     @sort = params[:sort]
     @todos = @todos.page(params[:page])
-    if @todos.out_of_range? && @todos.total_pages != 0
-      redirect_to url_for(params.merge(page: @todos.total_pages, only_path: true))
-    end
+    return if redirect_out_of_range(@todos)
   end

   def destroy
@@ -60,7 +59,7 @@ class Dashboard::TodosController < Dashboard::ApplicationController
   end

   def find_todos
-    @todos ||= TodosFinder.new(current_user, params).execute
+    @todos ||= TodosFinder.new(current_user, todo_params).execute
   end

   def todos_counts
@@ -69,4 +68,27 @@ class Dashboard::TodosController < Dashboard::ApplicationController
       done_count: number_with_delimiter(current_user.todos_done_count)
     }
   end

+  def todo_params
+    params.permit(:action_id, :author_id, :project_id, :type, :sort, :state)
+  end
+
+  def redirect_out_of_range(todos)
+    total_pages =
+      if todo_params.except(:sort, :page).empty?
+        (current_user.todos_pending_count / todos.limit_value).ceil
+      else
+        todos.total_pages
+      end
+
+    return false if total_pages.zero?
+
+    out_of_range = todos.current_page > total_pages
+
+    if out_of_range
+      redirect_to url_for(params.merge(page: total_pages, only_path: true))
+    end
+
+    out_of_range
+  end
 end
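The point of `redirect_out_of_range` is that an unfiltered request can derive the page count from the user's cached pending-todos counter instead of running a COUNT over the todos table. A minimal sketch with assumed numbers (45 pending todos, a page size of 25); note that, as written above, the division is integer division, so a `to_f` conversion would be needed if the intent is to round up:

    pending_count = 45   # current_user.todos_pending_count (cached counter, no SQL COUNT needed)
    per_page      = 25   # todos.limit_value

    total_pages = (pending_count / per_page).ceil   # => 1 with integer division; (45.0 / 25).ceil => 2

    requested_page = 5
    out_of_range = requested_page > total_pages     # => true, so redirect to page `total_pages`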
@@ -29,13 +29,17 @@ class Projects::ArtifactsController < Projects::ApplicationController
     blob = @entry.blob
     conditionally_expand_blob(blob)

-    respond_to do |format|
-      format.html do
-        render 'file'
-      end
-
-      format.json do
-        render_blob_json(blob)
-      end
-    end
+    if blob.external_link?(build)
+      redirect_to blob.external_url(@project, build)
+    else
+      respond_to do |format|
+        format.html do
+          render 'file'
+        end
+
+        format.json do
+          render_blob_json(blob)
+        end
+      end
+    end
   end
......
@@ -13,7 +13,7 @@ class SessionsController < Devise::SessionsController
   before_action :auto_sign_in_with_provider, only: [:new]
   before_action :load_recaptcha

-  after_action :log_failed_login, only: [:new]
+  after_action :log_failed_login, only: [:new], if: :failed_login?

   def new
     set_minimum_password_length
@@ -46,8 +46,6 @@ class SessionsController < Devise::SessionsController
   private

   def log_failed_login
-    return unless failed_login?
-
     Gitlab::AppLogger.info("Failed Login: username=#{user_params[:login]} ip=#{request.remote_ip}")
   end
......
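The `SessionsController` change moves the guard out of the method body and into the callback declaration, so the callback is skipped entirely when the condition is false. A minimal sketch of the same Rails pattern (controller name, condition, and log message are illustrative, not GitLab's actual code):

    class ExampleController < ApplicationController
      # Registered for :new only, and skipped unless failed_login? is true after the action runs.
      after_action :log_failed_login, only: [:new], if: :failed_login?

      private

      def failed_login?
        # Illustrative condition; the real check inspects the authentication result.
        flash[:alert].present?
      end

      def log_failed_login
        Rails.logger.info("Failed login from #{request.remote_ip}")
      end
    end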
@@ -2,6 +2,8 @@ module Ci
   class ArtifactBlob
     include BlobLike

+    EXTENTIONS_SERVED_BY_PAGES = %w[.html .htm .txt .json].freeze
+
     attr_reader :entry

     def initialize(entry)
@@ -17,6 +19,7 @@ module Ci
     def size
       entry.metadata[:size]
     end
+    alias_method :external_size, :size

     def data
       "Build artifact #{path}"
@@ -30,6 +33,27 @@ module Ci
       :build_artifact
     end

-    alias_method :external_size, :size
+    def external_url(project, job)
+      return unless external_link?(job)
+
+      components = project.full_path_components
+      components << "-/jobs/#{job.id}/artifacts/file/#{path}"
+      artifact_path = components[1..-1].join('/')
+
+      "#{pages_config.protocol}://#{components[0]}.#{pages_config.host}/#{artifact_path}"
+    end
+
+    def external_link?(job)
+      pages_config.enabled &&
+        pages_config.artifacts_server &&
+        EXTENTIONS_SERVED_BY_PAGES.include?(File.extname(name)) &&
+        job.project.public?
+    end
+
+    private
+
+    def pages_config
+      Gitlab.config.pages
+    end
   end
 end
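A worked example of how `external_url` assembles the GitLab Pages address, using made-up values for the project path, job id, artifact path, and Pages settings:

    components = %w[gitlab-org gitlab-ce]                          # project.full_path_components
    components << "-/jobs/1234/artifacts/file/coverage/index.html"
    artifact_path = components[1..-1].join('/')                    # everything after the top-level namespace

    protocol = 'http'         # pages_config.protocol
    host     = 'example.com'  # pages_config.host

    "#{protocol}://#{components[0]}.#{host}/#{artifact_path}"
    # => "http://gitlab-org.example.com/gitlab-ce/-/jobs/1234/artifacts/file/coverage/index.html"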
 module RepositoryMirroring
+  IMPORT_HEAD_REFS = '+refs/heads/*:refs/heads/*'.freeze
+  IMPORT_TAG_REFS = '+refs/tags/*:refs/tags/*'.freeze
+
   def set_remote_as_mirror(name)
-    config = raw_repository.rugged.config
-
     # This is used to define repository as equivalent as "git clone --mirror"
-    config["remote.#{name}.fetch"] = 'refs/*:refs/*'
-    config["remote.#{name}.mirror"] = true
-    config["remote.#{name}.prune"] = true
+    raw_repository.rugged.config["remote.#{name}.fetch"] = 'refs/*:refs/*'
+    raw_repository.rugged.config["remote.#{name}.mirror"] = true
+    raw_repository.rugged.config["remote.#{name}.prune"] = true
   end
+
+  def set_import_remote_as_mirror(remote_name)
+    # Add first fetch with Rugged so it does not create its own.
+    raw_repository.rugged.config["remote.#{remote_name}.fetch"] = IMPORT_HEAD_REFS
+
+    add_remote_fetch_config(remote_name, IMPORT_TAG_REFS)
+
+    raw_repository.rugged.config["remote.#{remote_name}.mirror"] = true
+    raw_repository.rugged.config["remote.#{remote_name}.prune"] = true
+  end
+
+  def add_remote_fetch_config(remote_name, refspec)
+    run_git(%W[config --add remote.#{remote_name}.fetch #{refspec}])
+  end

   def fetch_mirror(remote, url)
......
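A sketch of how an importer can combine these helpers, mirroring the GitHub import code further down in this diff (`repository` stands for a GitLab repository that includes this concern); the extra refspec maps GitHub pull request heads onto merge request refs so MR diffs can be built without restoring temporary branches:

    repository.set_import_remote_as_mirror('github')
    repository.add_remote_fetch_config('github', '+refs/pull/*/head:refs/merge-requests/*/head')

    # Resulting remote configuration, roughly:
    #   remote.github.fetch  = +refs/heads/*:refs/heads/*
    #   remote.github.fetch += +refs/tags/*:refs/tags/*
    #   remote.github.fetch += +refs/pull/*/head:refs/merge-requests/*/head
    #   remote.github.mirror = true
    #   remote.github.prune  = true
    repository.fetch_remote('github', forced: true)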
@@ -106,6 +106,10 @@ module Routable
     RequestStore[full_path_key] ||= uncached_full_path
   end

+  def full_path_components
+    full_path.split('/')
+  end
+
   def expires_full_path_cache
     RequestStore.delete(full_path_key) if RequestStore.active?
     @full_path = nil
......
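A tiny example of the new `full_path_components` helper with a hypothetical nested project path; `Ci::ArtifactBlob#external_url` above relies on the first component being the top-level namespace that becomes the Pages subdomain:

    full_path = 'group/subgroup/my-project'   # hypothetical Routable#full_path
    full_path.split('/')                      # => ["group", "subgroup", "my-project"]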
@@ -415,6 +415,8 @@ class MergeRequest < ActiveRecord::Base
   end

   def create_merge_request_diff
+    fetch_ref
+
     # n+1: https://gitlab.com/gitlab-org/gitlab-ce/issues/37435
     Gitlab::GitalyClient.allow_n_plus_1_calls do
       merge_request_diffs.create
@@ -462,6 +464,7 @@ class MergeRequest < ActiveRecord::Base
     return unless open?

     old_diff_refs = self.diff_refs
     create_merge_request_diff
     MergeRequests::MergeRequestDiffCacheService.new.execute(self)
     new_diff_refs = self.diff_refs
......
@@ -55,7 +55,6 @@ class MergeRequestDiff < ActiveRecord::Base
   end

   def ensure_commit_shas
-    merge_request.fetch_ref
     self.start_commit_sha ||= merge_request.target_branch_sha
     self.head_commit_sha ||= merge_request.source_branch_sha
     self.base_commit_sha ||= find_base_sha
......
@@ -1035,6 +1035,8 @@ class Project < ActiveRecord::Base
     end

     true
+  rescue GRPC::Internal # if the path is too long
+    false
   end

   def create_repository(force: false)
......
@@ -42,6 +42,7 @@ class MergeRequestEntity < IssuableEntity
   expose :commits_count
   expose :cannot_be_merged?, as: :has_conflicts
   expose :can_be_merged?, as: :can_be_merged
+  expose :mergeable?, as: :mergeable
   expose :remove_source_branch?, as: :remove_source_branch

   expose :project_archived do |merge_request|
......
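This exposure is what feeds the frontend change above: `data.mergeable` becomes `isMergeAllowed` in the merge request store. A minimal sketch of the serializer pattern (entity name and payload are illustrative, not GitLab's actual classes):

    class ExampleMergeRequestEntity < Grape::Entity
      # A question-mark predicate on the model is exposed under a JSON-friendly key.
      expose :mergeable?, as: :mergeable
    end

    ExampleMergeRequestEntity.represent(merge_request).as_json
    # => { mergeable: true } when MergeRequest#mergeable? returns true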
+- blob = file.blob
 - path_to_file = file_project_job_artifacts_path(@project, @build, path: file.path)
+- external_link = blob.external_link?(@build)

-%tr.tree-item{ 'data-link' => path_to_file }
-  - blob = file.blob
+%tr.tree-item.js-artifact-tree-row{ data: { link: path_to_file, external_link: "#{external_link}" } }
   %td.tree-item-file-name
     = tree_icon('file', blob.mode, blob.name)
-    = link_to path_to_file do
-      %span.str-truncated= blob.name
+    - if external_link
+      = link_to path_to_file, class: 'tree-item-file-external-link js-artifact-tree-tooltip',
+        target: '_blank', rel: 'noopener noreferrer', title: _('Opens in a new window') do
+        %span.str-truncated>= blob.name
+        = icon('external-link', class: 'js-artifact-tree-external-icon')
+    - else
+      = link_to path_to_file do
+        %span.str-truncated= blob.name
   %td
     = number_to_human_size(blob.size, precision: 2)
---
title: Added defaults for protected branches dropdowns on the repository settings
merge_request: 14278
author:
type: changed
---
title: Add online view of HTML artifacts for public projects
merge_request: 14399
author:
type: added
---
title: Allow merge in MR widget with no pipeline but using "Only allow merge requests
to be merged if the pipeline succeeds"
merge_request: 14633
author:
type: fixed
---
title: Improve GitHub import performance
merge_request: 14445
author:
type: other
---
title: Remove a SQL query from the todos index page
merge_request:
author:
type: other
@@ -164,6 +164,7 @@ production: &base
     host: example.com
     port: 80 # Set to 443 if you serve the pages with HTTPS
     https: false # Set to true if you serve the pages with HTTPS
+    artifacts_server: true
     # external_http: ["1.1.1.1:80", "[2001::1]:80"] # If defined, enables custom domain support in GitLab Pages
     # external_https: ["1.1.1.1:443", "[2001::1]:443"] # If defined, enables custom domain and certificate support in GitLab Pages
......
@@ -316,15 +316,16 @@ Settings.registry['path'] = Settings.absolute(Settings.registry['path
 # Pages
 #
 Settings['pages'] ||= Settingslogic.new({})
 Settings.pages['enabled'] = false if Settings.pages['enabled'].nil?
 Settings.pages['path'] = Settings.absolute(Settings.pages['path'] || File.join(Settings.shared['path'], "pages"))
 Settings.pages['https'] = false if Settings.pages['https'].nil?
 Settings.pages['host'] ||= "example.com"
 Settings.pages['port'] ||= Settings.pages.https ? 443 : 80
 Settings.pages['protocol'] ||= Settings.pages.https ? "https" : "http"
 Settings.pages['url'] ||= Settings.__send__(:build_pages_url)
 Settings.pages['external_http'] ||= false unless Settings.pages['external_http'].present?
 Settings.pages['external_https'] ||= false unless Settings.pages['external_https'].present?
+Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pages['artifacts_server'].nil?

 #
 # Git LFS
......
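How these settings feed the artifact preview, sketched with assumed values; `artifacts_server` inherits from `pages.enabled` unless it is set explicitly in gitlab.yml:

    pages = Gitlab.config.pages
    pages.enabled           # => true, from gitlab.yml
    pages.artifacts_server  # => true, defaults to pages['enabled'] when left unset
    pages.protocol          # => "http" ("https" when https: true)
    pages.host              # => "example.com"
    # Ci::ArtifactBlob#external_link? additionally requires a public project and an
    # extension listed in EXTENTIONS_SERVED_BY_PAGES (.html, .htm, .txt, .json).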
@@ -42,6 +42,37 @@ By default, tooltips should be placed below the referring element. However, if t
 ---

+## Popovers
+
+Popovers provide additional, useful, unique information about the referring elements and can provide one or multiple actionable elements. They inform the user of additional information within the context of their original view, but without forcing the user to act upon it like a modal. Popovers are different from tooltips, which do not provide rich markup and actionable items. A popover can contain a header section with a different background color.
+
+Popovers are summoned:
+
+* Upon hover or touch on an element
+
+### Usage
+A popover should be used:
+* When you don't want to let the user lose context, but still want to provide additional useful unique information about referring elements
+* When it isn’t critical for the user to act upon the information
+* When you want to give a user a summary of extended information and the option to switch context if they want to dive in deeper.
+
+### Styling
+A popover can contain a header section with a different background color if that improves readability and separation of content within.
+
+![Popover usage](img/popover-placement-below.png)
+
+This example shows two sections, where each section includes an actionable element. The first section shows a summary of the content shown when clicking the "read more" link. With this information the user can decide to dive deeper or start their GitLab Enterprise Edition trial immediately.
+
+### Placement
+By default, tooltips should be placed below the referring element. However, if there isn’t enough space in the viewport or it blocks related content, the tooltip should be moved to the side or above as needed.
+
+![Tooltip placement location](img/popover-placement-above.png)
+
+In this example we let the user know more about the setting they are deciding over, without losing context. If they want to know even more they can do so, but with the expectation of opening that content in a new view.
+
+---

 ## Anchor links

 Anchor links are used for navigational actions and lone, secondary commands (such as 'Reset filters' on the Issues List) when deemed appropriate by the UX team.
......
@@ -50,6 +50,10 @@ For more examples on artifacts, follow the [artifacts reference in
 With GitLab 9.2, PDFs, images, videos and other formats can be previewed
 directly in the job artifacts browser without the need to download them.

+>**Note:**
+With [GitLab 10.1][ce-14399], HTML files in a public project can be previewed
+directly in a new tab without the need to download them.
+
 After a job finishes, if you visit the job's specific page, there are three
 buttons. You can download the artifacts archive or browse its contents, whereas
 the **Keep** button appears only if you have set an [expiry date] to the
@@ -64,7 +68,8 @@ archive. If your artifacts contained directories, then you are also able to
 browse inside them.

 Below you can see how browsing looks like. In this case we have browsed inside
-the archive and at this point there is one directory and one HTML file.
+the archive and at this point there is one directory, a couple files, and
+one HTML file that you can view directly online (opens in a new tab).

 ![Job artifacts browser](img/job_artifacts_browser.png)
@@ -158,3 +163,4 @@ information in the UI.
 [expiry date]: ../../../ci/yaml/README.md#artifacts-expire_in
+[ce-14399]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/14399
...@@ -9,7 +9,7 @@ module Github ...@@ -9,7 +9,7 @@ module Github
include Gitlab::ShellAdapter include Gitlab::ShellAdapter
attr_reader :project, :repository, :repo, :repo_url, :wiki_url, attr_reader :project, :repository, :repo, :repo_url, :wiki_url,
:options, :errors, :cached, :verbose :options, :errors, :cached, :verbose, :last_fetched_at
def initialize(project, options = {}) def initialize(project, options = {})
@project = project @project = project
...@@ -21,12 +21,13 @@ module Github ...@@ -21,12 +21,13 @@ module Github
@verbose = options.fetch(:verbose, false) @verbose = options.fetch(:verbose, false)
@cached = Hash.new { |hash, key| hash[key] = Hash.new } @cached = Hash.new { |hash, key| hash[key] = Hash.new }
@errors = [] @errors = []
@last_fetched_at = nil
end end
# rubocop: disable Rails/Output # rubocop: disable Rails/Output
def execute def execute
puts 'Fetching repository...'.color(:aqua) if verbose puts 'Fetching repository...'.color(:aqua) if verbose
fetch_repository setup_and_fetch_repository
puts 'Fetching labels...'.color(:aqua) if verbose puts 'Fetching labels...'.color(:aqua) if verbose
fetch_labels fetch_labels
puts 'Fetching milestones...'.color(:aqua) if verbose puts 'Fetching milestones...'.color(:aqua) if verbose
...@@ -42,7 +43,7 @@ module Github ...@@ -42,7 +43,7 @@ module Github
puts 'Expiring repository cache...'.color(:aqua) if verbose puts 'Expiring repository cache...'.color(:aqua) if verbose
expire_repository_cache expire_repository_cache
true errors.empty?
rescue Github::RepositoryFetchError rescue Github::RepositoryFetchError
expire_repository_cache expire_repository_cache
false false
...@@ -52,18 +53,24 @@ module Github ...@@ -52,18 +53,24 @@ module Github
private private
def fetch_repository def setup_and_fetch_repository
begin begin
project.ensure_repository project.ensure_repository
project.repository.add_remote('github', repo_url) project.repository.add_remote('github', repo_url)
project.repository.set_remote_as_mirror('github') project.repository.set_import_remote_as_mirror('github')
project.repository.fetch_remote('github', forced: true) project.repository.add_remote_fetch_config('github', '+refs/pull/*/head:refs/merge-requests/*/head')
fetch_remote(forced: true)
rescue Gitlab::Git::Repository::NoRepository, Gitlab::Shell::Error => e rescue Gitlab::Git::Repository::NoRepository, Gitlab::Shell::Error => e
error(:project, repo_url, e.message) error(:project, repo_url, e.message)
raise Github::RepositoryFetchError raise Github::RepositoryFetchError
end end
end end
def fetch_remote(forced: false)
@last_fetched_at = Time.now
project.repository.fetch_remote('github', forced: forced)
end
def fetch_wiki_repository def fetch_wiki_repository
return if project.wiki.repository_exists? return if project.wiki.repository_exists?
...@@ -92,7 +99,7 @@ module Github ...@@ -92,7 +99,7 @@ module Github
label.color = representation.color label.color = representation.color
end end
cached[:label_ids][label.title] = label.id cached[:label_ids][representation.title] = label.id
rescue => e rescue => e
error(:label, representation.url, e.message) error(:label, representation.url, e.message)
end end
...@@ -143,7 +150,9 @@ module Github ...@@ -143,7 +150,9 @@ module Github
next unless merge_request.new_record? && pull_request.valid? next unless merge_request.new_record? && pull_request.valid?
begin begin
pull_request.restore_branches! # If the PR has been created/updated after we last fetched the
# remote, we fetch again to get the up-to-date refs.
fetch_remote if pull_request.updated_at > last_fetched_at
author_id = user_id(pull_request.author, project.creator_id) author_id = user_id(pull_request.author, project.creator_id)
description = format_description(pull_request.description, pull_request.author) description = format_description(pull_request.description, pull_request.author)
...@@ -152,6 +161,7 @@ module Github ...@@ -152,6 +161,7 @@ module Github
iid: pull_request.iid, iid: pull_request.iid,
title: pull_request.title, title: pull_request.title,
description: description, description: description,
ref_fetched: true,
source_project: pull_request.source_project, source_project: pull_request.source_project,
source_branch: pull_request.source_branch_name, source_branch: pull_request.source_branch_name,
source_branch_sha: pull_request.source_branch_sha, source_branch_sha: pull_request.source_branch_sha,
...@@ -173,8 +183,6 @@ module Github ...@@ -173,8 +183,6 @@ module Github
fetch_comments(merge_request, :review_comment, review_comments_url, LegacyDiffNote) fetch_comments(merge_request, :review_comment, review_comments_url, LegacyDiffNote)
rescue => e rescue => e
error(:pull_request, pull_request.url, e.message) error(:pull_request, pull_request.url, e.message)
ensure
pull_request.remove_restored_branches!
end end
end end
...@@ -203,11 +211,11 @@ module Github ...@@ -203,11 +211,11 @@ module Github
# for both features, like manipulating assignees, labels # for both features, like manipulating assignees, labels
# and milestones, are provided within the Issues API. # and milestones, are provided within the Issues API.
if representation.pull_request? if representation.pull_request?
return unless representation.has_labels? || representation.has_comments? return unless representation.labels? || representation.comments?
merge_request = MergeRequest.find_by!(target_project_id: project.id, iid: representation.iid) merge_request = MergeRequest.find_by!(target_project_id: project.id, iid: representation.iid)
if representation.has_labels? if representation.labels?
merge_request.update_attribute(:label_ids, label_ids(representation.labels)) merge_request.update_attribute(:label_ids, label_ids(representation.labels))
end end
...@@ -222,14 +230,16 @@ module Github ...@@ -222,14 +230,16 @@ module Github
issue.title = representation.title issue.title = representation.title
issue.description = format_description(representation.description, representation.author) issue.description = format_description(representation.description, representation.author)
issue.state = representation.state issue.state = representation.state
issue.label_ids = label_ids(representation.labels)
issue.milestone_id = milestone_id(representation.milestone) issue.milestone_id = milestone_id(representation.milestone)
issue.author_id = author_id issue.author_id = author_id
issue.assignee_ids = [user_id(representation.assignee)]
issue.created_at = representation.created_at issue.created_at = representation.created_at
issue.updated_at = representation.updated_at issue.updated_at = representation.updated_at
issue.save!(validate: false) issue.save!(validate: false)
issue.update(
label_ids: label_ids(representation.labels),
assignee_ids: assignee_ids(representation.assignees))
fetch_comments_conditionally(issue, representation) fetch_comments_conditionally(issue, representation)
end end
rescue => e rescue => e
...@@ -238,7 +248,7 @@ module Github ...@@ -238,7 +248,7 @@ module Github
end end
def fetch_comments_conditionally(issuable, representation) def fetch_comments_conditionally(issuable, representation)
if representation.has_comments? if representation.comments?
comments_url = "/repos/#{repo}/issues/#{issuable.iid}/comments" comments_url = "/repos/#{repo}/issues/#{issuable.iid}/comments"
fetch_comments(issuable, :comment, comments_url) fetch_comments(issuable, :comment, comments_url)
end end
...@@ -302,7 +312,11 @@ module Github ...@@ -302,7 +312,11 @@ module Github
end end
def label_ids(labels) def label_ids(labels)
labels.map { |attrs| cached[:label_ids][attrs.fetch('name')] }.compact labels.map { |label| cached[:label_ids][label.title] }.compact
end
def assignee_ids(assignees)
assignees.map { |assignee| user_id(assignee) }.compact
end end
def milestone_id(milestone) def milestone_id(milestone)
......
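An illustrative sketch (method names simplified) of the re-fetch guard introduced in the importer above: because pull request heads are now fetched straight into `refs/merge-requests/*/head`, the importer no longer restores and deletes temporary branches and only re-fetches when a PR changed after the last fetch:

    def fetch_remote(forced: false)
      @last_fetched_at = Time.now
      project.repository.fetch_remote('github', forced: forced)
    end

    def import_pull_request(pull_request)
      # Refresh refs only if GitHub reports activity newer than our last fetch.
      fetch_remote if pull_request.updated_at > @last_fetched_at

      # ...build the merge request from the already-fetched refs (ref_fetched: true)...
    end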
...@@ -7,10 +7,14 @@ module Github ...@@ -7,10 +7,14 @@ module Github
raw.dig('user', 'login') || 'unknown' raw.dig('user', 'login') || 'unknown'
end end
def repo?
raw['repo'].present?
end
def repo def repo
return @repo if defined?(@repo) return unless repo?
@repo = Github::Representation::Repo.new(raw['repo']) if raw['repo'].present? @repo ||= Github::Representation::Repo.new(raw['repo'])
end end
def ref def ref
...@@ -25,10 +29,6 @@ module Github ...@@ -25,10 +29,6 @@ module Github
Commit.truncate_sha(sha) Commit.truncate_sha(sha)
end end
def exists?
@exists ||= branch_exists? && commit_exists?
end
def valid? def valid?
sha.present? && ref.present? sha.present? && ref.present?
end end
...@@ -47,14 +47,6 @@ module Github ...@@ -47,14 +47,6 @@ module Github
private private
def branch_exists?
repository.branch_exists?(ref)
end
def commit_exists?
repository.branch_names_contains(sha).include?(ref)
end
def repository def repository
@repository ||= options.fetch(:repository) @repository ||= options.fetch(:repository)
end end
......
...@@ -23,14 +23,14 @@ module Github ...@@ -23,14 +23,14 @@ module Github
@author ||= Github::Representation::User.new(raw['user'], options) @author ||= Github::Representation::User.new(raw['user'], options)
end end
def assignee def labels?
return unless assigned? raw['labels'].any?
@assignee ||= Github::Representation::User.new(raw['assignee'], options)
end end
def assigned? def labels
raw['assignee'].present? @labels ||= Array(raw['labels']).map do |label|
Github::Representation::Label.new(label, options)
end
end end
end end
end end
......
module Github module Github
module Representation module Representation
class Issue < Representation::Issuable class Issue < Representation::Issuable
def labels
raw['labels']
end
def state def state
raw['state'] == 'closed' ? 'closed' : 'opened' raw['state'] == 'closed' ? 'closed' : 'opened'
end end
def has_comments? def comments?
raw['comments'] > 0 raw['comments'] > 0
end end
def has_labels?
labels.count > 0
end
def pull_request? def pull_request?
raw['pull_request'].present? raw['pull_request'].present?
end end
def assigned?
raw['assignees'].present?
end
def assignees
@assignees ||= Array(raw['assignees']).map do |user|
Github::Representation::User.new(user, options)
end
end
end end
end end
end end
module Github module Github
module Representation module Representation
class PullRequest < Representation::Issuable class PullRequest < Representation::Issuable
delegate :user, :repo, :ref, :sha, to: :source_branch, prefix: true delegate :sha, to: :source_branch, prefix: true
delegate :user, :exists?, :repo, :ref, :sha, :short_sha, to: :target_branch, prefix: true delegate :sha, to: :target_branch, prefix: true
def source_project def source_project
project project
end end
def source_branch_name def source_branch_name
@source_branch_name ||= # Mimic the "user:branch" displayed in the MR widget,
if cross_project? || !source_branch_exists? # i.e. "Request to merge rymai:add-external-mounts into master"
source_branch_name_prefixed cross_project? ? "#{source_branch.user}:#{source_branch.ref}" : source_branch.ref
else
source_branch_ref
end
end
def source_branch_exists?
return @source_branch_exists if defined?(@source_branch_exists)
@source_branch_exists = !cross_project? && source_branch.exists?
end end
def target_project def target_project
...@@ -28,11 +19,7 @@ module Github ...@@ -28,11 +19,7 @@ module Github
end end
def target_branch_name def target_branch_name
@target_branch_name ||= target_branch_exists? ? target_branch_ref : target_branch_name_prefixed target_branch.ref
end
def target_branch_exists?
@target_branch_exists ||= target_branch.exists?
end end
def state def state
...@@ -50,16 +37,14 @@ module Github ...@@ -50,16 +37,14 @@ module Github
source_branch.valid? && target_branch.valid? source_branch.valid? && target_branch.valid?
end end
def restore_branches! def assigned?
restore_source_branch! raw['assignee'].present?
restore_target_branch!
end end
def remove_restored_branches! def assignee
return if opened? return unless assigned?
remove_source_branch! @assignee ||= Github::Representation::User.new(raw['assignee'], options)
remove_target_branch!
end end
private private
...@@ -72,48 +57,14 @@ module Github ...@@ -72,48 +57,14 @@ module Github
@source_branch ||= Representation::Branch.new(raw['head'], repository: project.repository) @source_branch ||= Representation::Branch.new(raw['head'], repository: project.repository)
end end
def source_branch_name_prefixed
"gh-#{target_branch_short_sha}/#{iid}/#{source_branch_user}/#{source_branch_ref}"
end
def target_branch def target_branch
@target_branch ||= Representation::Branch.new(raw['base'], repository: project.repository) @target_branch ||= Representation::Branch.new(raw['base'], repository: project.repository)
end end
def target_branch_name_prefixed
"gl-#{target_branch_short_sha}/#{iid}/#{target_branch_user}/#{target_branch_ref}"
end
def cross_project? def cross_project?
return true if source_branch_repo.nil? return true unless source_branch.repo?
source_branch_repo.id != target_branch_repo.id
end
def restore_source_branch!
return if source_branch_exists?
source_branch.restore!(source_branch_name)
end
def restore_target_branch!
return if target_branch_exists?
target_branch.restore!(target_branch_name)
end
def remove_source_branch!
# We should remove the source/target branches only if they were
# restored. Otherwise, we'll remove branches like 'master' that
# target_branch_exists? returns true. In other words, we need
# to clean up only the restored branches that (source|target)_branch_exists?
# returns false for the first time it has been called, because of
# this that is important to memoize these values.
source_branch.remove!(source_branch_name) unless source_branch_exists?
end
def remove_target_branch! source_branch.repo.id != target_branch.repo.id
target_branch.remove!(target_branch_name) unless target_branch_exists?
end end
end end
end end
......
@@ -86,7 +86,7 @@ module Gitlab
         user_name: user.name,
         user_username: user.username,
         user_email: user.email,
-        user_avatar: user.avatar_url,
+        user_avatar: user.avatar_url(only_path: false),
         project_id: project.id,
         project: project.hook_attrs,
         commits: commit_attrs,
......
@@ -1112,6 +1112,8 @@ module Gitlab
       raise NoRepository.new(e)
     rescue GRPC::BadStatus => e
       raise CommandError.new(e)
+    rescue GRPC::InvalidArgument => e
+      raise ArgumentError.new(e)
     end

     private
......
module Gitlab
module GitalyClient
class NamespaceService
def initialize(storage)
@storage = storage
end
def exists?(name)
request = Gitaly::NamespaceExistsRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:namespace_exists, request).exists
end
def add(name)
request = Gitaly::AddNamespaceRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:add_namespace, request)
end
def remove(name)
request = Gitaly::RemoveNamespaceRequest.new(storage_name: @storage, name: name)
gitaly_client_call(:remove_namespace, request)
end
def rename(from, to)
request = Gitaly::RenameNamespaceRequest.new(storage_name: @storage, from: from, to: to)
gitaly_client_call(:rename_namespace, request)
end
private
def gitaly_client_call(type, request)
GitalyClient.call(@storage, :namespace_service, type, request)
end
end
end
end
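A possible usage sketch of the new client (the storage and namespace names are illustrative); each call maps one-to-one onto a Gitaly NamespaceService RPC:

    client = Gitlab::GitalyClient::NamespaceService.new('default')

    client.add('my-group')                      # AddNamespaceRequest
    client.exists?('my-group')                  # NamespaceExistsRequest => response.exists
    client.rename('my-group', 'renamed-group')  # RenameNamespaceRequest
    client.remove('renamed-group')              # RemoveNamespaceRequest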
...@@ -222,10 +222,18 @@ module Gitlab ...@@ -222,10 +222,18 @@ module Gitlab
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385 # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def add_namespace(storage, name) def add_namespace(storage, name)
path = full_path(storage, name) Gitlab::GitalyClient.migrate(:add_namespace) do |enabled|
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name) if enabled
gitaly_namespace_client(storage).add(name)
else
path = full_path(storage, name)
FileUtils.mkdir_p(path, mode: 0770) unless exists?(storage, name)
end
end
rescue Errno::EEXIST => e rescue Errno::EEXIST => e
Rails.logger.warn("Directory exists as a file: #{e} at: #{path}") Rails.logger.warn("Directory exists as a file: #{e} at: #{path}")
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end end
# Remove directory from repositories storage # Remove directory from repositories storage
...@@ -236,7 +244,15 @@ module Gitlab ...@@ -236,7 +244,15 @@ module Gitlab
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385 # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def rm_namespace(storage, name) def rm_namespace(storage, name)
FileUtils.rm_r(full_path(storage, name), force: true) Gitlab::GitalyClient.migrate(:remove_namespace) do |enabled|
if enabled
gitaly_namespace_client(storage).remove(name)
else
FileUtils.rm_r(full_path(storage, name), force: true)
end
end
rescue GRPC::InvalidArgument => e
raise ArgumentError, e.message
end end
# Move namespace directory inside repositories storage # Move namespace directory inside repositories storage
...@@ -246,9 +262,17 @@ module Gitlab ...@@ -246,9 +262,17 @@ module Gitlab
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385 # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def mv_namespace(storage, old_name, new_name) def mv_namespace(storage, old_name, new_name)
return false if exists?(storage, new_name) || !exists?(storage, old_name) Gitlab::GitalyClient.migrate(:rename_namespace) do |enabled|
if enabled
gitaly_namespace_client(storage).rename(old_name, new_name)
else
return false if exists?(storage, new_name) || !exists?(storage, old_name)
FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name)) FileUtils.mv(full_path(storage, old_name), full_path(storage, new_name))
end
end
rescue GRPC::InvalidArgument
false
end end
def url_to_repo(path) def url_to_repo(path)
...@@ -272,7 +296,13 @@ module Gitlab ...@@ -272,7 +296,13 @@ module Gitlab
# #
# Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385 # Gitaly migration: https://gitlab.com/gitlab-org/gitaly/issues/385
def exists?(storage, dir_name) def exists?(storage, dir_name)
File.exist?(full_path(storage, dir_name)) Gitlab::GitalyClient.migrate(:namespace_exists) do |enabled|
if enabled
gitaly_namespace_client(storage).exists?(dir_name)
else
File.exist?(full_path(storage, dir_name))
end
end
end end
protected protected
...@@ -349,6 +379,14 @@ module Gitlab ...@@ -349,6 +379,14 @@ module Gitlab
Bundler.with_original_env { Popen.popen(cmd, nil, vars) } Bundler.with_original_env { Popen.popen(cmd, nil, vars) }
end end
def gitaly_namespace_client(storage_path)
storage, _value = Gitlab.config.repositories.storages.find do |storage, value|
value['path'] == storage_path
end
Gitlab::GitalyClient::NamespaceService.new(storage)
end
def gitaly_migrate(method, &block) def gitaly_migrate(method, &block)
Gitlab::GitalyClient.migrate(method, &block) Gitlab::GitalyClient.migrate(method, &block)
rescue GRPC::NotFound, GRPC::BadStatus => e rescue GRPC::NotFound, GRPC::BadStatus => e
......
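A sketch of how the shell resolves a storage path back to its configured storage name before talking to Gitaly (path and name values are illustrative Omnibus defaults): Gitaly addresses storages by name, while the legacy shell code passed filesystem paths.

    Gitlab.config.repositories.storages
    # => { "default" => { "path" => "/var/opt/gitlab/git-data/repositories", ... } }

    storage, _value = Gitlab.config.repositories.storages.find do |_name, value|
      value['path'] == '/var/opt/gitlab/git-data/repositories'
    end
    storage  # => "default", passed to Gitlab::GitalyClient::NamespaceService.new(storage)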
@@ -50,6 +50,8 @@ namespace :gitlab do
     # only generate a configuration for the most common and simplest case: when
     # we have exactly one Gitaly process and we are sure it is running locally
     # because it uses a Unix socket.
+    # For development and testing purposes, an extra storage is added to gitaly,
+    # which is not known to Rails, but must be explicitly stubbed.
     def gitaly_configuration_toml(gitaly_ruby: true)
       storages = []
       address = nil
@@ -67,6 +69,11 @@ namespace :gitlab do
         storages << { name: key, path: val['path'] }
       end

+      if Rails.env.test?
+        storages << { name: 'test_second_storage', path: Rails.root.join('tmp', 'tests', 'second_storage').to_s }
+      end
+
       config = { socket_path: address.sub(%r{\Aunix:}, ''), storage: storages }
       config[:auth] = { token: 'secret' } if Rails.env.test?
       config[:'gitaly-ruby'] = { dir: File.join(Dir.pwd, 'ruby') } if gitaly_ruby
......
...@@ -39,13 +39,19 @@ class GithubImport ...@@ -39,13 +39,19 @@ class GithubImport
def import! def import!
@project.force_import_start @project.force_import_start
import_success = false
timings = Benchmark.measure do timings = Benchmark.measure do
Github::Import.new(@project, @options).execute import_success = Github::Import.new(@project, @options).execute
end end
puts "Import finished. Timings: #{timings}".color(:green) if import_success
@project.import_finish
@project.import_finish puts "Import finished. Timings: #{timings}".color(:green)
else
puts "Import was not successful. Errors were as follows:"
puts @project.import_error
end
end end
def new_project def new_project
...@@ -53,18 +59,23 @@ class GithubImport ...@@ -53,18 +59,23 @@ class GithubImport
namespace_path, _sep, name = @project_path.rpartition('/') namespace_path, _sep, name = @project_path.rpartition('/')
namespace = find_or_create_namespace(namespace_path) namespace = find_or_create_namespace(namespace_path)
Projects::CreateService.new( project = Projects::CreateService.new(
@current_user, @current_user,
name: name, name: name,
path: name, path: name,
description: @repo['description'], description: @repo['description'],
namespace_id: namespace.id, namespace_id: namespace.id,
visibility_level: visibility_level, visibility_level: visibility_level,
import_type: 'github',
import_source: @repo['full_name'],
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@"),
skip_wiki: @repo['has_wiki'] skip_wiki: @repo['has_wiki']
).execute ).execute
project.update!(
import_type: 'github',
import_source: @repo['full_name'],
import_url: @repo['clone_url'].sub('://', "://#{@options[:token]}@")
)
project
end end
end end
......
@@ -2,7 +2,7 @@ require 'spec_helper'
 describe Admin::UsersController do
   let(:user) { create(:user) }
-  let(:admin) { create(:admin) }
+  set(:admin) { create(:admin) }

   before do
     sign_in(admin)
......
...@@ -57,7 +57,7 @@ describe Dashboard::TodosController do ...@@ -57,7 +57,7 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page)) expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end end
it 'redirects to correspondent page' do it 'goes to the correct page' do
get :index, page: last_page get :index, page: last_page
expect(assigns(:todos).current_page).to eq(last_page) expect(assigns(:todos).current_page).to eq(last_page)
...@@ -70,6 +70,30 @@ describe Dashboard::TodosController do ...@@ -70,6 +70,30 @@ describe Dashboard::TodosController do
expect(response).to redirect_to(dashboard_todos_path(page: last_page)) expect(response).to redirect_to(dashboard_todos_path(page: last_page))
end end
context 'when providing no filters' do
it 'does not perform a query to get the page count, but gets that from the user' do
allow(controller).to receive(:current_user).and_return(user)
expect(user).to receive(:todos_pending_count).and_call_original
get :index, page: (last_page + 1).to_param, sort: :created_asc
expect(response).to redirect_to(dashboard_todos_path(page: last_page, sort: :created_asc))
end
end
context 'when providing filters' do
it 'performs a query to get the correct page count' do
allow(controller).to receive(:current_user).and_return(user)
expect(user).not_to receive(:todos_pending_count)
get :index, page: (last_page + 1).to_param, project_id: project.id
expect(response).to redirect_to(dashboard_todos_path(page: last_page, project_id: project.id))
end
end
end end
end end
......
require 'spec_helper' require 'spec_helper'
describe Projects::ArtifactsController do describe Projects::ArtifactsController do
let(:user) { create(:user) } set(:user) { create(:user) }
let(:project) { create(:project, :repository) } set(:project) { create(:project, :repository, :public) }
let(:pipeline) do let(:pipeline) do
create(:ci_pipeline, create(:ci_pipeline,
...@@ -15,7 +15,7 @@ describe Projects::ArtifactsController do ...@@ -15,7 +15,7 @@ describe Projects::ArtifactsController do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) } let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do before do
project.team << [user, :developer] project.add_developer(user)
sign_in(user) sign_in(user)
end end
...@@ -47,19 +47,67 @@ describe Projects::ArtifactsController do ...@@ -47,19 +47,67 @@ describe Projects::ArtifactsController do
end end
describe 'GET file' do describe 'GET file' do
context 'when the file exists' do before do
it 'renders the file view' do allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt' end
expect(response).to render_template('projects/artifacts/file') context 'when the file is served by GitLab Pages' do
before do
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
context 'when the file exists' do
it 'renders the file view' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to have_http_status(302)
end
end
context 'when the file does not exist' do
it 'responds Not Found' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
expect(response).to be_not_found
end
end end
end end
context 'when the file does not exist' do context 'when the file is served through Rails' do
it 'responds Not Found' do context 'when the file exists' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown' it 'renders the file view' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to be_not_found expect(response).to have_http_status(:ok)
expect(response).to render_template('projects/artifacts/file')
end
end
context 'when the file does not exist' do
it 'responds Not Found' do
get :file, namespace_id: project.namespace, project_id: project, job_id: job, path: 'unknown'
expect(response).to be_not_found
end
end
end
context 'when the project is private' do
let(:private_project) { create(:project, :repository, :private) }
let(:pipeline) { create(:ci_pipeline, project: private_project) }
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
before do
private_project.add_developer(user)
allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
end
it 'does not redirect the request' do
get :file, namespace_id: private_project.namespace, project_id: private_project, job_id: job, path: 'ci_artifacts.txt'
expect(response).to have_http_status(:ok)
expect(response).to render_template('projects/artifacts/file')
end end
end end
end end
......
@@ -140,7 +140,8 @@ describe ProjectsController do
   end

   context 'when the storage is not available', broken_storage: true do
-    let(:project) { create(:project, :broken_storage) }
+    set(:project) { create(:project, :broken_storage) }
+
     before do
       project.add_developer(user)
       sign_in(user)
......
@@ -3,10 +3,13 @@ require 'rails_helper'
describe 'Merge request', :js do
  let(:user) { create(:user) }
  let(:project) { create(:project, :repository) }
+  let(:project_only_mwps) { create(:project, :repository, only_allow_merge_if_pipeline_succeeds: true) }
  let(:merge_request) { create(:merge_request, source_project: project) }
+  let(:merge_request_in_only_mwps_project) { create(:merge_request, source_project: project_only_mwps) }

  before do
-    project.team << [user, :master]
+    project.add_master(user)
+    project_only_mwps.add_master(user)
    sign_in(user)
  end

@@ -160,6 +163,20 @@ describe 'Merge request', :js do
    end
  end

+  context 'view merge request in project with only-mwps setting enabled but no CI is setup' do
+    before do
+      visit project_merge_request_path(project_only_mwps, merge_request_in_only_mwps_project)
+    end
+
+    it 'should be allowed to merge' do
+      # Wait for the `ci_status` and `merge_check` requests
+      wait_for_requests
+
+      expect(page).to have_selector('.accept-merge-request')
+      expect(find('.accept-merge-request')['disabled']).not_to be(true)
+    end
+  end
+
  context 'view merge request with MWPS enabled but automatically merge fails' do
    before do
      merge_request.update(
@@ -4,16 +4,15 @@ feature 'Browse artifact', :js do
  let(:project) { create(:project, :public) }
  let(:pipeline) { create(:ci_empty_pipeline, project: project) }
  let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+  let(:browse_url) do
+    browse_path('other_artifacts_0.1.2')
+  end

  def browse_path(path)
    browse_project_job_artifacts_path(project, job, path)
  end

  context 'when visiting old URL' do
-    let(:browse_url) do
-      browse_path('other_artifacts_0.1.2')
-    end
-
    before do
      visit browse_url.sub('/-/jobs', '/builds')
    end

@@ -22,4 +21,47 @@
      expect(page.current_path).to eq(browse_url)
    end
  end
+
+  context 'when browsing a directory with an text file' do
+    let(:txt_entry) { job.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+    before do
+      allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+      allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+    end
+
+    context 'when the project is public' do
+      it "shows external link icon and styles" do
+        visit browse_url
+
+        link = first('.tree-item-file-external-link')
+
+        expect(page).to have_link('doc_sample.txt', href: file_project_job_artifacts_path(project, job, path: txt_entry.blob.path))
+        expect(link[:target]).to eq('_blank')
+        expect(link[:rel]).to include('noopener')
+        expect(link[:rel]).to include('noreferrer')
+        expect(page).to have_selector('.js-artifact-tree-external-icon')
+      end
+    end
+
+    context 'when the project is private' do
+      let!(:private_project) { create(:project, :private) }
+      let(:pipeline) { create(:ci_empty_pipeline, project: private_project) }
+      let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+      let(:user) { create(:user) }
+
+      before do
+        private_project.add_developer(user)
+
+        sign_in(user)
+      end
+
+      it 'shows internal link styles' do
+        visit browse_project_job_artifacts_path(private_project, job, 'other_artifacts_0.1.2')
+
+        expect(page).to have_link('doc_sample.txt')
+        expect(page).not_to have_selector('.js-artifact-tree-external-icon')
+      end
+    end
+  end
end
@@ -60,6 +60,23 @@ feature 'Protected Branches', :js do
        expect(page).to have_content('No branches to show')
      end
    end

+    describe "Saved defaults" do
+      it "keeps the allowed to merge and push dropdowns defaults based on the previous selection" do
+        visit project_protected_branches_path(project)
+        find(".js-allowed-to-merge").trigger('click')
+        click_link 'No one'
+        find(".js-allowed-to-push").trigger('click')
+        click_link 'Developers + Masters'
+        visit project_protected_branches_path(project)
+
+        page.within(".js-allowed-to-merge") do
+          expect(page.find(".dropdown-toggle-text")).to have_content("No one")
+        end
+
+        page.within(".js-allowed-to-push") do
+          expect(page.find(".dropdown-toggle-text")).to have_content("Developers + Masters")
+        end
+      end
+    end
  end

  context 'logged in as admin' do
@@ -46,6 +46,7 @@
    "branch_missing": { "type": "boolean" },
    "has_conflicts": { "type": "boolean" },
    "can_be_merged": { "type": "boolean" },
+   "mergeable": { "type": "boolean" },
    "project_archived": { "type": "boolean" },
    "only_allow_merge_if_pipeline_succeeds": { "type": "boolean" },
    "has_ci": { "type": "boolean" },
@@ -12,6 +12,7 @@ const createComponent = (customConfig = {}) => {
      pipeline: null,
      isPipelineFailed: false,
      isPipelinePassing: false,
+      isMergeAllowed: true,
      onlyAllowMergeIfPipelineSucceeds: false,
      hasCI: false,
      ciStatus: null,

@@ -212,21 +213,24 @@ describe('MRWidgetReadyToMerge', () => {
  describe('isMergeButtonDisabled', () => {
    it('should return false with initial data', () => {
+      vm.mr.isMergeAllowed = true;

      expect(vm.isMergeButtonDisabled).toBeFalsy();
    });

    it('should return true when there is no commit message', () => {
+      vm.mr.isMergeAllowed = true;
      vm.commitMessage = '';

      expect(vm.isMergeButtonDisabled).toBeTruthy();
    });

    it('should return true if merge is not allowed', () => {
+      vm.mr.isMergeAllowed = false;
      vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
-      vm.mr.isPipelineFailed = true;

      expect(vm.isMergeButtonDisabled).toBeTruthy();
    });

    it('should return true when the vm instance is making request', () => {
+      vm.mr.isMergeAllowed = true;
      vm.isMakingRequest = true;

      expect(vm.isMergeButtonDisabled).toBeTruthy();
    });

@@ -234,53 +238,27 @@
  });

  describe('methods', () => {
-    describe('isMergeAllowed', () => {
-      it('should return true when no pipeline and not required to succeed', () => {
-        vm.mr.onlyAllowMergeIfPipelineSucceeds = false;
-        vm.mr.isPipelinePassing = false;
-
-        expect(vm.isMergeAllowed()).toBeTruthy();
-      });
-
-      it('should return true when pipeline failed and not required to succeed', () => {
-        vm.mr.onlyAllowMergeIfPipelineSucceeds = false;
-        vm.mr.isPipelinePassing = false;
-
-        expect(vm.isMergeAllowed()).toBeTruthy();
-      });
-
-      it('should return false when pipeline failed and required to succeed', () => {
-        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
-        vm.mr.isPipelinePassing = false;
-
-        expect(vm.isMergeAllowed()).toBeFalsy();
-      });
-
-      it('should return true when pipeline succeeded and required to succeed', () => {
-        vm.mr.onlyAllowMergeIfPipelineSucceeds = true;
-        vm.mr.isPipelinePassing = true;
-
-        expect(vm.isMergeAllowed()).toBeTruthy();
-      });
-    });
-
    describe('shouldShowMergeControls', () => {
      it('should return false when an external pipeline is running and required to succeed', () => {
-        spyOn(vm, 'isMergeAllowed').and.returnValue(false);
+        vm.mr.isMergeAllowed = false;
        vm.mr.isPipelineActive = false;

        expect(vm.shouldShowMergeControls()).toBeFalsy();
      });

      it('should return true when the build succeeded or build not required to succeed', () => {
-        spyOn(vm, 'isMergeAllowed').and.returnValue(true);
+        vm.mr.isMergeAllowed = true;
        vm.mr.isPipelineActive = false;

        expect(vm.shouldShowMergeControls()).toBeTruthy();
      });

      it('should return true when showing the MWPS button and a pipeline is running that needs to be successful', () => {
-        spyOn(vm, 'isMergeAllowed').and.returnValue(false);
+        vm.mr.isMergeAllowed = false;
        vm.mr.isPipelineActive = true;

        expect(vm.shouldShowMergeControls()).toBeTruthy();
      });

      it('should return true when showing the MWPS button but not required for the pipeline to succeed', () => {
-        spyOn(vm, 'isMergeAllowed').and.returnValue(true);
+        vm.mr.isMergeAllowed = true;
        vm.mr.isPipelineActive = true;

        expect(vm.shouldShowMergeControls()).toBeTruthy();
      });
@@ -15,10 +15,6 @@ describe Gitlab::Shell do
  it { is_expected.to respond_to :add_repository }
  it { is_expected.to respond_to :remove_repository }
  it { is_expected.to respond_to :fork_repository }
-  it { is_expected.to respond_to :add_namespace }
-  it { is_expected.to respond_to :rm_namespace }
-  it { is_expected.to respond_to :mv_namespace }
-  it { is_expected.to respond_to :exists? }

  it { expect(gitlab_shell.url_to_repo('diaspora')).to eq(Gitlab.config.gitlab_shell.ssh_path_prefix + "diaspora.git") }

@@ -363,4 +359,52 @@ describe Gitlab::Shell do
      end
    end
  end
+
+  describe 'namespace actions' do
+    subject { described_class.new }
+    let(:storage_path) { Gitlab.config.repositories.storages.default.path }
+
+    describe '#add_namespace' do
+      it 'creates a namespace' do
+        subject.add_namespace(storage_path, "mepmep")
+
+        expect(subject.exists?(storage_path, "mepmep")).to be(true)
+      end
+    end
+
+    describe '#exists?' do
+      context 'when the namespace does not exist' do
+        it 'returns false' do
+          expect(subject.exists?(storage_path, "non-existing")).to be(false)
+        end
+      end
+
+      context 'when the namespace exists' do
+        it 'returns true' do
+          subject.add_namespace(storage_path, "mepmep")
+
+          expect(subject.exists?(storage_path, "mepmep")).to be(true)
+        end
+      end
+    end
+
+    describe '#remove' do
+      it 'removes the namespace' do
+        subject.add_namespace(storage_path, "mepmep")
+        subject.rm_namespace(storage_path, "mepmep")
+
+        expect(subject.exists?(storage_path, "mepmep")).to be(false)
+      end
+    end
+
+    describe '#mv_namespace' do
+      it 'renames the namespace' do
+        subject.add_namespace(storage_path, "mepmep")
+        subject.mv_namespace(storage_path, "mepmep", "2mep")
+
+        expect(subject.exists?(storage_path, "mepmep")).to be(false)
+        expect(subject.exists?(storage_path, "2mep")).to be(true)
+      end
+    end
+  end
end
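The new 'namespace actions' examples double as documentation for the storage-scoped namespace calls on Gitlab::Shell. A condensed usage sketch built only from the calls exercised above (the storage path is whatever `Gitlab.config.repositories.storages.default.path` resolves to in a given setup):

shell   = Gitlab::Shell.new
storage = Gitlab.config.repositories.storages.default.path

shell.add_namespace(storage, 'mepmep')          # create the namespace directory under the storage
shell.exists?(storage, 'mepmep')                # => true once it has been created
shell.mv_namespace(storage, 'mepmep', '2mep')   # rename the namespace in place
shell.rm_namespace(storage, '2mep')             # remove it again
shell.exists?(storage, '2mep')                  # => false afterwards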
require 'spec_helper'

describe Ci::ArtifactBlob do
-  let(:build) { create(:ci_build, :artifacts) }
+  set(:project) { create(:project, :public) }
+  set(:build) { create(:ci_build, :artifacts, project: project) }
  let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }

  subject { described_class.new(entry) }

@@ -41,4 +42,51 @@ describe Ci::ArtifactBlob do
      expect(subject.external_storage).to eq(:build_artifact)
    end
  end
+
+  describe '#external_url' do
+    before do
+      allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+      allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+    end
+
+    context '.gif extension' do
+      it 'returns nil' do
+        expect(subject.external_url(build.project, build)).to be_nil
+      end
+    end
+
+    context 'txt extensions' do
+      let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+      it 'returns a URL' do
+        url = subject.external_url(build.project, build)

+        expect(url).not_to be_nil
+        expect(url).to start_with("http")
+        expect(url).to match Gitlab.config.pages.host
+        expect(url).to end_with(entry.path)
+      end
+    end
+  end
+
+  describe '#external_link?' do
+    before do
+      allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+      allow(Gitlab.config.pages).to receive(:artifacts_server).and_return(true)
+    end
+
+    context 'gif extensions' do
+      it 'returns false' do
+        expect(subject.external_link?(build)).to be false
+      end
+    end
+
+    context 'txt extensions' do
+      let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+
+      it 'returns true' do
+        expect(subject.external_link?(build)).to be true
+      end
+    end
+  end
end
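The `#external_url` and `#external_link?` examples imply that the decision is driven by the Pages artifacts server settings together with the entry's file type: a `.gif` stays internal (nil URL), while a `.txt` gets a URL on the configured Pages host ending in the entry path. A small caller-side sketch using only the methods shown above; the fallback branch is an assumption about how a nil result might be used, not GitLab's actual view code:

blob = Ci::ArtifactBlob.new(entry)

if blob.external_link?(build)
  url = blob.external_url(build.project, build)  # per the spec: an http URL on Gitlab.config.pages.host ending with entry.path
else
  url = nil                                      # caller falls back to serving the blob through Rails
end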
@@ -152,22 +152,24 @@ describe Namespace do
  end

  describe '#move_dir' do
+    let(:namespace) { create(:namespace) }
+    let!(:project) { create(:project_empty_repo, namespace: namespace) }
+
    before do
-      @namespace = create :namespace
-      @project = create(:project_empty_repo, namespace: @namespace)
-      allow(@namespace).to receive(:path_changed?).and_return(true)
+      allow(namespace).to receive(:path_changed?).and_return(true)
    end

    it "raises error when directory exists" do
-      expect { @namespace.move_dir }.to raise_error("namespace directory cannot be moved")
+      expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
    end

    it "moves dir if path changed" do
-      new_path = @namespace.full_path + "_new"
-      allow(@namespace).to receive(:full_path_was).and_return(@namespace.full_path)
-      allow(@namespace).to receive(:full_path).and_return(new_path)
-      expect(@namespace).to receive(:remove_exports!)
-      expect(@namespace.move_dir).to be_truthy
+      new_path = namespace.full_path + "_new"
+
+      allow(namespace).to receive(:full_path_was).and_return(namespace.full_path)
+      allow(namespace).to receive(:full_path).and_return(new_path)
+      expect(namespace).to receive(:remove_exports!)
+      expect(namespace.move_dir).to be_truthy
    end

    context "when any project has container images" do

@@ -177,14 +179,14 @@
        stub_container_registry_config(enabled: true)
        stub_container_registry_tags(repository: :any, tags: ['tag'])

-        create(:project, namespace: @namespace, container_repositories: [container_repository])
+        create(:project, namespace: namespace, container_repositories: [container_repository])

-        allow(@namespace).to receive(:path_was).and_return(@namespace.path)
-        allow(@namespace).to receive(:path).and_return('new_path')
+        allow(namespace).to receive(:path_was).and_return(namespace.path)
+        allow(namespace).to receive(:path).and_return('new_path')
      end

      it 'raises an error about not movable project' do
-        expect { @namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
+        expect { namespace.move_dir }.to raise_error(/Namespace cannot be moved/)
      end
    end
@@ -422,20 +422,10 @@ describe Project do
  end

  describe '#repository_storage_path' do
-    let(:project) { create(:project, repository_storage: 'custom') }
-
-    before do
-      FileUtils.mkdir('tmp/tests/custom_repositories')
-      storages = { 'custom' => { 'path' => 'tmp/tests/custom_repositories' } }
-
-      allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
-    end
-
-    after do
-      FileUtils.rm_rf('tmp/tests/custom_repositories')
-    end
+    let(:project) { create(:project) }

    it 'returns the repository storage path' do
-      expect(project.repository_storage_path).to eq('tmp/tests/custom_repositories')
+      expect(Dir.exist?(project.repository_storage_path)).to be(true)
    end
  end
@@ -634,6 +634,10 @@ describe Repository do
  end

  describe '#fetch_ref' do
+    # Setting the var here, sidesteps the stub that makes gitaly raise an error
+    # before the actual test call
+    set(:broken_repository) { create(:project, :broken_storage).repository }
+
    describe 'when storage is broken', broken_storage: true do
      it 'should raise a storage error' do
        expect_to_raise_storage_error do
@@ -224,17 +224,20 @@ describe 'gitlab:app namespace rake task' do
    end

    context 'multiple repository storages' do
+      let(:gitaly_address) { Gitlab.config.repositories.storages.default.gitaly_address }
+      let(:storages) do
+        {
+          'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
+          'test_second_storage' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
+        }
+      end
+
      let(:project_a) { create(:project, :repository, repository_storage: 'default') }
-      let(:project_b) { create(:project, :repository, repository_storage: 'custom') }
+      let(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }

      before do
        FileUtils.mkdir('tmp/tests/default_storage')
        FileUtils.mkdir('tmp/tests/custom_storage')
-        gitaly_address = Gitlab.config.repositories.storages.default.gitaly_address
-        storages = {
-          'default' => { 'path' => Settings.absolute('tmp/tests/default_storage'), 'gitaly_address' => gitaly_address },
-          'custom' => { 'path' => Settings.absolute('tmp/tests/custom_storage'), 'gitaly_address' => gitaly_address }
-        }
        allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)

        # Create the projects now, after mocking the settings but before doing the backup

@@ -253,7 +256,7 @@
      after do
        FileUtils.rm_rf('tmp/tests/default_storage')
        FileUtils.rm_rf('tmp/tests/custom_storage')
-        FileUtils.rm(@backup_tar)
+        FileUtils.rm(@backup_tar) if @backup_tar
      end

      it 'includes repositories in all repository storages' do
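The guarded cleanup (`FileUtils.rm(@backup_tar) if @backup_tar`) matters because the `after` hook also runs for examples that fail before the backup archive is created. Assuming `@backup_tar` is only assigned once the rake task has produced a file, the guard keeps the hook from masking the real failure:

after do
  FileUtils.rm_rf('tmp/tests/default_storage')
  FileUtils.rm_rf('tmp/tests/custom_storage')

  # FileUtils.rm(nil) raises TypeError ("no implicit conversion of nil into String"),
  # which would hide whatever made the example fail before @backup_tar was set;
  # the guard simply skips the archive cleanup in that case.
  FileUtils.rm(@backup_tar) if @backup_tar
end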