Commit 938624c9 authored by Dmitriy Zaporozhets's avatar Dmitriy Zaporozhets

Merge branch 'ce-to-ee' into 'master'

CE upstream

Closes gitlab-ce#26785

See merge request !1096
parents e965311a caf48647
-8.16.0-ee-pre
+8.17.0-ee-pre
@@ -62,6 +62,7 @@ var DropDown = function(list) {
   this.list = list;
   this.items = [];
   this.getItems();
+  this.initTemplateString();
   this.addEvents();
   this.initialState = list.innerHTML;
 };
@@ -72,6 +73,17 @@ Object.assign(DropDown.prototype, {
     return this.items;
   },

+  initTemplateString: function() {
+    var items = this.items || this.getItems();
+    var templateString = '';
+
+    if(items.length > 0) {
+      templateString = items[items.length - 1].outerHTML;
+    }
+
+    this.templateString = templateString;
+    return this.templateString;
+  },
+
   clickEvent: function(e) {
     // climb up the tree to find the LI
     var selected = utils.closest(e.target, 'LI');
@@ -111,30 +123,21 @@ Object.assign(DropDown.prototype, {
   addData: function(data) {
     this.data = (this.data || []).concat(data);
-    this.render(data);
+    this.render(this.data);
   },

   // call render manually on data;
   render: function(data){
     // debugger
     // empty the list first
-    var sampleItem;
+    var templateString = this.templateString;
     var newChildren = [];
     var toAppend;

-    for(var i = 0; i < this.items.length; i++) {
-      var item = this.items[i];
-      sampleItem = item;
-      if(item.parentNode && item.parentNode.dataset.hasOwnProperty('dynamic')) {
-        item.parentNode.removeChild(item);
-      }
-    }
-
-    newChildren = this.data.map(function(dat){
-      var html = utils.t(sampleItem.outerHTML, dat);
+    newChildren = (data ||[]).map(function(dat){
+      var html = utils.t(templateString, dat);
       var template = document.createElement('div');
       template.innerHTML = html;
-      // console.log(template.content)

       // Help set the image src template
       var imageTags = template.querySelectorAll('img[data-src]');
@@ -173,10 +176,7 @@ Object.assign(DropDown.prototype, {
   },

   destroy: function() {
-    if (!this.hidden) {
       this.hide();
-    }
     this.list.removeEventListener('click', this.clickWrapper);
   }
 });
@@ -278,7 +278,7 @@ require('./window')(function(w){
         self.hooks[i].list.hide();
       }
     }.bind(this);

-    w.addEventListener('click', this.windowClickedWrapper);
+    document.addEventListener('click', this.windowClickedWrapper);
   },

   removeEvents: function(){
@@ -462,6 +462,8 @@ Object.assign(HookInput.prototype, {
     var self = this;

     this.mousedown = function mousedown(e) {
+      if(self.hasRemovedEvents) return;
+
       var mouseEvent = new CustomEvent('mousedown.dl', {
         detail: {
           hook: self,
@@ -474,6 +476,8 @@ Object.assign(HookInput.prototype, {
     }

     this.input = function input(e) {
+      if(self.hasRemovedEvents) return;
+
       var inputEvent = new CustomEvent('input.dl', {
         detail: {
           hook: self,
@@ -487,10 +491,14 @@ Object.assign(HookInput.prototype, {
     }

     this.keyup = function keyup(e) {
+      if(self.hasRemovedEvents) return;
+
       keyEvent(e, 'keyup.dl');
     }

     this.keydown = function keydown(e) {
+      if(self.hasRemovedEvents) return;
+
       keyEvent(e, 'keydown.dl');
     }
@@ -520,7 +528,8 @@ Object.assign(HookInput.prototype, {
       this.trigger.addEventListener('keydown', this.keydown);
     },

-    removeEvents: function(){
+    removeEvents: function() {
+      this.hasRemovedEvents = true;
       this.trigger.removeEventListener('mousedown', this.mousedown);
       this.trigger.removeEventListener('input', this.input);
       this.trigger.removeEventListener('keyup', this.keyup);
@@ -668,14 +677,14 @@ var camelize = function(str) {
 };

 var closest = function(thisTag, stopTag) {
-  while(thisTag.tagName !== stopTag && thisTag.tagName !== 'HTML'){
+  while(thisTag && thisTag.tagName !== stopTag && thisTag.tagName !== 'HTML'){
     thisTag = thisTag.parentNode;
   }
   return thisTag;
 };

 var isDropDownParts = function(target) {
-  if(target.tagName === 'HTML') { return false; }
+  if(!target || target.tagName === 'HTML') { return false; }
   return (
     target.hasAttribute(DATA_TRIGGER) ||
     target.hasAttribute(DATA_DROPDOWN)
...
/* global CustomEvent */
/* eslint-disable no-global-assign */
// Custom event support for IE
CustomEvent = function CustomEvent(event, parameters) {
const params = parameters || { bubbles: false, cancelable: false, detail: undefined };
const evt = document.createEvent('CustomEvent');
evt.initCustomEvent(event, params.bubbles, params.cancelable, params.detail);
return evt;
};
CustomEvent.prototype = window.Event.prototype;
@@ -78,7 +78,10 @@
   dispatchInputEvent() {
     // Propogate input change to FilteredSearchDropdownManager
     // so that it can determine which dropdowns to open
-    this.input.dispatchEvent(new Event('input'));
+    this.input.dispatchEvent(new CustomEvent('input', {
+      bubbles: true,
+      cancelable: true,
+    }));
   }

   hideDropdown() {
...
@@ -95,7 +95,15 @@
     const input = this.filteredSearchInput;
     const inputText = input.value.slice(0, input.selectionStart);
     const filterIconPadding = 27;
-    const offset = gl.text.getTextWidth(inputText, this.font) + filterIconPadding;
+    let offset = gl.text.getTextWidth(inputText, this.font) + filterIconPadding;
+
+    const currentDropdownWidth = this.mapping[key].element.clientWidth === 0 ? 200 :
+      this.mapping[key].element.clientWidth;
+    const offsetMaxWidth = this.filteredSearchInput.clientWidth - currentDropdownWidth;
+
+    if (offsetMaxWidth < offset) {
+      offset = offsetMaxWidth;
+    }

     this.mapping[key].reference.setOffset(offset);
   }
...
@@ -2,12 +2,12 @@
 (function() {
   this.GroupAvatar = (function() {
     function GroupAvatar() {
-      $('.js-choose-group-avatar-button').bind("click", function() {
+      $('.js-choose-group-avatar-button').on("click", function() {
         var form;
         form = $(this).closest("form");
         return form.find(".js-group-avatar-input").click();
       });
-      $('.js-group-avatar-input').bind("change", function() {
+      $('.js-group-avatar-input').on("change", function() {
         var filename, form;
         form = $(this).closest("form");
         filename = $(this).val().replace(/^.*[\\\/]/, '');
...
@@ -8,31 +8,42 @@
    * temporarily.
    * */

-  if ($('.accept-mr-form').length) {
-    $('.accept-mr-form').on('ajax:send', () => {
+  $(document)
+    .off('ajax:send', '.accept-mr-form')
+    .on('ajax:send', '.accept-mr-form', () => {
       $('.accept-mr-form :input').disable();
     });

-    $('.accept_merge_request').on('click', () => {
+  $(document)
+    .off('click', '.accept_merge_request')
+    .on('click', '.accept_merge_request', () => {
       $('.js-merge-button').html('<i class="fa fa-spinner fa-spin"></i> Merge in progress');
     });

-    $('.merge_when_build_succeeds').on('click', () => {
+  $(document)
+    .off('click', '.merge_when_build_succeeds')
+    .on('click', '.merge_when_build_succeeds', () => {
       $('#merge_when_build_succeeds').val('1');
     });

-    $('.js-merge-dropdown a').on('click', (e) => {
+  $(document)
+    .off('click', '.js-merge-dropdown a')
+    .on('click', '.js-merge-dropdown a', (e) => {
       e.preventDefault();
-      $(this).closest('form').submit();
+      $(e.target).closest('form').submit();
     });
-  } else if ($('.rebase-in-progress').length) {
+
+  if ($('.rebase-in-progress').length) {
     merge_request_widget.rebaseInProgress();
   } else if ($('.rebase-mr-form').length) {
-    $('.rebase-mr-form').on('ajax:send', () => {
+    $(document)
+      .off('ajax:send', '.rebase-mr-form')
+      .on('ajax:send', '.rebase-mr-form', () => {
        $('.rebase-mr-form :input').disable();
      });

-    $('.js-rebase-button').on('click', () => {
+    $(document)
+      .off('click', '.js-rebase-button')
+      .on('click', '.js-rebase-button', () => {
        $('.js-rebase-button').html("<i class='fa fa-spinner fa-spin'></i> Rebase in progress");
      });
   } else {
...
 /* global Vue, Flash, gl */
-/* eslint-disable no-param-reassign, no-bitwise */
+/* eslint-disable no-param-reassign */

 ((gl) => {
   gl.VueStage = Vue.extend({
@@ -9,7 +9,20 @@
         spinner: '<span class="fa fa-spinner fa-spin"></span>',
       };
     },
-    props: ['stage', 'svgs', 'match'],
+    props: {
+      stage: {
+        type: Object,
+        required: true,
+      },
+      svgs: {
+        type: DOMStringMap,
+        required: true,
+      },
+      match: {
+        type: Function,
+        required: true,
+      },
+    },
     methods: {
       fetchBuilds(e) {
         const areaExpanded = e.currentTarget.attributes['aria-expanded'];
@@ -24,6 +37,18 @@
           return flash;
         });
       },
+
+      keepGraph(e) {
+        const { target } = e;
+
+        if (target.className.indexOf('js-ci-action-icon') >= 0) return null;
+
+        if (
+          target.parentElement &&
+          (target.parentElement.className.indexOf('js-ci-action-icon') >= 0)
+        ) return null;
+
+        return e.stopPropagation();
+      },
     },
     computed: {
       buildsOrSpinner() {
@@ -64,7 +89,7 @@
         <ul class="dropdown-menu mini-pipeline-graph-dropdown-menu js-builds-dropdown-container">
           <div class="arrow-up"></div>
           <div
-            @click=''
+            @click='keepGraph($event)'
             :class="dropdownClass"
             class="js-builds-dropdown-list scrollable-menu"
             v-html="buildsOrSpinner"
...
@@ -377,6 +377,10 @@
     display: inline-block;
     padding: 5px;

+    &:nth-of-type(7n) {
+      padding-right: 0;
+    }
+
     .author_link {
       display: block;
     }
...
@@ -33,6 +33,18 @@ class Projects::IssuesController < Projects::ApplicationController
       @labels = LabelsFinder.new(current_user, project_id: @project.id, title: params[:label_name]).execute
     end

+    @users = []
+
+    if params[:assignee_id].present?
+      assignee = User.find_by_id(params[:assignee_id])
+      @users.push(assignee) if assignee
+    end
+
+    if params[:author_id].present?
+      author = User.find_by_id(params[:author_id])
+      @users.push(author) if author
+    end
+
     respond_to do |format|
       format.html
       format.atom { render layout: false }
...
@@ -126,9 +126,11 @@ module Ci
     end

     def tick_runner_queue
-      new_update = SecureRandom.hex
-      Gitlab::Redis.with { |redis| redis.set(runner_queue_key, new_update, ex: RUNNER_QUEUE_EXPIRY_TIME) }
-      new_update
+      SecureRandom.hex.tap do |new_update|
+        Gitlab::Redis.with do |redis|
+          redis.set(runner_queue_key, new_update, ex: RUNNER_QUEUE_EXPIRY_TIME)
+        end
+      end
     end

     def ensure_runner_queue_value
...
@@ -4,6 +4,8 @@ class Key < ActiveRecord::Base
   include AfterCommitQueue
   include Sortable

+  LAST_USED_AT_REFRESH_TIME = 1.day.to_i
+
   belongs_to :user

   before_validation :generate_fingerprint
@@ -52,7 +54,10 @@ class Key < ActiveRecord::Base
   end

   def update_last_used_at
-    UseKeyWorker.perform_async(self.id)
+    lease = Gitlab::ExclusiveLease.new("key_update_last_used_at:#{id}", timeout: LAST_USED_AT_REFRESH_TIME)
+    return unless lease.try_obtain
+
+    UseKeyWorker.perform_async(id)
   end

   def add_to_shell
...
@@ -40,11 +40,13 @@ class PipelineEntity < Grape::Entity
     end

     expose :path do |pipeline|
+      if pipeline.ref
         namespace_project_tree_path(
           pipeline.project.namespace,
           pipeline.project,
           id: pipeline.ref)
       end
+    end

     expose :tag?, as: :tag
     expose :branch?, as: :branch
...
@@ -6,6 +6,14 @@ module Ci
           runner.tick_runner_queue
         end
       end
+
+      return unless build.project.shared_runners_enabled?
+
+      Ci::Runner.shared.each do |runner|
+        if runner.can_pick?(build)
+          runner.tick_runner_queue
+        end
+      end
     end
   end
 end
+- content_for :page_specific_javascripts do
+  = page_specific_javascript_tag('merge_request_widget/ci_bundle.js')
+
 %h4
   Set by #{link_to_member(@project, @merge_request.merge_user, avatar: true)}
   to be merged automatically when the pipeline succeeds.
...
-%button.choose-btn.btn.btn-sm.js-choose-group-avatar-button
+%button.choose-btn.btn.btn-sm.js-choose-group-avatar-button{ type: 'button' }
   %i.fa.fa-paperclip
   %span Choose File ...
   &nbsp;
...
@@ -11,7 +11,7 @@
           class: "check_all_issues left"
   .issues-other-filters.filtered-search-container
     .filtered-search-input-container
-      %input.form-control.filtered-search{ placeholder: 'Search or filter results...', 'data-id' => 'filtered-search', 'data-project-id' => @project.id }
+      %input.form-control.filtered-search{ placeholder: 'Search or filter results...', 'data-id' => 'filtered-search', 'data-project-id' => @project.id, 'data-username-params' => @users.to_json(only: [:id, :username]) }
       = icon('filter')
       %button.clear-search.hidden{ type: 'button' }
         = icon('times')
@@ -47,6 +47,10 @@
             %li.filter-dropdown-item{ 'data-value' => 'none' }
               %button.btn.btn-link
                 No Assignee
+            - if current_user
+              %li.filter-dropdown-item{ 'data-value' => current_user.to_reference }
+                %button.btn.btn-link
+                  Assigned to me
             %li.divider
           %ul.filter-dropdown{ 'data-dynamic' => true, 'data-dropdown' => true }
             %li.filter-dropdown-item
@@ -139,3 +143,13 @@
       event.preventDefault();
       Turbolinks.visit(this.action + '&' + $(this).serialize());
     });
+
+    $(document).off('page:restore').on('page:restore', function (event) {
+      if (gl.FilteredSearchManager) {
+        new gl.FilteredSearchManager();
+      }
+      Issuable.init();
+      new gl.IssuableBulkActions({
+        prefixId: 'issue_',
+      });
+    });
---
title: Add some basic fixes for IE11/Edge
merge_request:
author:
---
title: Fixed bug where links in merge dropdown wouldn't work
merge_request:
author:
class AddIndexToCiBuildsForStatusRunnerIdAndType < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :ci_builds, [:status, :type, :runner_id]
end
def down
if index_exists?(:ci_builds, [:status, :type, :runner_id])
remove_index :ci_builds, column: [:status, :type, :runner_id]
end
end
end
class AddIndexToCiRunnersForIsShared < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
add_concurrent_index :ci_runners, :is_shared
end
def down
if index_exists?(:ci_runners, :is_shared)
remove_index :ci_runners, :is_shared
end
end
end
...@@ -11,7 +11,7 @@ ...@@ -11,7 +11,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170118200412) do ActiveRecord::Schema.define(version: 20170121130655) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "plpgsql" enable_extension "plpgsql"
...@@ -106,14 +106,14 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -106,14 +106,14 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.text "help_page_text_html" t.text "help_page_text_html"
t.text "shared_runners_text_html" t.text "shared_runners_text_html"
t.text "after_sign_up_text_html" t.text "after_sign_up_text_html"
t.boolean "sidekiq_throttling_enabled", default: false
t.string "sidekiq_throttling_queues"
t.decimal "sidekiq_throttling_factor"
t.boolean "housekeeping_enabled", default: true, null: false t.boolean "housekeeping_enabled", default: true, null: false
t.boolean "housekeeping_bitmaps_enabled", default: true, null: false t.boolean "housekeeping_bitmaps_enabled", default: true, null: false
t.integer "housekeeping_incremental_repack_period", default: 10, null: false t.integer "housekeeping_incremental_repack_period", default: 10, null: false
t.integer "housekeeping_full_repack_period", default: 50, null: false t.integer "housekeeping_full_repack_period", default: 50, null: false
t.integer "housekeeping_gc_period", default: 200, null: false t.integer "housekeeping_gc_period", default: 200, null: false
t.boolean "sidekiq_throttling_enabled", default: false
t.string "sidekiq_throttling_queues"
t.decimal "sidekiq_throttling_factor"
t.boolean "html_emails_enabled", default: true t.boolean "html_emails_enabled", default: true
t.string "plantuml_url" t.string "plantuml_url"
t.boolean "plantuml_enabled" t.boolean "plantuml_enabled"
...@@ -264,6 +264,7 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -264,6 +264,7 @@ ActiveRecord::Schema.define(version: 20170118200412) do
add_index "ci_builds", ["gl_project_id"], name: "index_ci_builds_on_gl_project_id", using: :btree add_index "ci_builds", ["gl_project_id"], name: "index_ci_builds_on_gl_project_id", using: :btree
add_index "ci_builds", ["project_id"], name: "index_ci_builds_on_project_id", using: :btree add_index "ci_builds", ["project_id"], name: "index_ci_builds_on_project_id", using: :btree
add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree add_index "ci_builds", ["runner_id"], name: "index_ci_builds_on_runner_id", using: :btree
add_index "ci_builds", ["status", "type", "runner_id"], name: "index_ci_builds_on_status_and_type_and_runner_id", using: :btree
add_index "ci_builds", ["status"], name: "index_ci_builds_on_status", using: :btree add_index "ci_builds", ["status"], name: "index_ci_builds_on_status", using: :btree
add_index "ci_builds", ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree add_index "ci_builds", ["token"], name: "index_ci_builds_on_token", unique: true, using: :btree
...@@ -367,6 +368,7 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -367,6 +368,7 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.boolean "locked", default: false, null: false t.boolean "locked", default: false, null: false
end end
add_index "ci_runners", ["is_shared"], name: "index_ci_runners_on_is_shared", using: :btree
add_index "ci_runners", ["locked"], name: "index_ci_runners_on_locked", using: :btree add_index "ci_runners", ["locked"], name: "index_ci_runners_on_locked", using: :btree
add_index "ci_runners", ["token"], name: "index_ci_runners_on_token", using: :btree add_index "ci_runners", ["token"], name: "index_ci_runners_on_token", using: :btree
...@@ -852,8 +854,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -852,8 +854,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.datetime "ldap_sync_last_successful_update_at" t.datetime "ldap_sync_last_successful_update_at"
t.datetime "ldap_sync_last_sync_at" t.datetime "ldap_sync_last_sync_at"
t.datetime "deleted_at" t.datetime "deleted_at"
t.boolean "lfs_enabled"
t.text "description_html" t.text "description_html"
t.boolean "lfs_enabled"
t.integer "parent_id" t.integer "parent_id"
t.integer "shared_runners_minutes_limit" t.integer "shared_runners_minutes_limit"
t.integer "repository_size_limit", limit: 8 t.integer "repository_size_limit", limit: 8
...@@ -864,7 +866,6 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -864,7 +866,6 @@ ActiveRecord::Schema.define(version: 20170118200412) do
add_index "namespaces", ["ldap_sync_last_successful_update_at"], name: "index_namespaces_on_ldap_sync_last_successful_update_at", using: :btree add_index "namespaces", ["ldap_sync_last_successful_update_at"], name: "index_namespaces_on_ldap_sync_last_successful_update_at", using: :btree
add_index "namespaces", ["ldap_sync_last_update_at"], name: "index_namespaces_on_ldap_sync_last_update_at", using: :btree add_index "namespaces", ["ldap_sync_last_update_at"], name: "index_namespaces_on_ldap_sync_last_update_at", using: :btree
add_index "namespaces", ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree add_index "namespaces", ["name", "parent_id"], name: "index_namespaces_on_name_and_parent_id", unique: true, using: :btree
add_index "namespaces", ["name"], name: "index_namespaces_on_name", unique: true, using: :btree
add_index "namespaces", ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"} add_index "namespaces", ["name"], name: "index_namespaces_on_name_trigram", using: :gin, opclasses: {"name"=>"gin_trgm_ops"}
add_index "namespaces", ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree add_index "namespaces", ["owner_id"], name: "index_namespaces_on_owner_id", using: :btree
add_index "namespaces", ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree add_index "namespaces", ["parent_id", "id"], name: "index_namespaces_on_parent_id_and_id", unique: true, using: :btree
...@@ -1100,9 +1101,9 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -1100,9 +1101,9 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.boolean "only_allow_merge_if_build_succeeds", default: false, null: false t.boolean "only_allow_merge_if_build_succeeds", default: false, null: false
t.boolean "has_external_issue_tracker" t.boolean "has_external_issue_tracker"
t.string "repository_storage", default: "default", null: false t.string "repository_storage", default: "default", null: false
t.boolean "repository_read_only"
t.boolean "request_access_enabled", default: false, null: false t.boolean "request_access_enabled", default: false, null: false
t.boolean "has_external_wiki" t.boolean "has_external_wiki"
t.boolean "repository_read_only"
t.boolean "lfs_enabled" t.boolean "lfs_enabled"
t.text "description_html" t.text "description_html"
t.boolean "only_allow_merge_if_all_discussions_are_resolved" t.boolean "only_allow_merge_if_all_discussions_are_resolved"
...@@ -1444,8 +1445,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -1444,8 +1445,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.datetime "otp_grace_period_started_at" t.datetime "otp_grace_period_started_at"
t.boolean "ldap_email", default: false, null: false t.boolean "ldap_email", default: false, null: false
t.boolean "external", default: false t.boolean "external", default: false
t.string "organization"
t.string "incoming_email_token" t.string "incoming_email_token"
t.string "organization"
t.boolean "authorized_projects_populated" t.boolean "authorized_projects_populated"
end end
...@@ -1489,8 +1490,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do ...@@ -1489,8 +1490,8 @@ ActiveRecord::Schema.define(version: 20170118200412) do
t.boolean "note_events", default: false, null: false t.boolean "note_events", default: false, null: false
t.boolean "enable_ssl_verification", default: true t.boolean "enable_ssl_verification", default: true
t.boolean "build_events", default: false, null: false t.boolean "build_events", default: false, null: false
t.string "token"
t.boolean "wiki_page_events", default: false, null: false t.boolean "wiki_page_events", default: false, null: false
t.string "token"
t.boolean "pipeline_events", default: false, null: false t.boolean "pipeline_events", default: false, null: false
t.boolean "confidential_issues_events", default: false, null: false t.boolean "confidential_issues_events", default: false, null: false
end end
......
 # GitLab Enterprise Edition documentation

+## University
+
+[University](university/README.md) contains guides to learn Git and GitLab through courses and videos.
+
 ## User documentation

 - [Account Security](user/account/security.md) Securing your account via two-factor authentication, etc.
@@ -23,7 +27,6 @@
 - [SSH](ssh/README.md) Setup your ssh keys and deploy keys for secure access to your projects.
 - [Webhooks](web_hooks/web_hooks.md) Let GitLab notify you when new code has been pushed to your project.
 - [Workflow](workflow/README.md) Using GitLab functionality and importing projects from GitHub and SVN.
-- [University](university/README.md) Learn Git and GitLab through videos and courses.
 - [Git Attributes](user/project/git_attributes.md) Managing Git attributes using a `.gitattributes` file.
 - [Git cheatsheet](https://gitlab.com/gitlab-com/marketing/raw/master/design/print/git-cheatsheet/print-pdf/git-cheatsheet.pdf) Download a PDF describing the most used Git operations.
@@ -65,7 +68,8 @@
 - [GitLab Pages configuration](pages/administration.md) Configure GitLab Pages.
 - [Elasticsearch](integration/elasticsearch.md) Enable Elasticsearch.
 - [GitLab GEO](gitlab-geo/README.md) Configure GitLab GEO, a secondary read-only GitLab instance.
-- [GitLab Performance Monitoring](administration/monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics.
+- [GitLab performance monitoring with InfluxDB](administration/monitoring/performance/introduction.md) Configure GitLab and InfluxDB for measuring performance metrics.
+- [GitLab performance monitoring with Prometheus](administration/monitoring/performance/prometheus.md) Configure GitLab and Prometheus for measuring performance metrics.
 - [Request Profiling](administration/monitoring/performance/request_profiling.md) Get a detailed profile on slow requests.
 - [Monitoring uptime](user/admin_area/monitoring/health_check.md) Check the server status using the health check endpoint.
 - [Debugging Tips](administration/troubleshooting/debug.md) Tips to debug problems when things go wrong
...
@@ -12,6 +12,11 @@ documents in order to understand and properly configure GitLab Performance Monitoring
 - [InfluxDB Schema](influxdb_schema.md)
 - [Grafana Install/Configuration](grafana_configuration.md)

+>**Note:**
+Omnibus GitLab 8.16 includes Prometheus as an additional tool to collect
+metrics. It will eventually replace InfluxDB once its metrics collection is
+on par. Read more in the [Prometheus documentation](prometheus.md).
+
 ## Introduction to GitLab Performance Monitoring

 GitLab Performance Monitoring makes it possible to measure a wide variety of statistics
...
# GitLab Prometheus
>**Notes:**
- Prometheus and the node exporter are bundled in the Omnibus GitLab package
since GitLab 8.16. For installations from source you will have to install
them yourself. Over subsequent releases additional GitLab metrics will be
captured.
- Prometheus services are off by default but will be on starting with GitLab 9.0.
[Prometheus] is a powerful time-series monitoring service, providing a flexible
platform for monitoring GitLab and other software products.
GitLab provides out-of-the-box monitoring with Prometheus, giving easy
access to high-quality time-series monitoring of GitLab services.
## Overview
Prometheus works by periodically connecting to data sources and collecting their
performance metrics. To view and work with the monitoring data, you can either
connect directly to Prometheus or utilize a dashboard tool like [Grafana].
## Configuring Prometheus
>**Note:**
Available since Omnibus GitLab 8.16. For installations from source you'll
have to install and configure it yourself.
To enable Prometheus:
1. Edit `/etc/gitlab/gitlab.rb`
1. Find and uncomment the following line, making sure it's set to `true`:
```ruby
prometheus['enable'] = true
```
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
take effect
By default, Prometheus will run as the `gitlab-prometheus` user and listen on
TCP port `9090` under localhost. If the [node exporter](#node-exporter) service
has been enabled, it will automatically be set up as a monitoring target for
Prometheus.
## Viewing Performance Metrics
After you have [enabled Prometheus](#configuring-prometheus), you can visit
`<your_domain_name>:9090` for the dashboard that Prometheus offers by default.
The performance data collected by Prometheus can be viewed directly in the
Prometheus console or through a compatible dashboard tool.
The Prometheus interface provides a [flexible query language][prom-query] to work
with the collected data, and lets you visualize the results.
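As an illustrative sketch (not part of the original docs), a query can also be issued against the Prometheus HTTP API, assuming Prometheus is listening on the default `localhost:9090`:

```shell
# Ask Prometheus for the current value of the built-in `up` metric,
# which reports whether each scrape target is reachable (1) or not (0).
curl 'http://localhost:9090/api/v1/query?query=up'
```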
For a more fully featured dashboard, Grafana can be used and has
[official support for Prometheus][prom-grafana].
## Prometheus exporters
There are a number of libraries and servers which help in exporting existing
metrics from third-party systems as Prometheus metrics. This is useful for cases
where it is not feasible to instrument a given system with Prometheus metrics
directly (for example, HAProxy or Linux system stats). You can read more in the
[Prometheus exporters and integrations documentation][prom-exporters].
While you can use any Prometheus exporter with your GitLab installation, the
exporters documented here are bundled in the Omnibus GitLab packages, making
them easy to configure and use.
### Node exporter
>**Note:**
Available since Omnibus GitLab 8.16. For installations from source you'll
have to install and configure it yourself.
The [node exporter] allows you to measure various machine resources such as
memory, disk and CPU utilization.
To enable the node exporter:
1. [Enable Prometheus](#configuring-prometheus)
1. Edit `/etc/gitlab/gitlab.rb`
1. Find and uncomment the following line, making sure it's set to `true`:
```ruby
node_exporter['enable'] = true
```
1. Save the file and [reconfigure GitLab][reconfigure] for the changes to
take effect
Prometheus will now automatically begin collecting performance data from
the node exporter. You can visit `<your_domain_name>:9100/metrics` for a
real-time representation of the metrics that are collected. Refresh the page
and you will see the data change.
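For a quick check from the command line (an illustrative example, not part of the original instructions, assuming the default port `9100` on the local host):

```shell
# Print the first lines of the raw text metrics exposed by the node exporter.
curl --silent 'http://localhost:9100/metrics' | head -n 20
```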
[grafana]: https://grafana.net
[node exporter]: https://github.com/prometheus/node_exporter
[prometheus]: https://prometheus.io
[prom-query]: https://prometheus.io/docs/querying/basics
[prom-grafana]: https://prometheus.io/docs/visualization/grafana/
[scrape-config]: https://prometheus.io/docs/operating/configuration/#%3Cscrape_config%3E
[prom-exporters]: https://prometheus.io/docs/instrumenting/exporters/
[reconfigure]: ../../restart_gitlab.md#omnibus-gitlab-reconfigure
@@ -106,6 +106,13 @@ that needs access to the GitLab API.
 Once you have your token, pass it to the API using either the `private_token`
 parameter or the `PRIVATE-TOKEN` header.
> [Introduced][ce-5951] in GitLab 8.15.
Personal Access Tokens can be created with one or more scopes that allow various actions
that a given token can perform. Although there are only two scopes available at the
moment – `read_user` and `api` – the groundwork has been laid to add more scopes easily.
At any time you can revoke any personal access token by just clicking **Revoke**.
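For illustration only (not part of the original text), the two forms might look like the following, assuming the v3 API endpoint of this release and a placeholder token:

```shell
# Pass the token in a header...
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v3/projects"

# ...or as a query parameter.
curl "https://gitlab.example.com/api/v3/projects?private_token=<your_access_token>"
```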
 ### Session Cookie
@@ -382,3 +389,4 @@ programming languages. Visit the [GitLab website] for a complete list.
 [GitLab website]: https://about.gitlab.com/applications/#api-clients "Clients using the GitLab API"
 [lib-api-url]: https://gitlab.com/gitlab-org/gitlab-ce/tree/master/lib/api/api.rb
 [ce-3749]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/3749
+[ce-5951]: https://gitlab.com/gitlab-org/gitlab-ce/merge_requests/5951
@@ -74,8 +74,10 @@ in the **Authorized applications** section under **Profile Settings > Applications.**
 ---

-As you can see, the default scope `api` is used, which is the only scope that
-GitLab supports so far. At any time you can revoke any access by just clicking
-**Revoke**.
+GitLab's OAuth applications support scopes, which allow various actions that any given
+application can perform. Although there are only two scopes available at the
+moment – `read_user` and `api` – the groundwork has been laid to add more scopes easily.
+
+At any time you can revoke any access by just clicking **Revoke**.

 [oauth]: http://oauth.net/2/ "OAuth website"
module Gitlab module Gitlab
module ImportExport module ImportExport
class MembersMapper class MembersMapper
attr_reader :missing_author_ids
def initialize(exported_members:, user:, project:) def initialize(exported_members:, user:, project:)
@exported_members = exported_members @exported_members = user.admin? ? exported_members : []
@user = user @user = user
@project = project @project = project
@missing_author_ids = []
# This needs to run first, as second call would be from #map # This needs to run first, as second call would be from #map
# which means project members already exist. # which means project members already exist.
...@@ -39,7 +36,6 @@ module Gitlab ...@@ -39,7 +36,6 @@ module Gitlab
def missing_keys_tracking_hash def missing_keys_tracking_hash
Hash.new do |_, key| Hash.new do |_, key|
@missing_author_ids << key
default_user_id default_user_id
end end
end end
...@@ -64,7 +60,7 @@ module Gitlab ...@@ -64,7 +60,7 @@ module Gitlab
end end
def find_project_user_query(member) def find_project_user_query(member)
user_arel[:username].eq(member['user']['username']).or(user_arel[:email].eq(member['user']['email'])) user_arel[:email].eq(member['user']['email']).or(user_arel[:username].eq(member['user']['username']))
end end
def user_arel def user_arel
......
...@@ -14,7 +14,7 @@ module Gitlab ...@@ -14,7 +14,7 @@ module Gitlab
priorities: :label_priorities, priorities: :label_priorities,
label: :project_label }.freeze label: :project_label }.freeze
USER_REFERENCES = %w[author_id assignee_id updated_by_id user_id created_by_id merge_user_id].freeze USER_REFERENCES = %w[author_id assignee_id updated_by_id user_id created_by_id merge_user_id resolved_by_id].freeze
PROJECT_REFERENCES = %w[project_id source_project_id gl_project_id target_project_id].freeze PROJECT_REFERENCES = %w[project_id source_project_id gl_project_id target_project_id].freeze
...@@ -80,17 +80,13 @@ module Gitlab ...@@ -80,17 +80,13 @@ module Gitlab
# is left. # is left.
def set_note_author def set_note_author
old_author_id = @relation_hash['author_id'] old_author_id = @relation_hash['author_id']
# Users with admin access can map users
@relation_hash['author_id'] = admin_user? ? @members_mapper.map[old_author_id] : @members_mapper.default_user_id
author = @relation_hash.delete('author') author = @relation_hash.delete('author')
update_note_for_missing_author(author['name']) if missing_author?(old_author_id) update_note_for_missing_author(author['name']) unless has_author?(old_author_id)
end end
def missing_author?(old_author_id) def has_author?(old_author_id)
!admin_user? || @members_mapper.missing_author_ids.include?(old_author_id) admin_user? && @members_mapper.map.keys.include?(old_author_id)
end end
def missing_author_note(updated_at, author_name) def missing_author_note(updated_at, author_name)
......
...@@ -43,6 +43,14 @@ describe 'Dropdown assignee', js: true, feature: true do ...@@ -43,6 +43,14 @@ describe 'Dropdown assignee', js: true, feature: true do
expect(page).to have_css(js_dropdown_assignee, visible: true) expect(page).to have_css(js_dropdown_assignee, visible: true)
end end
it 'shows assigned to me link' do
filtered_search.set('assignee:')
page.within js_dropdown_assignee do
expect(page).to have_content('Assigned to me')
end
end
it 'closes when the search bar is unfocused' do it 'closes when the search bar is unfocused' do
find('body').click() find('body').click()
...@@ -121,6 +129,14 @@ describe 'Dropdown assignee', js: true, feature: true do ...@@ -121,6 +129,14 @@ describe 'Dropdown assignee', js: true, feature: true do
filtered_search.set('assignee:') filtered_search.set('assignee:')
end end
it 'filters by current user' do
page.within js_dropdown_assignee do
click_button 'Assigned to me'
end
expect(filtered_search.value).to eq("assignee:#{user.to_reference}")
end
it 'fills in the assignee username when the assignee has not been filtered' do it 'fills in the assignee username when the assignee has not been filtered' do
click_assignee(user_jacob.name) click_assignee(user_jacob.name)
......
require 'spec_helper'
feature 'Merge immediately', :feature, :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project,
author: user,
title: 'Bug NS-04')
end
let(:pipeline) do
create(:ci_pipeline, project: project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch)
end
before { project.team << [user, :master] }
context 'when there is active pipeline for merge request' do
background do
create(:ci_build, pipeline: pipeline)
end
before do
login_as user
visit namespace_project_merge_request_path(merge_request.project.namespace, merge_request.project, merge_request)
end
it 'enables merge immediately' do
page.within '.mr-widget-body' do
find('.dropdown-toggle').click
click_link 'Merge Immediately'
expect(find('.js-merge-button')).to have_content('Merge in progress')
end
end
end
end
...@@ -32,21 +32,63 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do ...@@ -32,21 +32,63 @@ feature 'Merge When Pipeline Succeeds', :feature, :js do
expect(page).to have_button "Merge When Pipeline Succeeds" expect(page).to have_button "Merge When Pipeline Succeeds"
end end
context "Merge When Pipeline Succeeds enabled", js: true do describe 'enabling Merge When Pipeline Succeeds' do
before do shared_examples 'Merge When Pipeline Succeeds activator' do
it 'activates the Merge When Pipeline Succeeds feature' do
click_button "Merge When Pipeline Succeeds" click_button "Merge When Pipeline Succeeds"
end
it 'activates Merge When Pipeline Succeeds feature' do
expect(page).to have_link "Cancel Automatic Merge"
expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds." expect(page).to have_content "Set by #{user.name} to be merged automatically when the pipeline succeeds."
expect(page).to have_content "The source branch will not be removed." expect(page).to have_content "The source branch will not be removed."
expect(page).to have_link "Cancel Automatic Merge"
visit_merge_request(merge_request) # Needed to refresh the page visit_merge_request(merge_request) # Needed to refresh the page
expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i expect(page).to have_content /enabled an automatic merge when the pipeline for \h{8} succeeds/i
end end
end end
context "when enabled immediately" do
it_behaves_like 'Merge When Pipeline Succeeds activator'
end
context 'when enabled after pipeline status changed' do
before do
pipeline.run!
# We depend on merge request widget being reloaded
# so we have to wait for asynchronous call to reload it
# and have_content expectation handles that.
#
expect(page).to have_content "Pipeline ##{pipeline.id} running"
end
it_behaves_like 'Merge When Pipeline Succeeds activator'
end
context 'when enabled after it was previously canceled' do
before do
click_button "Merge When Pipeline Succeeds"
click_link "Cancel Automatic Merge"
end
it_behaves_like 'Merge When Pipeline Succeeds activator'
end
context 'when it was enabled and then canceled' do
let(:merge_request) do
create(:merge_request_with_diffs,
:merge_when_build_succeeds,
source_project: project,
title: 'Bug NS-04',
author: user,
merge_user: user)
end
before do
click_link "Cancel Automatic Merge"
end
it_behaves_like 'Merge When Pipeline Succeeds activator'
end
end
end end
context 'when merge when pipeline succeeds is enabled' do context 'when merge when pipeline succeeds is enabled' do
......
...@@ -2,7 +2,7 @@ require 'spec_helper' ...@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::ImportExport::MembersMapper, services: true do describe Gitlab::ImportExport::MembersMapper, services: true do
describe 'map members' do describe 'map members' do
let(:user) { create(:user, authorized_projects_populated: true) } let(:user) { create(:admin, authorized_projects_populated: true) }
let(:project) { create(:project, :public, name: 'searchable_project') } let(:project) { create(:project, :public, name: 'searchable_project') }
let(:user2) { create(:user, authorized_projects_populated: true) } let(:user2) { create(:user, authorized_projects_populated: true) }
let(:exported_user_id) { 99 } let(:exported_user_id) { 99 }
...@@ -24,7 +24,7 @@ describe Gitlab::ImportExport::MembersMapper, services: true do ...@@ -24,7 +24,7 @@ describe Gitlab::ImportExport::MembersMapper, services: true do
{ {
"id" => exported_user_id, "id" => exported_user_id,
"email" => user2.email, "email" => user2.email,
"username" => user2.username "username" => 'test'
} }
}, },
{ {
...@@ -48,6 +48,10 @@ describe Gitlab::ImportExport::MembersMapper, services: true do ...@@ -48,6 +48,10 @@ describe Gitlab::ImportExport::MembersMapper, services: true do
exported_members: exported_members, user: user, project: project) exported_members: exported_members, user: user, project: project)
end end
it 'includes the exported user ID in the map' do
expect(members_mapper.map.keys).to include(exported_user_id)
end
it 'maps a project member' do it 'maps a project member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id) expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end end
...@@ -56,12 +60,6 @@ describe Gitlab::ImportExport::MembersMapper, services: true do ...@@ -56,12 +60,6 @@ describe Gitlab::ImportExport::MembersMapper, services: true do
expect(members_mapper.map[-1]).to eq(user.id) expect(members_mapper.map[-1]).to eq(user.id)
end end
it 'updates missing author IDs on missing project member' do
members_mapper.map[-1]
expect(members_mapper.missing_author_ids.first).to eq(-1)
end
it 'has invited members with no user' do it 'has invited members with no user' do
members_mapper.map members_mapper.map
...@@ -74,5 +72,25 @@ describe Gitlab::ImportExport::MembersMapper, services: true do ...@@ -74,5 +72,25 @@ describe Gitlab::ImportExport::MembersMapper, services: true do
expect(user.authorized_project?(project)).to be true expect(user.authorized_project?(project)).to be true
expect(user2.authorized_project?(project)).to be true expect(user2.authorized_project?(project)).to be true
end end
context 'user is not an admin' do
let(:user) { create(:user, authorized_projects_populated: true) }
it 'does not map a project member' do
expect(members_mapper.map[exported_user_id]).to eq(user.id)
end
it 'defaults to importer project member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id)
end
end
context 'chooses the one with an email first' do
let(:user3) { create(:user, username: 'test') }
it 'maps the project member that has a matching email first' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end
end
end end
end end
...@@ -3,7 +3,7 @@ require 'spec_helper' ...@@ -3,7 +3,7 @@ require 'spec_helper'
describe Gitlab::ImportExport::RelationFactory, lib: true do describe Gitlab::ImportExport::RelationFactory, lib: true do
let(:project) { create(:empty_project) } let(:project) { create(:empty_project) }
let(:members_mapper) { double('members_mapper').as_null_object } let(:members_mapper) { double('members_mapper').as_null_object }
let(:user) { create(:user) } let(:user) { create(:admin) }
let(:created_object) do let(:created_object) do
described_class.create(relation_sym: relation_sym, described_class.create(relation_sym: relation_sym,
relation_hash: relation_hash, relation_hash: relation_hash,
...@@ -122,4 +122,60 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do ...@@ -122,4 +122,60 @@ describe Gitlab::ImportExport::RelationFactory, lib: true do
expect(created_object.values).not_to include(99) expect(created_object.values).not_to include(99)
end end
end end
context 'Notes user references' do
let(:relation_sym) { :notes }
let(:new_user) { create(:user) }
let(:exported_member) do
{
"id" => 111,
"access_level" => 30,
"source_id" => 1,
"source_type" => "Project",
"user_id" => 3,
"notification_level" => 3,
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"user" => {
"id" => 999,
"email" => new_user.email,
"username" => new_user.username
}
}
end
let(:relation_hash) do
{
"id" => 4947,
"note" => "merged",
"noteable_type" => "MergeRequest",
"author_id" => 999,
"created_at" => "2016-11-18T09:29:42.634Z",
"updated_at" => "2016-11-18T09:29:42.634Z",
"project_id" => 1,
"attachment" => {
"url" => nil
},
"noteable_id" => 377,
"system" => true,
"author" => {
"name" => "Administrator"
},
"events" => [
]
}
end
let(:members_mapper) do
Gitlab::ImportExport::MembersMapper.new(
exported_members: [exported_member],
user: user,
project: project)
end
it 'maps the right author to the imported note' do
expect(created_object.author).to eq(new_user)
end
end
end end
...@@ -30,13 +30,32 @@ describe Key, models: true do ...@@ -30,13 +30,32 @@ describe Key, models: true do
end end
describe "#update_last_used_at" do describe "#update_last_used_at" do
it "enqueues a UseKeyWorker job" do let(:key) { create(:key) }
key = create(:key)
context 'when key was not updated during the last day' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
and_return('000000')
end
it 'enqueues a UseKeyWorker job' do
expect(UseKeyWorker).to receive(:perform_async).with(key.id) expect(UseKeyWorker).to receive(:perform_async).with(key.id)
key.update_last_used_at key.update_last_used_at
end end
end end
context 'when key was updated during the last day' do
before do
allow_any_instance_of(Gitlab::ExclusiveLease).to receive(:try_obtain).
and_return(false)
end
it 'does not enqueue a UseKeyWorker job' do
expect(UseKeyWorker).not_to receive(:perform_async)
key.update_last_used_at
end
end
end
end end
context "validation of uniqueness (based on fingerprint uniqueness)" do context "validation of uniqueness (based on fingerprint uniqueness)" do
......
...@@ -1963,7 +1963,7 @@ describe MergeRequest, models: true do ...@@ -1963,7 +1963,7 @@ describe MergeRequest, models: true do
status: status) status: status)
end end
let(:project) { create(:project, :public, only_allow_merge_if_build_succeeds: true) } let(:project) { create(:project, :public, :repository, only_allow_merge_if_build_succeeds: true) }
let(:developer) { create(:user) } let(:developer) { create(:user) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, source_project: project) } let(:merge_request) { create(:merge_request, source_project: project) }
......
require 'spec_helper' require 'spec_helper'
describe 'cycle analytics events' do describe 'cycle analytics events' do
include ApiHelpers
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project, public_builds: false) } let(:project) { create(:project, public_builds: false) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) } let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
...@@ -20,19 +22,19 @@ describe 'cycle analytics events' do ...@@ -20,19 +22,19 @@ describe 'cycle analytics events' do
it 'lists the issue events' do it 'lists the issue events' do
get namespace_project_cycle_analytics_issue_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_issue_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s
first_issue_iid = Issue.order(created_at: :desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid) expect(json_response['events'].first['iid']).to eq(first_issue_iid)
end end
it 'lists the plan events' do it 'lists the plan events' do
get namespace_project_cycle_analytics_plan_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_plan_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty first_mr_short_sha = project.merge_requests.sort(:created_asc).first.commits.first.short_id
expect(json_response['events'].first['short_sha']).to eq(MergeRequest.last.commits.first.short_id) expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['short_sha']).to eq(first_mr_short_sha)
end end
it 'lists the code events' do it 'lists the code events' do
...@@ -40,7 +42,7 @@ describe 'cycle analytics events' do ...@@ -40,7 +42,7 @@ describe 'cycle analytics events' do
expect(json_response['events']).not_to be_empty expect(json_response['events']).not_to be_empty
first_mr_iid = project.merge_requests.order(id: :desc).pluck(:iid).first.to_s first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s
expect(json_response['events'].first['iid']).to eq(first_mr_iid) expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end end
...@@ -49,17 +51,15 @@ describe 'cycle analytics events' do ...@@ -49,17 +51,15 @@ describe 'cycle analytics events' do
get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['date']).not_to be_empty expect(json_response['events'].first['date']).not_to be_empty
end end
it 'lists the review events' do it 'lists the review events' do
get namespace_project_cycle_analytics_review_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_review_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty first_mr_iid = project.merge_requests.sort(:created_desc).pluck(:iid).first.to_s
first_mr_iid = MergeRequest.order(created_at: :desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_mr_iid) expect(json_response['events'].first['iid']).to eq(first_mr_iid)
end end
...@@ -67,35 +67,32 @@ describe 'cycle analytics events' do ...@@ -67,35 +67,32 @@ describe 'cycle analytics events' do
get namespace_project_cycle_analytics_staging_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_staging_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['date']).not_to be_empty expect(json_response['events'].first['date']).not_to be_empty
end end
it 'lists the production events' do it 'lists the production events' do
get namespace_project_cycle_analytics_production_path(project.namespace, project, format: :json) get namespace_project_cycle_analytics_production_path(project.namespace, project, format: :json)
expect(json_response['events']).not_to be_empty first_issue_iid = project.issues.sort(:created_desc).pluck(:iid).first.to_s
first_issue_iid = Issue.order(created_at: :desc).pluck(:iid).first.to_s
expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['iid']).to eq(first_issue_iid) expect(json_response['events'].first['iid']).to eq(first_issue_iid)
end end
context 'specific branch' do context 'specific branch' do
it 'lists the test events' do it 'lists the test events' do
branch = MergeRequest.first.source_branch branch = project.merge_requests.first.source_branch
get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json, branch: branch) get namespace_project_cycle_analytics_test_path(project.namespace, project, format: :json, branch: branch)
expect(json_response['events']).not_to be_empty expect(json_response['events']).not_to be_empty
expect(json_response['events'].first['date']).not_to be_empty expect(json_response['events'].first['date']).not_to be_empty
end end
end end
context 'with private project and builds' do context 'with private project and builds' do
before do before do
ProjectMember.first.update(access_level: Gitlab::Access::GUEST) project.members.first.update(access_level: Gitlab::Access::GUEST)
end end
it 'does not list the test events' do it 'does not list the test events' do
...@@ -118,10 +115,6 @@ describe 'cycle analytics events' do ...@@ -118,10 +115,6 @@ describe 'cycle analytics events' do
end end
end end
def json_response
JSON.parse(response.body)
end
def create_cycle def create_cycle
milestone = create(:milestone, project: project) milestone = create(:milestone, project: project)
issue.update(milestone: milestone) issue.update(milestone: milestone)
......
...@@ -134,5 +134,17 @@ describe PipelineEntity do ...@@ -134,5 +134,17 @@ describe PipelineEntity do
expect(subject).not_to have_key(:yaml_errors) expect(subject).not_to have_key(:yaml_errors)
end end
end end
context 'when pipeline ref is empty' do
let(:pipeline) { create(:ci_empty_pipeline) }
before do
allow(pipeline).to receive(:ref).and_return(nil)
end
it 'does not generate branch path' do
expect(subject[:ref][:path]).to be_nil
end
end
end end
end end
require 'spec_helper'
describe Ci::UpdateBuildQueueService, :services do
let(:project) { create(:project) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when updating specific runners' do
let(:runner) { create(:ci_runner) }
context 'when there are runner that can pick build' do
before { build.project.runners << runner }
it 'ticks runner queue value' do
expect { subject.execute(build) }
.to change { runner.ensure_runner_queue_value }
end
end
context 'when there are no runners that can pick build' do
it 'does not tick runner queue value' do
expect { subject.execute(build) }
.not_to change { runner.ensure_runner_queue_value }
end
end
end
context 'when updating shared runners' do
let(:runner) { create(:ci_runner, :shared) }
context 'when there are runner that can pick build' do
it 'ticks runner queue value' do
expect { subject.execute(build) }
.to change { runner.ensure_runner_queue_value }
end
end
context 'when there are no runners that can pick build' do
before { build.tag_list = [:docker] }
it 'does not tick runner queue value' do
expect { subject.execute(build) }
.not_to change { runner.ensure_runner_queue_value }
end
end
end
end
# Explanation of the scripts:
# https://gitlab.com/gitlab-examples/kubernetes-deploy/blob/master/README.md
image: registry.gitlab.com/gitlab-examples/kubernetes-deploy
variables:
# Application deployment domain
KUBE_DOMAIN: domain.example.com
stages:
- build
- test
- review
- staging
- production
build:
stage: build
script:
- command build
only:
- branches
production:
stage: production
variables:
CI_ENVIRONMENT_URL: http://production.$KUBE_DOMAIN
script:
- command deploy
environment:
name: production
url: http://production.$KUBE_DOMAIN
when: manual
only:
- master
staging:
stage: staging
variables:
CI_ENVIRONMENT_URL: http://staging.$KUBE_DOMAIN
script:
- command deploy
environment:
name: staging
url: http://staging.$KUBE_DOMAIN
only:
- master
review:
stage: review
variables:
CI_ENVIRONMENT_URL: http://$CI_ENVIRONMENT_SLUG.$KUBE_DOMAIN
script:
- command deploy
environment:
name: review/$CI_BUILD_REF_NAME
url: http://$CI_ENVIRONMENT_SLUG.$KUBE_DOMAIN
on_stop: stop_review
only:
- branches
except:
- master
stop_review:
stage: review
variables:
GIT_STRATEGY: none
script:
- command destroy
environment:
name: review/$CI_BUILD_REF_NAME
action: stop
when: manual
only:
- branches
except:
- master