Commit f019a07d authored by Valery Sizov

Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ee into ce-to-ee-2017-12-12[ci skip]

parents 9d4e5e30 38b06690
@@ -2,6 +2,7 @@
 /* global Pager */
 import Cookies from 'js-cookie';
+import { localTimeAgo } from './lib/utils/datetime_utility';
 class Activities {
   constructor() {
@@ -15,7 +16,7 @@ class Activities {
   }
   updateTooltips() {
-    gl.utils.localTimeAgo($('.js-timeago', '.content_list'));
+    localTimeAgo($('.js-timeago', '.content_list'));
   }
   reloadActivities() {
......
@@ -4,6 +4,7 @@
 /* global Pager */
 import { pluralize } from './lib/utils/text_utility';
+import { localTimeAgo } from './lib/utils/datetime_utility';
 export default (function () {
   const CommitsList = {};
@@ -91,7 +92,7 @@ export default (function () {
       $commitsHeadersLast.find('span.commits-count').text(`${commitsCount} ${pluralize('commit', commitsCount)}`);
     }
-    gl.utils.localTimeAgo($processedData.find('.js-timeago'));
+    localTimeAgo($processedData.find('.js-timeago'));
     return processedData;
   };
......
 /* eslint-disable func-names, space-before-function-paren, wrap-iife, quotes, no-var, object-shorthand, consistent-return, no-unused-vars, comma-dangle, vars-on-top, prefer-template, max-len */
+import { localTimeAgo } from './lib/utils/datetime_utility';
 export default class Compare {
   constructor(opts) {
@@ -81,7 +82,7 @@ export default class Compare {
         loading.hide();
         $target.html(html);
         var className = '.' + $target[0].className.replace(' ', '.');
-        gl.utils.localTimeAgo($('.js-timeago', className));
+        localTimeAgo($('.js-timeago', className));
       }
     });
   }
......
 <script>
   import actionBtn from './action_btn.vue';
+  import { getTimeago } from '../../lib/utils/datetime_utility';
   export default {
     props: {
@@ -21,7 +22,7 @@
     },
     computed: {
       timeagoDate() {
-        return gl.utils.getTimeago().format(this.deployKey.created_at);
+        return getTimeago().format(this.deployKey.created_at);
       },
       editDeployKeyPath() {
         return `${this.endpoint}/${this.deployKey.id}/edit`;
......
@@ -2,6 +2,7 @@
 /* global NoteModel */
 import Vue from 'vue';
+import { localTimeAgo } from '../../lib/utils/datetime_utility';
 class DiscussionModel {
   constructor (discussionId) {
@@ -71,7 +72,7 @@ class DiscussionModel {
         $(`${discussionSelector} .discussion-header`).append(data.discussion_headline_html);
       }
-      gl.utils.localTimeAgo($('.js-timeago', `${discussionSelector}`));
+      localTimeAgo($('.js-timeago', `${discussionSelector}`));
     } else {
       $discussionHeadline.remove();
     }
......
@@ -3,6 +3,7 @@ import { visitUrl } from './lib/utils/url_utility';
 import bp from './breakpoints';
 import { bytesToKiB } from './lib/utils/number_utils';
 import { setCiStatusFavicon } from './lib/utils/common_utils';
+import { timeFor } from './lib/utils/datetime_utility';
 export default class Job {
   constructor(options) {
@@ -261,7 +262,7 @@ export default class Job {
     if ($date.length) {
       const date = $date.text();
       return $date.text(
-        gl.utils.timeFor(new Date(date.replace(/([0-9]+)-([0-9]+)-([0-9]+)/g, '$1/$2/$3')), ' '),
+        timeFor(new Date(date.replace(/([0-9]+)-([0-9]+)-([0-9]+)/g, '$1/$2/$3'))),
       );
     }
   }
......
@@ -2,6 +2,8 @@ import axios from 'axios';
 import csrf from './csrf';
 axios.defaults.headers.common[csrf.headerKey] = csrf.token;
+// Used by Rails to check if it is a valid XHR request
+axios.defaults.headers.common['X-Requested-With'] = 'XMLHttpRequest';
 // Maintain a global counter for active requests
 // see: spec/support/wait_for_requests.rb
......
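The header added above is what Rack (and therefore Rails' `request.xhr?`) checks when deciding whether a request came from `XMLHttpRequest`. A minimal sketch, using plain Rack rather than GitLab code, of how that detection works:

```ruby
# Sketch only: Rack::Request#xhr? compares the X-Requested-With header against
# "XMLHttpRequest", which is exactly the value the axios default above now sends.
require 'rack'

env = Rack::MockRequest.env_for('/', 'HTTP_X_REQUESTED_WITH' => 'XMLHttpRequest')
Rack::Request.new(env).xhr? # => true

Rack::Request.new(Rack::MockRequest.env_for('/')).xhr? # => false
```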
-class Cache {
+export default class Cache {
   constructor() {
     this.internalStorage = { };
   }
@@ -15,5 +15,3 @@ class Cache {
     delete this.internalStorage[key];
   }
 }
-export default Cache;
-/* eslint-disable import/prefer-default-export */
 export const BYTES_IN_KIB = 1024;
 export const HIDDEN_CLASS = 'hidden';
-/* eslint-disable func-names, space-before-function-paren, wrap-iife, no-var, no-param-reassign, no-cond-assign, comma-dangle, no-unused-expressions, prefer-template, max-len */
 import timeago from 'timeago.js';
 import dateFormat from 'vendor/date.format';
 import { pluralize } from './text_utility';
 import {
   lang,
   s__,
@@ -12,27 +9,65 @@ import {
 window.timeago = timeago;
 window.dateFormat = dateFormat;
-(function() {
-  (function(w) {
-    var base;
-    var timeagoInstance;
-    if (w.gl == null) {
-      w.gl = {};
-    }
-    if ((base = w.gl).utils == null) {
-      base.utils = {};
-    }
-    w.gl.utils.days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
-    w.gl.utils.formatDate = function(datetime) {
-      return dateFormat(datetime, 'mmm d, yyyy h:MMtt Z');
-    };
-    w.gl.utils.getDayName = function(date) {
-      return this.days[date.getDay()];
-    };
+/**
+ * Given a date object returns the day of the week in English
+ * @param {date} date
+ * @returns {String}
+ */
+export const getDayName = date => ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][date.getDay()];
+/**
+ * @example
+ * dateFormat('2017-12-05','mmm d, yyyy h:MMtt Z' ) -> "Dec 5, 2017 12:00am GMT+0000"
+ * @param {date} datetime
+ * @returns {String}
+ */
+export const formatDate = datetime => dateFormat(datetime, 'mmm d, yyyy h:MMtt Z');
+let timeagoInstance;
+/**
+ * Sets a timeago Instance
+ */
+export function getTimeago() {
+  if (!timeagoInstance) {
+    const localeRemaining = function getLocaleRemaining(number, index) {
+      return [
+        [s__('Timeago|less than a minute ago'), s__('Timeago|in a while')],
+        [s__('Timeago|less than a minute ago'), s__('Timeago|%s seconds remaining')],
+        [s__('Timeago|about a minute ago'), s__('Timeago|1 minute remaining')],
+        [s__('Timeago|%s minutes ago'), s__('Timeago|%s minutes remaining')],
+        [s__('Timeago|about an hour ago'), s__('Timeago|1 hour remaining')],
+        [s__('Timeago|about %s hours ago'), s__('Timeago|%s hours remaining')],
+        [s__('Timeago|a day ago'), s__('Timeago|1 day remaining')],
+        [s__('Timeago|%s days ago'), s__('Timeago|%s days remaining')],
+        [s__('Timeago|a week ago'), s__('Timeago|1 week remaining')],
+        [s__('Timeago|%s weeks ago'), s__('Timeago|%s weeks remaining')],
+        [s__('Timeago|a month ago'), s__('Timeago|1 month remaining')],
+        [s__('Timeago|%s months ago'), s__('Timeago|%s months remaining')],
+        [s__('Timeago|a year ago'), s__('Timeago|1 year remaining')],
+        [s__('Timeago|%s years ago'), s__('Timeago|%s years remaining')],
+      ][index];
+    };
+    const locale = function getLocale(number, index) {
+      return [
+        [s__('Timeago|less than a minute ago'), s__('Timeago|in a while')],
+        [s__('Timeago|less than a minute ago'), s__('Timeago|in %s seconds')],
+        [s__('Timeago|about a minute ago'), s__('Timeago|in 1 minute')],
+        [s__('Timeago|%s minutes ago'), s__('Timeago|in %s minutes')],
+        [s__('Timeago|about an hour ago'), s__('Timeago|in 1 hour')],
+        [s__('Timeago|about %s hours ago'), s__('Timeago|in %s hours')],
+        [s__('Timeago|a day ago'), s__('Timeago|in 1 day')],
+        [s__('Timeago|%s days ago'), s__('Timeago|in %s days')],
+        [s__('Timeago|a week ago'), s__('Timeago|in 1 week')],
+        [s__('Timeago|%s weeks ago'), s__('Timeago|in %s weeks')],
+        [s__('Timeago|a month ago'), s__('Timeago|in 1 month')],
+        [s__('Timeago|%s months ago'), s__('Timeago|in %s months')],
+        [s__('Timeago|a year ago'), s__('Timeago|in 1 year')],
+        [s__('Timeago|%s years ago'), s__('Timeago|in %s years')],
+      ][index];
+    };
@@ -41,92 +76,69 @@ window.dateFormat = dateFormat;
-    w.gl.utils.localTimeAgo = function($timeagoEls, setTimeago = true) {
-      $timeagoEls.each((i, el) => {
-        if (setTimeago) {
-          $(el).tooltip({
-            template: '<div class="tooltip local-timeago" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>'
-          });
-        }
-        el.classList.add('js-timeago-render');
-      });
-      gl.utils.renderTimeago($timeagoEls);
-    };
-    w.gl.utils.getTimeago = function() {
-      var locale;
-      if (!timeagoInstance) {
-        const localeRemaining = function(number, index) {
-          return [
-            [s__('Timeago|less than a minute ago'), s__('Timeago|in a while')],
-            [s__('Timeago|less than a minute ago'), s__('Timeago|%s seconds remaining')],
-            [s__('Timeago|about a minute ago'), s__('Timeago|1 minute remaining')],
-            [s__('Timeago|%s minutes ago'), s__('Timeago|%s minutes remaining')],
-            [s__('Timeago|about an hour ago'), s__('Timeago|1 hour remaining')],
-            [s__('Timeago|about %s hours ago'), s__('Timeago|%s hours remaining')],
-            [s__('Timeago|a day ago'), s__('Timeago|1 day remaining')],
-            [s__('Timeago|%s days ago'), s__('Timeago|%s days remaining')],
-            [s__('Timeago|a week ago'), s__('Timeago|1 week remaining')],
-            [s__('Timeago|%s weeks ago'), s__('Timeago|%s weeks remaining')],
-            [s__('Timeago|a month ago'), s__('Timeago|1 month remaining')],
-            [s__('Timeago|%s months ago'), s__('Timeago|%s months remaining')],
-            [s__('Timeago|a year ago'), s__('Timeago|1 year remaining')],
-            [s__('Timeago|%s years ago'), s__('Timeago|%s years remaining')]
-          ][index];
-        };
-        locale = function(number, index) {
-          return [
-            [s__('Timeago|less than a minute ago'), s__('Timeago|in a while')],
-            [s__('Timeago|less than a minute ago'), s__('Timeago|in %s seconds')],
-            [s__('Timeago|about a minute ago'), s__('Timeago|in 1 minute')],
-            [s__('Timeago|%s minutes ago'), s__('Timeago|in %s minutes')],
-            [s__('Timeago|about an hour ago'), s__('Timeago|in 1 hour')],
-            [s__('Timeago|about %s hours ago'), s__('Timeago|in %s hours')],
-            [s__('Timeago|a day ago'), s__('Timeago|in 1 day')],
-            [s__('Timeago|%s days ago'), s__('Timeago|in %s days')],
-            [s__('Timeago|a week ago'), s__('Timeago|in 1 week')],
-            [s__('Timeago|%s weeks ago'), s__('Timeago|in %s weeks')],
-            [s__('Timeago|a month ago'), s__('Timeago|in 1 month')],
-            [s__('Timeago|%s months ago'), s__('Timeago|in %s months')],
-            [s__('Timeago|a year ago'), s__('Timeago|in 1 year')],
-            [s__('Timeago|%s years ago'), s__('Timeago|in %s years')]
-          ][index];
-        };
-        timeago.register(lang, locale);
-        timeago.register(`${lang}-remaining`, localeRemaining);
-        timeagoInstance = timeago();
-      }
-      return timeagoInstance;
-    };
-    w.gl.utils.timeFor = function(time, suffix, expiredLabel) {
-      var timefor;
-      if (!time) {
-        return '';
-      }
-      if (new Date(time) < new Date()) {
-        expiredLabel || (expiredLabel = s__('Timeago|Past due'));
-        timefor = expiredLabel;
-      } else {
-        timefor = gl.utils.getTimeago().format(time, `${lang}-remaining`).trim();
-      }
-      return timefor;
-    };
-    w.gl.utils.renderTimeago = function($els) {
-      const timeagoEls = $els || document.querySelectorAll('.js-timeago-render');
-      // timeago.js sets timeouts internally for each timeago value to be updated in real time
-      gl.utils.getTimeago().render(timeagoEls, lang);
-    };
-    w.gl.utils.getDayDifference = function(a, b) {
-      var millisecondsPerDay = 1000 * 60 * 60 * 24;
-      var date1 = Date.UTC(a.getFullYear(), a.getMonth(), a.getDate());
-      var date2 = Date.UTC(b.getFullYear(), b.getMonth(), b.getDate());
-      return Math.floor((date2 - date1) / millisecondsPerDay);
-    };
-  })(window);
-}).call(window);
+    timeago.register(lang, locale);
+    timeago.register(`${lang}-remaining`, localeRemaining);
+    timeagoInstance = timeago();
+  }
+  return timeagoInstance;
+}
+/**
+ * For the given element, renders a timeago instance.
+ * @param {jQuery} $els
+ */
+export const renderTimeago = ($els) => {
+  const timeagoEls = $els || document.querySelectorAll('.js-timeago-render');
+  // timeago.js sets timeouts internally for each timeago value to be updated in real time
+  getTimeago().render(timeagoEls, lang);
+};
+/**
+ * For the given elements, sets a tooltip with a formatted date.
+ * @param {jQuery}
+ * @param {Boolean} setTimeago
+ */
+export const localTimeAgo = ($timeagoEls, setTimeago = true) => {
+  $timeagoEls.each((i, el) => {
+    if (setTimeago) {
+      // Recreate with custom template
+      $(el).tooltip({
+        template: '<div class="tooltip local-timeago" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
+      });
+    }
+    el.classList.add('js-timeago-render');
+  });
+  renderTimeago($timeagoEls);
+};
+/**
+ * Returns remaining or passed time over the given time.
+ * @param {*} time
+ * @param {*} expiredLabel
+ */
+export const timeFor = (time, expiredLabel) => {
+  if (!time) {
+    return '';
+  }
+  if (new Date(time) < new Date()) {
+    return expiredLabel || s__('Timeago|Past due');
+  }
+  return getTimeago().format(time, `${lang}-remaining`).trim();
+};
+export const getDayDifference = (a, b) => {
+  const millisecondsPerDay = 1000 * 60 * 60 * 24;
+  const date1 = Date.UTC(a.getFullYear(), a.getMonth(), a.getDate());
+  const date2 = Date.UTC(b.getFullYear(), b.getMonth(), b.getDate());
+  return Math.floor((date2 - date1) / millisecondsPerDay);
+};
 /**
  * Port of ruby helper time_interval_in_words.
@@ -161,3 +173,10 @@ export function dateInWords(date, abbreviated = false) {
   return `${monthName} ${date.getDate()}, ${year}`;
 }
+window.gl = window.gl || {};
+window.gl.utils = {
+  ...(window.gl.utils || {}),
+  getTimeago,
+  localTimeAgo,
+};
@@ -28,7 +28,7 @@ import './commit/image_file';
 // lib/utils
 import { handleLocationHash } from './lib/utils/common_utils';
-import './lib/utils/datetime_utility';
+import { localTimeAgo, renderTimeago } from './lib/utils/datetime_utility';
 import { getLocationHash, visitUrl } from './lib/utils/url_utility';
 // behaviors
@@ -195,7 +195,7 @@ $(function () {
     return $(this).parents('form').submit();
     // Form submitter
   });
-  gl.utils.localTimeAgo($('abbr.timeago, .js-timeago'), true);
+  localTimeAgo($('abbr.timeago, .js-timeago'), true);
   // Disable form buttons while a form is submitting
   $body.on('ajax:complete, ajax:beforeSend, submit', 'form', function (e) {
     var buttons;
@@ -287,7 +287,7 @@ $(function () {
   loadAwardsHandler();
   new Aside();
-  gl.utils.renderTimeago();
+  renderTimeago();
   $(document).trigger('init.scrolling-tabs');
......
@@ -14,6 +14,7 @@ import {
 import { getLocationHash } from './lib/utils/url_utility';
 import initDiscussionTab from './image_diff/init_discussion_tab';
 import Diff from './diff';
+import { localTimeAgo } from './lib/utils/datetime_utility';
 import syntaxHighlight from './syntax_highlight';
 /* eslint-disable max-len */
@@ -248,7 +249,7 @@ import syntaxHighlight from './syntax_highlight';
       url: `${source}.json`,
       success: (data) => {
         document.querySelector('div#commits').innerHTML = data.html;
-        gl.utils.localTimeAgo($('.js-timeago', 'div#commits'));
+        localTimeAgo($('.js-timeago', 'div#commits'));
         this.commitsLoaded = true;
         this.scrollToElement('#commits');
       },
@@ -295,7 +296,7 @@ import syntaxHighlight from './syntax_highlight';
           gl.diffNotesCompileComponents();
         }
-        gl.utils.localTimeAgo($('.js-timeago', 'div#diffs'));
+        localTimeAgo($('.js-timeago', 'div#diffs'));
         syntaxHighlight($('#diffs .js-syntax-highlight'));
         if (this.diffViewType() === 'parallel' && this.isDiffAction(this.currentAction)) {
......
@@ -2,6 +2,7 @@
 /* global Issuable */
 /* global ListMilestone */
 import _ from 'underscore';
+import { timeFor } from './lib/utils/datetime_utility';
 (function() {
   this.MilestoneSelect = (function() {
@@ -239,7 +240,7 @@ import _ from 'underscore';
             $value.css('display', '');
             if (data.milestone != null) {
               data.milestone.full_path = _this.currentProject.full_path;
-              data.milestone.remaining = gl.utils.timeFor(data.milestone.due_date);
+              data.milestone.remaining = timeFor(data.milestone.due_date);
               data.milestone.name = data.milestone.title;
               $value.html(milestoneLinkTemplate(data.milestone));
               return $sidebarCollapsedValue.find('span').html(collapsedSidebarLabelTemplate(data.milestone));
......
@@ -25,6 +25,7 @@ import Autosave from './autosave';
 import TaskList from './task_list';
 import { ajaxPost, isInViewport, getPagePath, scrollToElement, isMetaKey } from './lib/utils/common_utils';
 import imageDiffHelper from './image_diff/helpers/index';
+import { localTimeAgo } from './lib/utils/datetime_utility';
 window.autosize = Autosize;
@@ -311,7 +312,7 @@ export default class Notes {
   setupNewNote($note) {
     // Update datetime format on the recent note
-    gl.utils.localTimeAgo($note.find('.js-timeago'), false);
+    localTimeAgo($note.find('.js-timeago'), false);
     this.collapseLongCommitList();
     this.taskList.init();
@@ -463,7 +464,7 @@ export default class Notes {
         this.renderDiscussionAvatar(diffAvatarContainer, noteEntity);
       }
-      gl.utils.localTimeAgo($('.js-timeago'), false);
+      localTimeAgo($('.js-timeago'), false);
       Notes.checkMergeRequestStatus();
       return this.updateNotesCount(1);
     }
......
 import _ from 'underscore';
 import d3 from 'd3';
+import { getDayName, getDayDifference } from '../lib/utils/datetime_utility';
 const LOADING_HTML = `
   <div class="text-center">
@@ -17,7 +18,7 @@ function getSystemDate(systemUtcOffsetSeconds) {
 function formatTooltipText({ date, count }) {
   const dateObject = new Date(date);
-  const dateDayName = gl.utils.getDayName(dateObject);
+  const dateDayName = getDayName(dateObject);
   const dateText = dateObject.format('mmm d, yyyy');
   let contribText = 'No contributions';
@@ -51,7 +52,7 @@ export default class ActivityCalendar {
     const oneYearAgo = new Date(today);
     oneYearAgo.setFullYear(today.getFullYear() - 1);
-    const days = gl.utils.getDayDifference(oneYearAgo, today);
+    const days = getDayDifference(oneYearAgo, today);
     for (let i = 0; i <= days; i += 1) {
       const date = new Date(oneYearAgo);
......
 import ActivityCalendar from './activity_calendar';
+import { localTimeAgo } from '../lib/utils/datetime_utility';
 /**
  * UserTabs
@@ -138,7 +139,7 @@ export default class UserTabs {
         const tabSelector = `div#${action}`;
         this.$parentEl.find(tabSelector).html(data.html);
         this.loaded[action] = true;
-        gl.utils.localTimeAgo($('.js-timeago', tabSelector));
+        localTimeAgo($('.js-timeago', tabSelector));
       },
     });
   }
......
-import '~/lib/utils/datetime_utility';
+import { getTimeago } from '~/lib/utils/datetime_utility';
 import { visitUrl } from '../../lib/utils/url_utility';
 import Flash from '../../flash';
 import MemoryUsage from './mr_widget_memory_usage';
@@ -17,7 +17,7 @@ export default {
   },
   methods: {
     formatDate(date) {
-      return gl.utils.getTimeago().format(date);
+      return getTimeago().format(date);
     },
     hasExternalUrls(deployment = {}) {
       return deployment.external_url && deployment.external_url_formatted;
......
 import Timeago from 'timeago.js';
 import { getStateKey } from '../dependencies';
+import { formatDate } from '../../lib/utils/datetime_utility';
 export default class MergeRequestStore {
   constructor(data) {
@@ -124,7 +125,7 @@ export default class MergeRequestStore {
   static getEventObject(event) {
     return {
       author: MergeRequestStore.getAuthorObject(event),
-      updatedAt: gl.utils.formatDate(MergeRequestStore.getEventUpdatedAtDate(event)),
+      updatedAt: formatDate(MergeRequestStore.getEventUpdatedAtDate(event)),
       formattedUpdatedAt: MergeRequestStore.getEventDate(event),
     };
   }
......
+import { getTimeago } from '../../lib/utils/datetime_utility';
 export default {
   name: 'MemoryGraph',
   props: {
@@ -16,7 +18,7 @@ export default {
   },
   computed: {
     getFormattedMedian() {
-      const deployedSince = gl.utils.getTimeago().format(this.deploymentTime * 1000);
+      const deployedSince = getTimeago().format(this.deploymentTime * 1000);
       return `Deployed ${deployedSince}`;
     },
   },
......
-import '../../lib/utils/datetime_utility';
+import { formatDate, getTimeago } from '../../lib/utils/datetime_utility';
 /**
  * Mixin with time ago methods used in some vue components
@@ -6,13 +6,13 @@ import '../../lib/utils/datetime_utility';
 export default {
   methods: {
     timeFormated(time) {
-      const timeago = gl.utils.getTimeago();
+      const timeago = getTimeago();
       return timeago.format(time);
     },
     tooltipTitle(time) {
-      return gl.utils.formatDate(time);
+      return formatDate(time);
     },
   },
 };
@@ -42,8 +42,8 @@
     .commiter
       - commit_author_link = commit_author_link(commit, avatar: false, size: 24)
-      - commit_timeago = time_ago_with_tooltip(commit.committed_date, placement: 'bottom')
-      - commit_text = _('%{commit_author_link} committed %{commit_timeago}') % { commit_author_link: commit_author_link, commit_timeago: commit_timeago }
+      - commit_timeago = time_ago_with_tooltip(commit.authored_date, placement: 'bottom')
+      - commit_text = _('%{commit_author_link} authored %{commit_timeago}') % { commit_author_link: commit_author_link, commit_timeago: commit_timeago }
       #{ commit_text.html_safe }
     - if show_project_name
       %span.project_namespace
......
---
- cronjob:admin_email
- cronjob:expire_build_artifacts
- cronjob:gitlab_usage_ping
- cronjob:import_export_project_cleanup
- cronjob:pipeline_schedule
- cronjob:prune_old_events
- cronjob:remove_expired_group_links
- cronjob:remove_expired_members
- cronjob:remove_old_web_hook_logs
- cronjob:remove_unreferenced_lfs_objects
- cronjob:repository_archive_cache
- cronjob:repository_check_batch
- cronjob:requests_profiles
- cronjob:schedule_update_user_activity
- cronjob:stuck_ci_jobs
- cronjob:stuck_import_jobs
- cronjob:stuck_merge_jobs
- cronjob:trending_projects
- gcp_cluster:cluster_install_app
- gcp_cluster:cluster_provision
- gcp_cluster:cluster_wait_for_app_installation
- gcp_cluster:wait_for_cluster_creation
- github_import_advance_stage
- github_importer:github_import_import_diff_note
- github_importer:github_import_import_issue
- github_importer:github_import_import_note
- github_importer:github_import_import_pull_request
- github_importer:github_import_refresh_import_jid
- github_importer:github_import_stage_finish_import
- github_importer:github_import_stage_import_base_data
- github_importer:github_import_stage_import_issues_and_diff_notes
- github_importer:github_import_stage_import_notes
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
- pipeline_default:build_coverage
- pipeline_default:build_trace_sections
- pipeline_default:pipeline_metrics
- pipeline_default:pipeline_notification
- pipeline_default:update_head_pipeline_for_merge_request
- pipeline_hooks:build_hooks
- pipeline_hooks:pipeline_hooks
- pipeline_processing:build_finished
- pipeline_processing:build_queue
- pipeline_processing:build_success
- pipeline_processing:pipeline_process
- pipeline_processing:pipeline_success
- pipeline_processing:pipeline_update
- pipeline_processing:stage_update
- repository_check:repository_check_clear
- repository_check:repository_check_single_repository
- default
- mailers # ActionMailer::DeliveryJob.queue_name
- authorized_projects
- background_migration
- create_gpg_signature
- delete_merged_branches
- delete_user
- email_receiver
- emails_on_push
- expire_build_instance_artifacts
- git_garbage_collect
- gitlab_shell
- group_destroy
- invalid_gpg_signature_update
- irker
- merge
- namespaceless_project_destroy
- new_issue
- new_merge_request
- new_note
- pages
- post_receive
- process_commit
- project_cache
- project_destroy
- project_export
- project_migrate_hashed_storage
- project_service
- propagate_service_template
- reactive_caching
- repository_fork
- repository_import
- storage_migrator
- system_hook_push
- update_merge_requests
- update_user_activity
- upload_checksum
- web_hook
# EE-specific queues
- cronjob:clear_shared_runners_minutes
- cronjob:geo_file_download_dispatch
- cronjob:geo_metrics_update
- cronjob:geo_prune_event_log
- cronjob:geo_repository_sync
- cronjob:historical_data
- cronjob:ldap_all_groups_sync
- cronjob:ldap_sync
- cronjob:update_all_mirrors
- geo:geo_file_removal
- geo:geo_hashed_storage_attachments_migration
- geo:geo_hashed_storage_migration
- geo:geo_rename_repository
- geo:geo_repositories_clean_up
- geo:geo_repository_destroy
- admin_emails
- elastic_batch_project_indexer
- elastic_commit_indexer
- elastic_indexer
- export_csv
- geo_base_scheduler
- geo_file_download
- geo_project_sync
- geo_repository_shard_sync
- ldap_group_sync
- object_storage_upload
- project_update_repository_storage
- rebase
- repository_update_mirror
- repository_update_remote_mirror
@@ -2,7 +2,7 @@ class BuildFinishedWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(build_id)
     Ci::Build.find_by(id: build_id).try do |build|
......
@@ -2,7 +2,7 @@ class BuildHooksWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :hooks
+  queue_namespace :pipeline_hooks
   def perform(build_id)
     Ci::Build.find_by(id: build_id)
......
@@ -2,7 +2,7 @@ class BuildQueueWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(build_id)
     Ci::Build.find_by(id: build_id).try do |build|
......
@@ -2,7 +2,7 @@ class BuildSuccessWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(build_id)
     Ci::Build.find_by(id: build_id).try do |build|
......
@@ -3,13 +3,23 @@ Sidekiq::Worker.extend ActiveSupport::Concern
 module ApplicationWorker
   extend ActiveSupport::Concern
-  include Sidekiq::Worker
+  include Sidekiq::Worker # rubocop:disable Cop/IncludeSidekiqWorker
   included do
-    sidekiq_options queue: base_queue_name
+    set_queue
   end
   module ClassMethods
+    def inherited(subclass)
+      subclass.set_queue
+    end
+
+    def set_queue
+      queue_name = [queue_namespace, base_queue_name].compact.join(':')
+
+      sidekiq_options queue: queue_name # rubocop:disable Cop/SidekiqOptionsQueue
+    end
+
     def base_queue_name
       name
         .sub(/\AGitlab::/, '')
@@ -18,6 +28,16 @@ module ApplicationWorker
         .tr('/', '_')
     end
+    def queue_namespace(new_namespace = nil)
+      if new_namespace
+        sidekiq_options queue_namespace: new_namespace
+
+        set_queue
+      else
+        get_sidekiq_options['queue_namespace']&.to_s
+      end
+    end
+
     def queue
       get_sidekiq_options['queue'].to_s
     end
......
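A rough console sketch of what the new `set_queue`/`queue_namespace` hooks produce. The worker classes and queue names correspond to entries in `all_queues.yml` and the worker diffs in this change; the printed values are inferred from the code above rather than captured output:

```ruby
# Queue names now derive from the worker class name (with the Gitlab:: prefix
# stripped and nesting turned into underscores), optionally prefixed with a
# namespace; subclasses recompute their own queue via the `inherited` hook.
PostReceive.queue                              # => "post_receive"
PipelineProcessWorker.queue                    # => "pipeline_processing:pipeline_process"
Gitlab::GithubImport::AdvanceStageWorker.queue # => "github_import_advance_stage"
Geo::FileDownloadDispatchWorker.queue          # => "cronjob:geo_file_download_dispatch"
```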
@@ -5,6 +5,6 @@ module ClusterQueue
   extend ActiveSupport::Concern
   included do
-    sidekiq_options queue: :gcp_cluster
+    queue_namespace :gcp_cluster
   end
 end
@@ -4,6 +4,7 @@ module CronjobQueue
   extend ActiveSupport::Concern
   included do
-    sidekiq_options queue: :cronjob, retry: false
+    queue_namespace :cronjob
+    sidekiq_options retry: false
   end
 end
@@ -3,6 +3,6 @@ module GeoQueue
   extend ActiveSupport::Concern
   included do
-    sidekiq_options queue: :geo
+    queue_namespace :geo
   end
 end
@@ -4,12 +4,14 @@ module Gitlab
     extend ActiveSupport::Concern
     included do
+      queue_namespace :github_importer
+
       # If a job produces an error it may block a stage from advancing
       # forever. To prevent this from happening we prevent jobs from going to
       # the dead queue. This does mean some resources may not be imported, but
       # this is better than a project being stuck in the "import" state
       # forever.
-      sidekiq_options queue: 'github_importer', dead: false, retry: 5
+      sidekiq_options dead: false, retry: 5
     end
   end
 end
......
@@ -5,14 +5,6 @@ module PipelineQueue
   extend ActiveSupport::Concern
   included do
-    sidekiq_options queue: 'pipeline_default'
-  end
-
-  class_methods do
-    def enqueue_in(group:)
-      raise ArgumentError, 'Unspecified queue group!' if group.empty?
-
-      sidekiq_options queue: "pipeline_#{group}"
-    end
+    queue_namespace :pipeline_default
   end
 end
@@ -3,6 +3,8 @@ module RepositoryCheckQueue
   extend ActiveSupport::Concern
   included do
-    sidekiq_options queue: :repository_check, retry: false
+    queue_namespace :repository_check
+    sidekiq_options retry: false
   end
 end
@@ -2,7 +2,7 @@ class CreatePipelineWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :creation
+  queue_namespace :pipeline_creation
   def perform(project_id, user_id, ref, source, params = {})
     project = Project.find(project_id)
......
@@ -2,7 +2,7 @@ class ExpireJobCacheWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :cache
+  queue_namespace :pipeline_cache
   def perform(job_id)
     job = CommitStatus.joins(:pipeline, :project).find_by(id: job_id)
......
@@ -2,7 +2,7 @@ class ExpirePipelineCacheWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :cache
+  queue_namespace :pipeline_cache
   def perform(pipeline_id)
     pipeline = Ci::Pipeline.find_by(id: pipeline_id)
......
 module Geo
   class BaseSchedulerWorker
     include ApplicationWorker
-    include CronjobQueue
     include ExclusiveLeaseGuard
     DB_RETRIEVE_BATCH_SIZE = 1000
......
 module Geo
   class FileDownloadDispatchWorker < Geo::BaseSchedulerWorker
+    include CronjobQueue
     private
     def max_capacity
......
 module Geo
   class RepositoryShardSyncWorker < Geo::BaseSchedulerWorker
-    # We may have many long-running threads, so split them out
-    # into their own queue to make it possible for other jobs to run.
-    sidekiq_options queue: :geo_repository_shard_sync, retry: false
+    sidekiq_options retry: false
     attr_accessor :shard_name
......
@@ -9,7 +9,7 @@ module Gitlab
     class AdvanceStageWorker
       include ApplicationWorker
-      sidekiq_options queue: 'github_importer_advance_stage', dead: false
+      sidekiq_options dead: false
       INTERVAL = 30.seconds.to_i
......
 class PagesWorker
   include ApplicationWorker
-  sidekiq_options queue: :pages, retry: false
+  sidekiq_options retry: false
   def perform(action, *arg)
     send(action, *arg) # rubocop:disable GitlabSecurity/PublicSend
......
@@ -2,7 +2,7 @@ class PipelineHooksWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :hooks
+  queue_namespace :pipeline_hooks
   def perform(pipeline_id)
     Ci::Pipeline.find_by(id: pipeline_id)
......
@@ -2,7 +2,7 @@ class PipelineProcessWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(pipeline_id)
     Ci::Pipeline.find_by(id: pipeline_id)
......
@@ -2,7 +2,7 @@ class PipelineSuccessWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(pipeline_id)
     Ci::Pipeline.find_by(id: pipeline_id).try do |pipeline|
......
@@ -2,7 +2,7 @@ class PipelineUpdateWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(pipeline_id)
     Ci::Pipeline.find_by(id: pipeline_id)
......
@@ -2,7 +2,7 @@ class StageUpdateWorker
   include ApplicationWorker
   include PipelineQueue
-  enqueue_in group: :processing
+  queue_namespace :pipeline_processing
   def perform(stage_id)
     Ci::Stage.find_by(id: stage_id).try do |stage|
......
 class UpdateHeadPipelineForMergeRequestWorker
   include ApplicationWorker
-  sidekiq_options queue: 'pipeline_default'
+  include PipelineQueue
   def perform(merge_request_id)
     merge_request = MergeRequest.find(merge_request_id)
......
---
title: Fix Merge Request widget rebase action in Internet Explorer
merge_request: 3732
author:
type: fixed
---
title: Prevent adding same role multiple times on repeated clicks
merge_request: 3700
author:
type: fixed
---
title: Record EE Ultimate usage pings correctly
merge_request:
author:
type: fixed
---
title: Fix validation of environment scope for Ci::Variable
merge_request: 3641
author:
type: fixed
---
title: Show authored date rather than committed date on the commit list
merge_request:
author:
type: fixed
@@ -46,6 +46,8 @@ Sidekiq.configure_server do |config|
   Gitlab::SidekiqThrottler.execute!
+  Gitlab::SidekiqVersioning.install!
+
   config = Gitlab::Database.config ||
     Rails.application.config.database_configuration[Rails.env]
   config['pool'] = Sidekiq.options[:concurrency]
@@ -72,19 +74,3 @@ Sidekiq.configure_client do |config|
     chain.add Gitlab::SidekiqStatus::ClientMiddleware
   end
 end
-
-# The Sidekiq client API always adds the queue to the Sidekiq queue
-# list, but mail_room and gitlab-shell do not. This is only necessary
-# for monitoring.
-begin
-  queues = Gitlab::SidekiqConfig.worker_queues
-
-  Sidekiq.redis do |conn|
-    conn.pipelined do
-      queues.each do |queue|
-        conn.sadd('queues', queue)
-      end
-    end
-  end
-rescue Redis::BaseError, SocketError, Errno::ENOENT, Errno::EADDRNOTAVAIL, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED
-end
@@ -25,8 +25,6 @@
   - [new_note, 2]
   - [new_issue, 2]
   - [new_merge_request, 2]
-  - [build, 2]
-  - [pipeline, 2]
   - [pipeline_processing, 5]
   - [pipeline_creation, 4]
   - [pipeline_default, 3]
@@ -38,11 +36,12 @@
   - [mailers, 2]
   - [invalid_gpg_signature_update, 2]
   - [create_gpg_signature, 2]
+  - [rebase, 2]
   - [upload_checksum, 1]
   - [repository_fork, 1]
   - [repository_import, 1]
   - [github_importer, 1]
-  - [github_importer_advance_stage, 1]
+  - [github_import_advance_stage, 1]
   - [project_service, 1]
   - [delete_user, 1]
   - [delete_merged_branches, 1]
@@ -68,13 +67,15 @@
   - [gcp_cluster, 1]
   - [project_migrate_hashed_storage, 1]
   - [storage_migrator, 1]
-  # EE specific queues
+  # EE-specific queues
   - [ldap_group_sync, 2]
   - [geo, 1]
   - [repository_update_mirror, 1]
   - [repository_update_remote_mirror, 1]
   - [project_update_repository_storage, 1]
   - [admin_emails, 1]
+  - [geo_base_scheduler, 1] # Parent class of geo_repository_shard_sync and cronjob:geo_file_download_dispatch
   - [geo_project_sync, 1]
   - [geo_file_download, 1]
   - [geo_repository_shard_sync, 1]
......
# See http://doc.gitlab.com/ce/development/migration_style_guide.html
# for more information on how to write migrations for GitLab.
class MigrateGithubImporterAdvanceStageSidekiqQueue < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
sidekiq_queue_migrate 'github_importer_advance_stage', to: 'github_import_advance_stage'
end
def down
sidekiq_queue_migrate 'github_import_advance_stage', to: 'github_importer_advance_stage'
end
end
@@ -11,7 +11,7 @@
 #
 # It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema.define(version: 20171206221519) do
+ActiveRecord::Schema.define(version: 20171213160445) do
   # These are extensions that must be enabled in order to support this database
   enable_extension "plpgsql"
......
@@ -18,6 +18,12 @@ Each separate argument denotes a group of queues that have to be processed by a
 Sidekiq process. Multiple queues can be processed by the same process by
 separating them with a comma instead of a space.
+Instead of a queue, a queue namespace can also be provided, to have the process
+automatically listen on all queues in that namespace without needing to
+explicitly list all the queue names. For more information about queue namespaces,
+see the relevant section in the
+[Sidekiq style guide](../../development/sidekiq_style_guide.md#queue-namespaces).
+
 For example, say you want to start 2 extra processes: one to process the
 "process_commit" queue, and one to process the "post_receive" queue. This can be
 done as follows:
@@ -90,10 +96,10 @@ default value is "development".
 ## All Queues With Exceptions
 You're able to run all queues in `sidekiq_queues.yml` file on a single or
 multiple processes with exceptions using the `--negate` flag.
 For example, say you want to run a single process for all queues,
 except "process_commit" and "post_receive". You can do so by executing:
 ```bash
......
@@ -9,25 +9,54 @@ All workers should include `ApplicationWorker` instead of `Sidekiq::Worker`,
 which adds some convenience methods and automatically sets the queue based on
 the worker's name.
-## Default Queue
-
-Use of the "default" queue is not allowed. Every worker should use a queue that
-matches the worker's purpose the closest. For example, workers that are to be
-executed periodically should use the "cronjob" queue.
-
-A list of all available queues can be found in `config/sidekiq_queues.yml`.
-
-## Dedicated Queues
-
-Most workers should use their own queue, which is automatically set based on the
-worker class name. For a worker named `ProcessSomethingWorker`, the queue name
-would be `process_something`. If you're not sure what a worker's queue name is,
-you can find it using `SomeWorker.queue`.
+## Dedicated Queues
+
+All workers should use their own queue, which is automatically set based on the
+worker class name. For a worker named `ProcessSomethingWorker`, the queue name
+would be `process_something`. If you're not sure what queue a worker uses,
+you can find it using `SomeWorker.queue`. There is almost never a reason to
+manually override the queue name using `sidekiq_options queue: :some_queue`.
+
+## Queue Namespaces
+
+While different workers cannot share a queue, they can share a queue namespace.
+Defining a queue namespace for a worker makes it possible to start a Sidekiq
+process that automatically handles jobs for all workers in that namespace,
+without needing to explicitly list all their queue names. If, for example, all
+workers that are managed by sidekiq-cron use the `cronjob` queue namespace, we
+can spin up a Sidekiq process specifically for these kinds of scheduled jobs.
+If a new worker using the `cronjob` namespace is added later on, the Sidekiq
+process will automatically pick up jobs for that worker too (after having been
+restarted), without the need to change any configuration.
+
+A queue namespace can be set using the `queue_namespace` DSL class method:
+
+```ruby
+class SomeScheduledTaskWorker
+  include ApplicationWorker
+
+  queue_namespace :cronjob
+
+  # ...
+end
+```
+
+Behind the scenes, this will set `SomeScheduledTaskWorker.queue` to
+`cronjob:some_scheduled_task`. Commonly used namespaces will have their own
+concern module that can easily be included into the worker class, and that may
+set other Sidekiq options besides the queue namespace. `CronjobQueue`, for
+example, sets the namespace, but also disables retries.
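A quick way to check the derived values is from a Rails console; the worker here is the hypothetical `SomeScheduledTaskWorker` from the snippet above, and the outputs follow from the description rather than being recorded output:

```ruby
# Inspecting the hypothetical SomeScheduledTaskWorker defined above.
SomeScheduledTaskWorker.queue_namespace              # => "cronjob"
SomeScheduledTaskWorker.queue                        # => "cronjob:some_scheduled_task"
SomeScheduledTaskWorker.get_sidekiq_options['queue'] # => "cronjob:some_scheduled_task"
```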
+`bundle exec sidekiq` is namespace-aware, and will automatically listen on all
+queues in a namespace (technically: all queues prefixed with the namespace name)
+when a namespace is provided instead of a simple queue name in the `--queue`
+(`-q`) option, or in the `:queues:` section in `config/sidekiq_queues.yml`.
+
-In some cases multiple workers do use the same queue. For example, the various
-workers for updating CI pipelines all use the `pipeline` queue. Adding workers
-to existing queues should be done with care, as adding more workers can lead to
-slow jobs blocking work (even for different jobs) on the shared queue.
+Note that adding a worker to an existing namespace should be done with care, as
+the extra jobs will take resources away from jobs from workers that were already
+there, if the resources available to the Sidekiq process handling the namespace
+are not adjusted appropriately.
 ## Tests
@@ -36,7 +65,7 @@ tests should be placed in `spec/workers`.
 ## Removing or renaming queues
-Try to avoid renaming or removing queues in minor and patch releases.
+Try to avoid renaming or removing workers and their queues in minor and patch releases.
 During online update instance can have pending jobs and removing the queue can
 lead to those jobs being stuck forever. If you can't write migration for those
 Sidekiq jobs, please consider doing rename or remove queue in major release only.
@@ -145,8 +145,8 @@ export default class ProtectedBranchAccessDropdown {
   addSelectedItem(selectedItem) {
     let itemToAdd = {};
-    // If the item already exists, just use it
     let index = -1;
+    let alreadyAdded = false;
     const selectedItems = this.getAllSelectedItems();
     // Compare IDs based on selectedItem.type
@@ -155,6 +155,10 @@ export default class ProtectedBranchAccessDropdown {
       switch (selectedItem.type) {
         case LEVEL_TYPES.ROLE:
           comparator = LEVEL_ID_PROP.ROLE;
+          // If the item already exists, just use it
+          if (item[comparator] === selectedItem.id) {
+            alreadyAdded = true;
+          }
           break;
         case LEVEL_TYPES.GROUP:
           comparator = LEVEL_ID_PROP.GROUP;
@@ -171,6 +175,10 @@ export default class ProtectedBranchAccessDropdown {
       }
     });
+    if (alreadyAdded) {
+      return;
+    }
+
     if (index !== -1 && selectedItems[index]._destroy) {
       delete selectedItems[index]._destroy;
       return;
......
@@ -140,8 +140,8 @@ export default class ProtectedTagAccessDropdown {
   addSelectedItem(selectedItem) {
     let itemToAdd = {};
-    // If the item already exists, just use it
     let index = -1;
+    let alreadyAdded = false;
    const selectedItems = this.getAllSelectedItems();
     // Compare IDs based on selectedItem.type
@@ -150,6 +150,10 @@ export default class ProtectedTagAccessDropdown {
       switch (selectedItem.type) {
         case LEVEL_TYPES.ROLE:
           comparator = LEVEL_ID_PROP.ROLE;
+          // If the item already exists, just use it
+          if (item[comparator] === selectedItem.id) {
+            alreadyAdded = true;
+          }
           break;
         case LEVEL_TYPES.GROUP:
           comparator = LEVEL_ID_PROP.GROUP;
@@ -166,6 +170,10 @@ export default class ProtectedTagAccessDropdown {
       }
     });
+    if (alreadyAdded) {
+      return;
+    }
+
     if (index !== -1 && selectedItems[index]._destroy) {
       delete selectedItems[index]._destroy;
       return;
......
@@ -23,7 +23,7 @@
     data() {
       return {
         isMakingRequest: false,
-        rebasingError: '',
+        rebasingError: null,
       };
     },
     computed: {
@@ -39,14 +39,11 @@
       showDisabledButton() {
         return ['failed', 'loading'].includes(this.status);
       },
-      hasRebasingError() {
-        return this.rebasingError.length;
-      },
     },
     methods: {
       rebase() {
         this.isMakingRequest = true;
-        this.rebasingError = '';
+        this.rebasingError = null;
         this.service.rebase()
           .then(() => {
@@ -117,7 +114,7 @@
         Rebase
       </button>
       <span
-        v-if="!hasRebasingError"
+        v-if="!rebasingError"
         class="bold">
         Fast-forward merge is not possible.
         Rebase the source branch onto the target branch or merge target
......
-import Vue from 'vue';
+import axios from '~/lib/utils/axios_utils';
 import CEWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
@@ -6,36 +6,31 @@ export default class MRWidgetService extends CEWidgetService {
   constructor(mr) {
     super(mr);
-    // Set as a text/plain request so BE doesn't try to parse
-    // See https://gitlab.com/gitlab-org/gitlab-ce/issues/34534
-    this.approvalsResource = Vue.resource(mr.approvalsPath, {}, {}, {
-      headers: {
-        'Content-Type': 'text/plain',
-      },
-    });
-    this.rebaseResource = Vue.resource(mr.rebasePath);
+    this.approvalsPath = mr.approvalsPath;
+    this.rebasePath = mr.rebasePath;
   }
   fetchApprovals() {
-    return this.approvalsResource.get()
-      .then(res => res.json());
+    return axios.get(this.approvalsPath)
+      .then(res => res.data);
   }
   approveMergeRequest() {
-    return this.approvalsResource.save()
-      .then(res => res.json());
+    return axios.post(this.approvalsPath)
+      .then(res => res.data);
   }
   unapproveMergeRequest() {
-    return this.approvalsResource.delete()
-      .then(res => res.json());
+    return axios.delete(this.approvalsPath)
+      .then(res => res.data);
   }
   rebase() {
-    return this.rebaseResource.save();
+    return axios.post(this.rebasePath);
   }
   fetchReport(endpoint) { // eslint-disable-line
-    return Vue.http.get(endpoint).then(res => res.json());
+    return axios.get(endpoint)
+      .then(res => res.data);
   }
 }
class RebaseWorker class RebaseWorker
include ApplicationWorker include ApplicationWorker
sidekiq_options queue: :merge
def perform(merge_request_id, current_user_id) def perform(merge_request_id, current_user_id)
current_user = User.find(current_user_id) current_user = User.find(current_user_id)
merge_request = MergeRequest.find(merge_request_id) merge_request = MergeRequest.find(merge_request_id)
......
...@@ -38,7 +38,7 @@ module Gitlab ...@@ -38,7 +38,7 @@ module Gitlab
end end
def environment_name_regex_chars def environment_name_regex_chars
'a-zA-Z0-9_/\\$\\{\\}\\. -' 'a-zA-Z0-9_/\\$\\{\\}\\. \\-'
end end
def environment_name_regex def environment_name_regex
......
...@@ -33,8 +33,13 @@ module Gitlab
        queue_groups = SidekiqCluster.parse_queues(argv)

+        all_queues = SidekiqConfig.worker_queues(@rails_path)
+        queue_groups.map! do |queues|
+          SidekiqConfig.expand_queues(queues, all_queues)
+        end
+
        if @negate_queues
-          all_queues = SidekiqConfig.config_queues(@rails_path)
          queue_groups.map! { |queues| all_queues - queues }
        end
......
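Read together with the `--negate` branch, the new CLI flow is: parse the queue groups, expand each group against the full list from `all_queues.yml`, and only then subtract. A minimal standalone sketch of that ordering; the queue names and the `negate` flag are invented for illustration, and `expand` only mirrors `SidekiqConfig.expand_queues`:

require 'set'

# Hypothetical stand-ins for SidekiqConfig.worker_queues and the parsed argv.
all_queues   = ['post_receive', 'cronjob:stuck_import_jobs', 'cronjob:stuck_merge_jobs']
queue_groups = [['cronjob']]
negate       = true

# Mirror of SidekiqConfig.expand_queues: keep the name and add every
# queue that lives under it as a namespace ("cronjob" -> "cronjob:*").
expand = lambda do |queues|
  set = all_queues.to_set
  queues.flat_map { |q| [q, *set.grep(/\A#{q}:/)] }
end

# Expansion happens first, so negating a namespace drops all of its queues.
queue_groups.map! { |queues| expand.call(queues) }
queue_groups.map! { |queues| all_queues - queues } if negate

p queue_groups # => [["post_receive"]]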
require 'yaml'
+require 'set'

module Gitlab
  module SidekiqConfig
-    def self.redis_queues
-      @redis_queues ||= Sidekiq::Queue.all.map(&:name)
+    # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
+    # of bundler/Rails context, so we cannot use any gem or Rails methods.
+    def self.worker_queues(rails_path = Rails.root.to_s)
+      @worker_queues ||= {}
+
+      @worker_queues[rails_path] ||= YAML.load_file(File.join(rails_path, 'app/workers/all_queues.yml'))
    end

    # This method is called by `bin/sidekiq-cluster` in EE, which runs outside
    # of bundler/Rails context, so we cannot use any gem or Rails methods.
-    def self.config_queues(rails_path = Rails.root.to_s)
+    def self.expand_queues(queues, all_queues = self.worker_queues)
+      return [] if queues.empty?
+
+      queues_set = all_queues.to_set
+
+      queues.flat_map do |queue|
+        [queue, *queues_set.grep(/\A#{queue}:/)]
+      end
+    end
+
+    def self.redis_queues
+      # Not memoized, because this can change during the life of the application
+      Sidekiq::Queue.all.map(&:name)
+    end
+
+    def self.config_queues
      @config_queues ||= begin
-        config = YAML.load_file(File.join(rails_path, 'config', 'sidekiq_queues.yml'))
+        config = YAML.load_file(Rails.root.join('config/sidekiq_queues.yml'))
        config[:queues].map(&:first)
      end
    end
...@@ -25,14 +44,6 @@ module Gitlab
      find_workers(Rails.root.join('ee', 'app', 'workers'))
    end

-    def self.default_queues
-      [ActionMailer::DeliveryJob.queue_name, 'default']
-    end
-
-    def self.worker_queues
-      @worker_queues ||= (workers.map(&:queue) + default_queues).uniq
-    end
-
    def self.find_workers(root)
      concerns = root.join('concerns').to_s
...@@ -45,7 +56,7 @@ module Gitlab
        ns.camelize.constantize
      end

-      # Skip concerns
+      # Skip things that aren't workers
      workers.select { |w| w < Sidekiq::Worker }
    end
  end
......
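The new `worker_queues` helper assumes `app/workers/all_queues.yml` is a flat YAML list of queue names, including the namespaced ones, which is what makes the `grep(/\A#{queue}:/)` expansion possible. A small hypothetical sketch of that shape:

require 'yaml'

# Miniature, made-up stand-in for app/workers/all_queues.yml.
yaml = <<~YAML
  - post_receive
  - cronjob:stuck_import_jobs
  - cronjob:stuck_merge_jobs
YAML

all_queues = YAML.safe_load(yaml)

# The namespace prefix is all expand_queues needs to find the concrete queues.
p all_queues.grep(/\Acronjob:/)
# => ["cronjob:stuck_import_jobs", "cronjob:stuck_merge_jobs"]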
module Gitlab
module SidekiqVersioning
def self.install!
Sidekiq::Manager.prepend SidekiqVersioning::Manager
# The Sidekiq client API always adds the queue to the Sidekiq queue
# list, but mail_room and gitlab-shell do not. This is only necessary
# for monitoring.
begin
queues = SidekiqConfig.worker_queues
if queues.any?
Sidekiq.redis do |conn|
conn.pipelined do
queues.each do |queue|
conn.sadd('queues', queue)
end
end
end
end
rescue ::Redis::BaseError, SocketError, Errno::ENOENT, Errno::EADDRNOTAVAIL, Errno::EAFNOSUPPORT, Errno::ECONNRESET, Errno::ECONNREFUSED
end
end
end
end
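Operationally, `install!` seeds Sidekiq's `queues` set so monitoring sees every known queue even before a job has been pushed to it. A rough sketch of how that might be checked, assuming a running Redis and that `install!` is invoked from an initializer (the call site is not part of this diff):

Gitlab::SidekiqVersioning.install!

# Every queue from all_queues.yml is now a member of the 'queues' set that
# Sidekiq's monitoring reads...
Sidekiq.redis { |conn| conn.smembers('queues') }

# ...which is also what the non-memoized helper above reports:
Gitlab::SidekiqConfig.redis_queues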
module Gitlab
module SidekiqVersioning
module Manager
def initialize(options = {})
options[:strict] = false
options[:queues] = SidekiqConfig.expand_queues(options[:queues])
Sidekiq.logger.info "Listening on queues #{options[:queues].uniq.sort}"
super
end
end
end
end
...@@ -140,6 +140,8 @@ module Gitlab ...@@ -140,6 +140,8 @@ module Gitlab
def license_edition(plan) def license_edition(plan)
case plan case plan
when 'ultimate'
'EEU'
when 'premium' when 'premium'
'EEP' 'EEP'
when 'starter' when 'starter'
......
require_relative '../spec_helpers'
module RuboCop
module Cop
# Cop that makes sure workers include `ApplicationWorker`, not `Sidekiq::Worker`.
class IncludeSidekiqWorker < RuboCop::Cop::Cop
include SpecHelpers
MSG = 'Include `ApplicationWorker`, not `Sidekiq::Worker`.'.freeze
def_node_matcher :includes_sidekiq_worker?, <<~PATTERN
(send nil :include (const (const nil :Sidekiq) :Worker))
PATTERN
def on_send(node)
return if in_spec?(node)
return unless includes_sidekiq_worker?(node)
add_offense(node.arguments.first, :expression)
end
def autocorrect(node)
lambda do |corrector|
corrector.replace(node.source_range, 'ApplicationWorker')
end
end
end
end
end
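A hypothetical worker showing what this cop flags and how the autocorrection rewrites it (assumes GitLab's `ApplicationWorker` concern is loadable):

# Before: flagged with "Include `ApplicationWorker`, not `Sidekiq::Worker`."
class SomeNewWorker
  include Sidekiq::Worker
end

# After `rubocop --auto-correct`, the offending constant is replaced:
class SomeNewWorker
  include ApplicationWorker
end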
require_relative '../spec_helpers'
module RuboCop
module Cop
# Cop that prevents manually setting a queue in Sidekiq workers.
class SidekiqOptionsQueue < RuboCop::Cop::Cop
include SpecHelpers
MSG = 'Do not manually set a queue; `ApplicationWorker` sets one automatically.'.freeze
def_node_matcher :sidekiq_options?, <<~PATTERN
(send nil :sidekiq_options $...)
PATTERN
def on_send(node)
return if in_spec?(node)
return unless sidekiq_options?(node)
node.arguments.first.each_node(:pair) do |pair|
key_name = pair.key.children[0]
add_offense(pair, :expression) if key_name == :queue
end
end
end
end
end
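And the companion cop draws this line (again a hypothetical worker):

class SomeNewWorker
  include ApplicationWorker

  sidekiq_options queue: :some_queue # offense: the queue name is derived automatically
  sidekiq_options retry: false       # allowed: other sidekiq_options keys are untouched
end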
...@@ -3,10 +3,12 @@ require_relative 'cop/active_record_serialize' ...@@ -3,10 +3,12 @@ require_relative 'cop/active_record_serialize'
require_relative 'cop/custom_error_class' require_relative 'cop/custom_error_class'
require_relative 'cop/gem_fetcher' require_relative 'cop/gem_fetcher'
require_relative 'cop/in_batches' require_relative 'cop/in_batches'
require_relative 'cop/include_sidekiq_worker'
require_relative 'cop/line_break_after_guard_clauses' require_relative 'cop/line_break_after_guard_clauses'
require_relative 'cop/polymorphic_associations' require_relative 'cop/polymorphic_associations'
require_relative 'cop/project_path_helper' require_relative 'cop/project_path_helper'
require_relative 'cop/redirect_with_status' require_relative 'cop/redirect_with_status'
require_relative 'cop/sidekiq_options_queue'
require_relative 'cop/migration/add_column' require_relative 'cop/migration/add_column'
require_relative 'cop/migration/add_concurrent_foreign_key' require_relative 'cop/migration/add_concurrent_foreign_key'
require_relative 'cop/migration/add_concurrent_index' require_relative 'cop/migration/add_concurrent_index'
......
# coding: utf-8
require 'spec_helper'
describe Gitlab::Regex do
describe '.environment_scope_regex' do
subject { described_class.environment_scope_regex }
it { is_expected.to match('foo') }
it { is_expected.to match('foo*Z') }
it { is_expected.not_to match('!!()()') }
end
end
...@@ -6,7 +6,7 @@ describe HasEnvironmentScope do ...@@ -6,7 +6,7 @@ describe HasEnvironmentScope do
it { is_expected.to allow_value('*').for(:environment_scope) } it { is_expected.to allow_value('*').for(:environment_scope) }
it { is_expected.to allow_value('review/*').for(:environment_scope) } it { is_expected.to allow_value('review/*').for(:environment_scope) }
it { is_expected.not_to allow_value('').for(:environment_scope) } it { is_expected.not_to allow_value('').for(:environment_scope) }
it { is_expected.not_to allow_value('<>').for(:environment_scope) } it { is_expected.not_to allow_value('!!()()').for(:environment_scope) }
it do it do
is_expected.to validate_uniqueness_of(:key) is_expected.to validate_uniqueness_of(:key)
......
...@@ -759,7 +759,7 @@ describe Project do ...@@ -759,7 +759,7 @@ describe Project do
context 'when environment scope is exactly matched' do context 'when environment scope is exactly matched' do
before do before do
cluster.update(environment_scope: 'review/name') cluster.update!(environment_scope: 'review/name')
end end
it_behaves_like 'matching environment scope' it_behaves_like 'matching environment scope'
...@@ -767,7 +767,7 @@ describe Project do ...@@ -767,7 +767,7 @@ describe Project do
context 'when environment scope is matched by wildcard' do context 'when environment scope is matched by wildcard' do
before do before do
cluster.update(environment_scope: 'review/*') cluster.update!(environment_scope: 'review/*')
end end
it_behaves_like 'matching environment scope' it_behaves_like 'matching environment scope'
...@@ -775,7 +775,7 @@ describe Project do ...@@ -775,7 +775,7 @@ describe Project do
context 'when environment scope does not match' do context 'when environment scope does not match' do
before do before do
cluster.update(environment_scope: 'review/*/special') cluster.update!(environment_scope: 'review/*/special')
end end
it_behaves_like 'not matching environment scope' it_behaves_like 'not matching environment scope'
...@@ -789,14 +789,14 @@ describe Project do ...@@ -789,14 +789,14 @@ describe Project do
end end
it 'does not treat it as wildcard' do it 'does not treat it as wildcard' do
cluster.update(environment_scope: 'foo_bar/*') cluster.update!(environment_scope: 'foo_bar/*')
is_expected.to eq(default_cluster.platform_kubernetes) is_expected.to eq(default_cluster.platform_kubernetes)
end end
it 'matches literally for _' do it 'matches literally for _' do
cluster.update(environment_scope: 'foo_bar/*') cluster.update!(environment_scope: 'foo_bar/*')
environment.update(name: 'foo_bar/test') environment.update!(name: 'foo_bar/test')
is_expected.to eq(cluster.platform_kubernetes) is_expected.to eq(cluster.platform_kubernetes)
end end
...@@ -819,8 +819,8 @@ describe Project do ...@@ -819,8 +819,8 @@ describe Project do
is_expected.to eq(default_cluster.platform_kubernetes) is_expected.to eq(default_cluster.platform_kubernetes)
end end
it 'matches literally for _' do it 'matches literally for %' do
cluster.update(environment_scope: 'foo%bar/*') cluster.update_attribute(:environment_scope, 'foo%bar/*')
environment.update_attribute(:name, 'foo%bar/test') environment.update_attribute(:name, 'foo%bar/test')
is_expected.to eq(cluster.platform_kubernetes) is_expected.to eq(cluster.platform_kubernetes)
......
...@@ -197,7 +197,7 @@ describe 'Commits' do ...@@ -197,7 +197,7 @@ describe 'Commits' do
commits = project.repository.commits(branch_name) commits = project.repository.commits(branch_name)
commits.each do |commit| commits.each do |commit|
expect(page).to have_content("committed #{commit.committed_date.strftime("%b %d, %Y")}") expect(page).to have_content("authored #{commit.authored_date.strftime("%b %d, %Y")}")
end end
end end
......
import * as datetimeUtility from '~/lib/utils/datetime_utility';

-(() => {
describe('Date time utils', () => {
  describe('timeFor', () => {
    it('returns `past due` when in past', () => {
      const date = new Date();
      date.setFullYear(date.getFullYear() - 1);

      expect(
-        gl.utils.timeFor(date),
+        datetimeUtility.timeFor(date),
      ).toBe('Past due');
    });

    it('returns remaining time when in the future', () => {
      const date = new Date();
      date.setFullYear(date.getFullYear() + 1);

      // Add a day to prevent a transient error. If date is even 1 second
      // short of a full year, timeFor will return '11 months remaining'
      date.setDate(date.getDate() + 1);

      expect(
-        gl.utils.timeFor(date),
+        datetimeUtility.timeFor(date),
      ).toBe('1 year remaining');
    });
  });

  describe('get day name', () => {
    it('should return Sunday', () => {
-      const day = gl.utils.getDayName(new Date('07/17/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/17/2016'));
      expect(day).toBe('Sunday');
    });

    it('should return Monday', () => {
-      const day = gl.utils.getDayName(new Date('07/18/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/18/2016'));
      expect(day).toBe('Monday');
    });

    it('should return Tuesday', () => {
-      const day = gl.utils.getDayName(new Date('07/19/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/19/2016'));
      expect(day).toBe('Tuesday');
    });

    it('should return Wednesday', () => {
-      const day = gl.utils.getDayName(new Date('07/20/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/20/2016'));
      expect(day).toBe('Wednesday');
    });

    it('should return Thursday', () => {
-      const day = gl.utils.getDayName(new Date('07/21/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/21/2016'));
      expect(day).toBe('Thursday');
    });

    it('should return Friday', () => {
-      const day = gl.utils.getDayName(new Date('07/22/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/22/2016'));
      expect(day).toBe('Friday');
    });

    it('should return Saturday', () => {
-      const day = gl.utils.getDayName(new Date('07/23/2016'));
+      const day = datetimeUtility.getDayName(new Date('07/23/2016'));
      expect(day).toBe('Saturday');
    });
  });

  describe('get day difference', () => {
    it('should return 7', () => {
      const firstDay = new Date('07/01/2016');
      const secondDay = new Date('07/08/2016');
-      const difference = gl.utils.getDayDifference(firstDay, secondDay);
+      const difference = datetimeUtility.getDayDifference(firstDay, secondDay);
      expect(difference).toBe(7);
    });

    it('should return 31', () => {
      const firstDay = new Date('07/01/2016');
      const secondDay = new Date('08/01/2016');
-      const difference = gl.utils.getDayDifference(firstDay, secondDay);
+      const difference = datetimeUtility.getDayDifference(firstDay, secondDay);
      expect(difference).toBe(31);
    });

    it('should return 365', () => {
      const firstDay = new Date('07/02/2015');
      const secondDay = new Date('07/01/2016');
-      const difference = gl.utils.getDayDifference(firstDay, secondDay);
+      const difference = datetimeUtility.getDayDifference(firstDay, secondDay);
      expect(difference).toBe(365);
    });
  });
});

describe('timeIntervalInWords', () => {
  it('should return string with number of minutes and seconds', () => {
    expect(datetimeUtility.timeIntervalInWords(9.54)).toEqual('9 seconds');
    expect(datetimeUtility.timeIntervalInWords(1)).toEqual('1 second');
    expect(datetimeUtility.timeIntervalInWords(200)).toEqual('3 minutes 20 seconds');
    expect(datetimeUtility.timeIntervalInWords(6008)).toEqual('100 minutes 8 seconds');
  });
});

describe('dateInWords', () => {
  const date = new Date('07/01/2016');

  it('should return date in words', () => {
    expect(datetimeUtility.dateInWords(date)).toEqual('July 1, 2016');
  });

  it('should return abbreviated month name', () => {
    expect(datetimeUtility.dateInWords(date, true)).toEqual('Jul 1, 2016');
  });
});
-})();
import Vue from 'vue'; import Vue from 'vue';
import DeployKeysStore from '~/deploy_keys/store'; import DeployKeysStore from '~/deploy_keys/store';
import key from '~/deploy_keys/components/key.vue'; import key from '~/deploy_keys/components/key.vue';
import { getTimeago } from '~/lib/utils/datetime_utility';
describe('Deploy keys key', () => { describe('Deploy keys key', () => {
let vm; let vm;
...@@ -37,7 +38,7 @@ describe('Deploy keys key', () => { ...@@ -37,7 +38,7 @@ describe('Deploy keys key', () => {
it('renders human friendly formatted created date', () => { it('renders human friendly formatted created date', () => {
expect( expect(
vm.$el.querySelector('.key-created-at').textContent.trim(), vm.$el.querySelector('.key-created-at').textContent.trim(),
).toBe(`created ${gl.utils.getTimeago().format(deployKey.created_at)}`); ).toBe(`created ${getTimeago().format(deployKey.created_at)}`);
}); });
it('shows edit button', () => { it('shows edit button', () => {
......
...@@ -222,7 +222,6 @@ import '~/notes'; ...@@ -222,7 +222,6 @@ import '~/notes';
notes.note_ids = []; notes.note_ids = [];
notes.updatedNotesTrackingMap = {}; notes.updatedNotesTrackingMap = {};
spyOn(gl.utils, 'localTimeAgo');
spyOn(Notes, 'isNewNote').and.callThrough(); spyOn(Notes, 'isNewNote').and.callThrough();
spyOn(Notes, 'isUpdatedNote').and.callThrough(); spyOn(Notes, 'isUpdatedNote').and.callThrough();
spyOn(Notes, 'animateAppendNote').and.callThrough(); spyOn(Notes, 'animateAppendNote').and.callThrough();
...@@ -349,7 +348,6 @@ import '~/notes'; ...@@ -349,7 +348,6 @@ import '~/notes';
]); ]);
notes.note_ids = []; notes.note_ids = [];
spyOn(gl.utils, 'localTimeAgo');
spyOn(Notes, 'isNewNote'); spyOn(Notes, 'isNewNote');
spyOn(Notes, 'animateAppendNote'); spyOn(Notes, 'animateAppendNote');
Notes.isNewNote.and.returnValue(true); Notes.isNewNote.and.returnValue(true);
......
...@@ -2,6 +2,7 @@ import Vue from 'vue'; ...@@ -2,6 +2,7 @@ import Vue from 'vue';
import * as urlUtils from '~/lib/utils/url_utility'; import * as urlUtils from '~/lib/utils/url_utility';
import deploymentComponent from '~/vue_merge_request_widget/components/mr_widget_deployment'; import deploymentComponent from '~/vue_merge_request_widget/components/mr_widget_deployment';
import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service'; import MRWidgetService from '~/vue_merge_request_widget/services/mr_widget_service';
import { getTimeago } from '~/lib/utils/datetime_utility';
const deploymentMockData = [ const deploymentMockData = [
{ {
...@@ -49,7 +50,7 @@ describe('MRWidgetDeployment', () => { ...@@ -49,7 +50,7 @@ describe('MRWidgetDeployment', () => {
describe('formatDate', () => { describe('formatDate', () => {
it('should work', () => { it('should work', () => {
const readable = gl.utils.getTimeago().format(deployment.deployed_at); const readable = getTimeago().format(deployment.deployed_at);
expect(vm.formatDate(deployment.deployed_at)).toEqual(readable); expect(vm.formatDate(deployment.deployed_at)).toEqual(readable);
}); });
}); });
......
import Vue from 'vue'; import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import mrWidgetOptions from 'ee/vue_merge_request_widget/mr_widget_options'; import mrWidgetOptions from 'ee/vue_merge_request_widget/mr_widget_options';
import MRWidgetService from 'ee/vue_merge_request_widget/services/mr_widget_service'; import MRWidgetService from 'ee/vue_merge_request_widget/services/mr_widget_service';
import MRWidgetStore from 'ee/vue_merge_request_widget/stores/mr_widget_store'; import MRWidgetStore from 'ee/vue_merge_request_widget/stores/mr_widget_store';
...@@ -49,21 +51,16 @@ describe('ee merge request widget options', () => { ...@@ -49,21 +51,16 @@ describe('ee merge request widget options', () => {
}); });
describe('with successful request', () => { describe('with successful request', () => {
const interceptor = (request, next) => { let mock;
if (request.url === 'path.json') {
next(request.respondWith(JSON.stringify(securityIssues), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(interceptor); mock = new MockAdapter(axios);
mock.onGet('path.json').reply(200, securityIssues);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -77,21 +74,16 @@ describe('ee merge request widget options', () => { ...@@ -77,21 +74,16 @@ describe('ee merge request widget options', () => {
}); });
describe('with empty successful request', () => { describe('with empty successful request', () => {
const emptyInterceptor = (request, next) => { let mock;
if (request.url === 'path.json') {
next(request.respondWith(JSON.stringify([]), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(emptyInterceptor); mock = new MockAdapter(axios);
mock.onGet('path.json').reply(200, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, emptyInterceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -105,21 +97,16 @@ describe('ee merge request widget options', () => { ...@@ -105,21 +97,16 @@ describe('ee merge request widget options', () => {
}); });
describe('with failed request', () => { describe('with failed request', () => {
const errorInterceptor = (request, next) => { let mock;
if (request.url === 'path.json') {
next(request.respondWith(JSON.stringify([]), {
status: 500,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(errorInterceptor); mock = new MockAdapter(axios);
mock.onGet('path.json').reply(500, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, errorInterceptor); mock.reset();
}); });
it('should render error indicator', (done) => { it('should render error indicator', (done) => {
...@@ -157,27 +144,17 @@ describe('ee merge request widget options', () => { ...@@ -157,27 +144,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with successful request', () => { describe('with successful request', () => {
const interceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify(headIssues), {
status: 200,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify(baseIssues), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(interceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(200, headIssues);
mock.onGet('base.json').reply(200, baseIssues);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -218,27 +195,17 @@ describe('ee merge request widget options', () => { ...@@ -218,27 +195,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with empty successful request', () => { describe('with empty successful request', () => {
const emptyInterceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify([]), {
status: 200,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify([]), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(emptyInterceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(200, []);
mock.onGet('base.json').reply(200, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, emptyInterceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -252,27 +219,17 @@ describe('ee merge request widget options', () => { ...@@ -252,27 +219,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with failed request', () => { describe('with failed request', () => {
const errorInterceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify([]), {
status: 500,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify([]), {
status: 500,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(errorInterceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(500, []);
mock.onGet('base.json').reply(500, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, errorInterceptor); mock.reset();
}); });
it('should render error indicator', (done) => { it('should render error indicator', (done) => {
...@@ -308,27 +265,17 @@ describe('ee merge request widget options', () => { ...@@ -308,27 +265,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with successful request', () => { describe('with successful request', () => {
const interceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify(headPerformance), {
status: 200,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify(basePerformance), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(interceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(200, headPerformance);
mock.onGet('base.json').reply(200, basePerformance);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -370,27 +317,17 @@ describe('ee merge request widget options', () => { ...@@ -370,27 +317,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with empty successful request', () => { describe('with empty successful request', () => {
const emptyInterceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify([]), {
status: 200,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify([]), {
status: 200,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(emptyInterceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(200, []);
mock.onGet('base.json').reply(200, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, emptyInterceptor); mock.reset();
}); });
it('should render provided data', (done) => { it('should render provided data', (done) => {
...@@ -404,27 +341,17 @@ describe('ee merge request widget options', () => { ...@@ -404,27 +341,17 @@ describe('ee merge request widget options', () => {
}); });
describe('with failed request', () => { describe('with failed request', () => {
const errorInterceptor = (request, next) => { let mock;
if (request.url === 'head.json') {
next(request.respondWith(JSON.stringify([]), {
status: 500,
}));
}
if (request.url === 'base.json') {
next(request.respondWith(JSON.stringify([]), {
status: 500,
}));
}
};
beforeEach(() => { beforeEach(() => {
Vue.http.interceptors.push(errorInterceptor); mock = new MockAdapter(axios);
mock.onGet('head.json').reply(500, []);
mock.onGet('base.json').reply(500, []);
vm = mountComponent(Component); vm = mountComponent(Component);
}); });
afterEach(() => { afterEach(() => {
Vue.http.interceptors = _.without(Vue.http.interceptors, errorInterceptor); mock.reset();
}); });
it('should render error indicator', (done) => { it('should render error indicator', (done) => {
......
import Vue from 'vue'; import Vue from 'vue';
import timeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue'; import timeagoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import '~/lib/utils/datetime_utility'; import { formatDate, getTimeago } from '~/lib/utils/datetime_utility';
describe('Time ago with tooltip component', () => { describe('Time ago with tooltip component', () => {
let TimeagoTooltip; let TimeagoTooltip;
...@@ -24,10 +24,10 @@ describe('Time ago with tooltip component', () => { ...@@ -24,10 +24,10 @@ describe('Time ago with tooltip component', () => {
expect(vm.$el.tagName).toEqual('TIME'); expect(vm.$el.tagName).toEqual('TIME');
expect( expect(
vm.$el.getAttribute('data-original-title'), vm.$el.getAttribute('data-original-title'),
).toEqual(gl.utils.formatDate('2017-05-08T14:57:39.781Z')); ).toEqual(formatDate('2017-05-08T14:57:39.781Z'));
expect(vm.$el.getAttribute('data-placement')).toEqual('top'); expect(vm.$el.getAttribute('data-placement')).toEqual('top');
const timeago = gl.utils.getTimeago(); const timeago = getTimeago();
expect(vm.$el.textContent.trim()).toEqual(timeago.format('2017-05-08T14:57:39.781Z')); expect(vm.$el.textContent.trim()).toEqual(timeago.format('2017-05-08T14:57:39.781Z'));
}); });
......
...@@ -11,28 +11,41 @@ describe Gitlab::SidekiqCluster::CLI do
  end

  context 'with arguments' do
-    it 'starts the Sidekiq workers' do
-      expect(Gitlab::SidekiqCluster).to receive(:start).and_return([])
+    before do
      expect(cli).to receive(:write_pid)
      expect(cli).to receive(:trap_signals)
      expect(cli).to receive(:start_loop)
+    end
+
+    it 'starts the Sidekiq workers' do
+      expect(Gitlab::SidekiqCluster).to receive(:start)
+        .with([['foo']], 'test', Dir.pwd, dryrun: false)
+        .and_return([])

      cli.run(%w(foo))
    end

    context 'with --negate flag' do
-      it 'starts Sidekiq workers for all queues on sidekiq_queues.yml except the ones on argv' do
-        expect(Gitlab::SidekiqConfig).to receive(:config_queues).and_return(['baz'])
+      it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do
+        expect(Gitlab::SidekiqConfig).to receive(:worker_queues).and_return(['baz'])
        expect(Gitlab::SidekiqCluster).to receive(:start)
          .with([['baz']], 'test', Dir.pwd, dryrun: false)
          .and_return([])
-        expect(cli).to receive(:write_pid)
-        expect(cli).to receive(:trap_signals)
-        expect(cli).to receive(:start_loop)

        cli.run(%w(foo -n))
      end
    end
+
+    context 'queue namespace expansion' do
+      it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do
+        expect(Gitlab::SidekiqConfig).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar'])
+        expect(Gitlab::SidekiqCluster).to receive(:start)
+          .with([['cronjob', 'cronjob:foo', 'cronjob:bar']], 'test', Dir.pwd, dryrun: false)
+          .and_return([])
+
+        cli.run(%w(cronjob))
+      end
+    end
  end
end
......
...@@ -23,7 +23,7 @@ describe Gitlab::SidekiqConfig do ...@@ -23,7 +23,7 @@ describe Gitlab::SidekiqConfig do
expect(queues).to include('post_receive') expect(queues).to include('post_receive')
expect(queues).to include('merge') expect(queues).to include('merge')
expect(queues).to include('cronjob') expect(queues).to include('cronjob:stuck_import_jobs')
expect(queues).to include('mailers') expect(queues).to include('mailers')
expect(queues).to include('default') expect(queues).to include('default')
end end
...@@ -35,4 +35,25 @@ describe Gitlab::SidekiqConfig do ...@@ -35,4 +35,25 @@ describe Gitlab::SidekiqConfig do
expect(queues).to include('ldap_group_sync') expect(queues).to include('ldap_group_sync')
end end
end end
describe '.expand_queues' do
it 'expands queue namespaces to concrete queue names' do
queues = described_class.expand_queues(%w[cronjob])
expect(queues).to include('cronjob:stuck_import_jobs')
expect(queues).to include('cronjob:stuck_merge_jobs')
end
it 'lets concrete queue names pass through' do
queues = described_class.expand_queues(%w[post_receive])
expect(queues).to include('post_receive')
end
it 'lets unknown queues pass through' do
queues = described_class.expand_queues(%w[unknown])
expect(queues).to include('unknown')
end
end
end end
require 'spec_helper'
describe Gitlab::SidekiqVersioning::Manager do
before do
Sidekiq::Manager.prepend described_class
end
describe '#initialize' do
it 'listens on all expanded queues' do
manager = Sidekiq::Manager.new(queues: %w[post_receive repository_fork cronjob unknown])
queues = manager.options[:queues]
expect(queues).to include('post_receive')
expect(queues).to include('repository_fork')
expect(queues).to include('cronjob')
expect(queues).to include('cronjob:stuck_import_jobs')
expect(queues).to include('cronjob:stuck_merge_jobs')
expect(queues).to include('unknown')
end
end
end
require 'spec_helper'
describe Gitlab::SidekiqVersioning, :sidekiq, :redis do
let(:foo_worker) do
Class.new do
def self.name
'FooWorker'
end
include ApplicationWorker
end
end
let(:bar_worker) do
Class.new do
def self.name
'BarWorker'
end
include ApplicationWorker
end
end
before do
allow(Gitlab::SidekiqConfig).to receive(:workers).and_return([foo_worker, bar_worker])
allow(Gitlab::SidekiqConfig).to receive(:worker_queues).and_return([foo_worker.queue, bar_worker.queue])
end
describe '.install!' do
it 'prepends SidekiqVersioning::Manager into Sidekiq::Manager' do
described_class.install!
expect(Sidekiq::Manager).to include(Gitlab::SidekiqVersioning::Manager)
end
it 'registers all versionless and versioned queues with Redis' do
described_class.install!
queues = Sidekiq::Queue.all.map(&:name)
expect(queues).to include('foo')
expect(queues).to include('bar')
end
end
end
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/include_sidekiq_worker'
describe RuboCop::Cop::IncludeSidekiqWorker do
include CopHelper
subject(:cop) { described_class.new }
context 'when `Sidekiq::Worker` is included' do
let(:source) { 'include Sidekiq::Worker' }
let(:correct_source) { 'include ApplicationWorker' }
it 'registers an offense ' do
inspect_source(cop, source)
aggregate_failures do
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.map(&:line)).to eq([1])
expect(cop.highlights).to eq(['Sidekiq::Worker'])
end
end
it 'autocorrects to the right version' do
autocorrected = autocorrect_source(cop, source)
expect(autocorrected).to eq(correct_source)
end
end
end
require 'spec_helper'
require 'rubocop'
require 'rubocop/rspec/support'
require_relative '../../../rubocop/cop/sidekiq_options_queue'
describe RuboCop::Cop::SidekiqOptionsQueue do
include CopHelper
subject(:cop) { described_class.new }
it 'registers an offense when `sidekiq_options` is used with the `queue` option' do
inspect_source(cop, 'sidekiq_options queue: "some_queue"')
aggregate_failures do
expect(cop.offenses.size).to eq(1)
expect(cop.offenses.map(&:line)).to eq([1])
expect(cop.highlights).to eq(['queue: "some_queue"'])
end
end
it 'does not register an offense when `sidekiq_options` is used with another option' do
inspect_source(cop, 'sidekiq_options retry: false')
expect(cop.offenses).to be_empty
end
end
...@@ -17,6 +17,14 @@ describe ApplicationWorker do ...@@ -17,6 +17,14 @@ describe ApplicationWorker do
end end
end end
describe '.queue_namespace' do
it 'sets the queue name based on the class name' do
worker.queue_namespace :some_namespace
expect(worker.queue).to eq('some_namespace:foo_bar_dummy')
end
end
describe '.queue' do describe '.queue' do
it 'returns the queue name' do it 'returns the queue name' do
worker.sidekiq_options queue: :some_queue worker.sidekiq_options queue: :some_queue
......
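The namespaced queue names expected by the spec above and the queue-concern specs that follow can be pictured with a hypothetical worker; the base name is assumed to come from the underscored class name, as the `some_namespace:foo_bar_dummy` expectation suggests:

class FooBarDummyWorker
  include ApplicationWorker

  queue_namespace :some_namespace
end

FooBarDummyWorker.queue # => "some_namespace:foo_bar_dummy"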
...@@ -14,6 +14,6 @@ describe ClusterQueue do ...@@ -14,6 +14,6 @@ describe ClusterQueue do
it 'sets a default pipelines queue automatically' do it 'sets a default pipelines queue automatically' do
expect(worker.sidekiq_options['queue']) expect(worker.sidekiq_options['queue'])
.to eq :gcp_cluster .to eq 'gcp_cluster:dummy'
end end
end end
...@@ -13,7 +13,7 @@ describe CronjobQueue do ...@@ -13,7 +13,7 @@ describe CronjobQueue do
end end
it 'sets the queue name of a worker' do it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob') expect(worker.sidekiq_options['queue'].to_s).to eq('cronjob:dummy')
end end
it 'disables retrying of failed jobs' do it 'disables retrying of failed jobs' do
......
...@@ -13,6 +13,6 @@ describe GeoQueue do ...@@ -13,6 +13,6 @@ describe GeoQueue do
end end
it 'sets the queue name of a worker' do it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('geo') expect(worker.sidekiq_options['queue'].to_s).to eq('geo:dummy')
end end
end end
...@@ -11,6 +11,6 @@ describe Gitlab::GithubImport::Queue do ...@@ -11,6 +11,6 @@ describe Gitlab::GithubImport::Queue do
include Gitlab::GithubImport::Queue include Gitlab::GithubImport::Queue
end end
expect(worker.sidekiq_options['queue']).to eq('github_importer') expect(worker.sidekiq_options['queue']).to eq('github_importer:dummy')
end end
end end
...@@ -14,15 +14,6 @@ describe PipelineQueue do ...@@ -14,15 +14,6 @@ describe PipelineQueue do
it 'sets a default pipelines queue automatically' do it 'sets a default pipelines queue automatically' do
expect(worker.sidekiq_options['queue']) expect(worker.sidekiq_options['queue'])
.to eq 'pipeline_default' .to eq 'pipeline_default:dummy'
end
describe '.enqueue_in' do
it 'sets a custom sidekiq queue with prefix and group' do
worker.enqueue_in(group: :processing)
expect(worker.sidekiq_options['queue'])
.to eq 'pipeline_processing'
end
end end
end end
...@@ -13,7 +13,7 @@ describe RepositoryCheckQueue do ...@@ -13,7 +13,7 @@ describe RepositoryCheckQueue do
end end
it 'sets the queue name of a worker' do it 'sets the queue name of a worker' do
expect(worker.sidekiq_options['queue'].to_s).to eq('repository_check') expect(worker.sidekiq_options['queue'].to_s).to eq('repository_check:dummy')
end end
it 'disables retrying of failed jobs' do it 'disables retrying of failed jobs' do
......
require 'spec_helper'

describe 'Every Sidekiq worker' do
+  it 'includes ApplicationWorker' do
+    expect(Gitlab::SidekiqConfig.workers).to all(include(ApplicationWorker))
+  end
+
  it 'does not use the default queue' do
    expect(Gitlab::SidekiqConfig.workers.map(&:queue)).not_to include('default')
  end

  it 'uses the cronjob queue when the worker runs as a cronjob' do
-    expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(eq('cronjob'))
+    expect(Gitlab::SidekiqConfig.cron_workers.map(&:queue)).to all(start_with('cronjob:'))
  end

-  it 'defines the queue in the Sidekiq configuration file' do
-    config_queue_names = Gitlab::SidekiqConfig.config_queues.to_set
-
-    expect(Gitlab::SidekiqConfig.worker_queues).to all(be_in(config_queue_names))
-  end
+  it 'has its queue in app/workers/all_queues.yml', :aggregate_failures do
+    file_worker_queues = Gitlab::SidekiqConfig.worker_queues.to_set
+
+    worker_queues = Gitlab::SidekiqConfig.workers.map(&:queue).to_set
+    worker_queues << ActionMailer::DeliveryJob.queue_name
+    worker_queues << 'default'
+
+    missing_from_file = worker_queues - file_worker_queues
+    expect(missing_from_file).to be_empty, "expected #{missing_from_file.to_a.inspect} to be in app/workers/all_queues.yml"
+
+    unncessarily_in_file = file_worker_queues - worker_queues
+    expect(unncessarily_in_file).to be_empty, "expected #{unncessarily_in_file.to_a.inspect} not to be in app/workers/all_queues.yml"
+  end
+
+  it 'has its queue or namespace in config/sidekiq_queues.yml', :aggregate_failures do
+    config_queues = Gitlab::SidekiqConfig.config_queues.to_set
+
+    Gitlab::SidekiqConfig.workers.each do |worker|
+      queue = worker.queue
+      queue_namespace = queue.split(':').first
+
+      expect(config_queues).to include(queue).or(include(queue_namespace))
+    end
  end
end