Commit 7f3f48f4 authored by GitLab Bot's avatar GitLab Bot

Merge remote-tracking branch 'upstream/master' into ce-to-ee-2018-07-23

# Conflicts:
#	doc/ci/variables/README.md
#	locale/gitlab.pot

[ci skip]
parents 28ae1ad1 e23b966d
...@@ -376,8 +376,14 @@ on those issues. Please select someone with relevant experience from the ...@@ -376,8 +376,14 @@ on those issues. Please select someone with relevant experience from the
[GitLab team][team]. If there is nobody mentioned with that expertise look in [GitLab team][team]. If there is nobody mentioned with that expertise look in
the commit history for the affected files to find someone. the commit history for the affected files to find someone.
We also use [GitLab Triage] to automate some triaging policies. This is
currently setup as a [scheduled pipeline] running on the [`gl-triage`] branch.
[described in our handbook]: https://about.gitlab.com/handbook/engineering/issue-triage/ [described in our handbook]: https://about.gitlab.com/handbook/engineering/issue-triage/
[issue bash events]: https://gitlab.com/gitlab-org/gitlab-ce/issues/17815 [issue bash events]: https://gitlab.com/gitlab-org/gitlab-ce/issues/17815
[GitLab Triage]: https://gitlab.com/gitlab-org/gitlab-triage
[scheduled pipeline]: https://gitlab.com/gitlab-org/gitlab-ce/pipeline_schedules/3732/edit
[`gl-triage`]: https://gitlab.com/gitlab-org/gitlab-ce/tree/gl-triage
### Feature proposals ### Feature proposals
......
...@@ -71,6 +71,11 @@ export default { ...@@ -71,6 +71,11 @@ export default {
required: false, required: false,
default: false, default: false,
}, },
discussions: {
type: Array,
required: false,
default: () => [],
},
}, },
computed: { computed: {
...mapState({ ...mapState({
...@@ -78,7 +83,6 @@ export default { ...@@ -78,7 +83,6 @@ export default {
diffFiles: state => state.diffs.diffFiles, diffFiles: state => state.diffs.diffFiles,
}), }),
...mapGetters(['isLoggedIn']), ...mapGetters(['isLoggedIn']),
...mapGetters('diffs', ['discussionsByLineCode']),
lineHref() { lineHref() {
return this.lineCode ? `#${this.lineCode}` : '#'; return this.lineCode ? `#${this.lineCode}` : '#';
}, },
...@@ -88,24 +92,19 @@ export default { ...@@ -88,24 +92,19 @@ export default {
this.showCommentButton && this.showCommentButton &&
!this.isMatchLine && !this.isMatchLine &&
!this.isContextLine && !this.isContextLine &&
!this.hasDiscussions && !this.isMetaLine &&
!this.isMetaLine !this.hasDiscussions
); );
}, },
discussions() {
return this.discussionsByLineCode[this.lineCode] || [];
},
hasDiscussions() { hasDiscussions() {
return this.discussions.length > 0; return this.discussions.length > 0;
}, },
shouldShowAvatarsOnGutter() { shouldShowAvatarsOnGutter() {
let render = this.hasDiscussions && this.showCommentButton;
if (!this.lineType && this.linePosition === LINE_POSITION_RIGHT) { if (!this.lineType && this.linePosition === LINE_POSITION_RIGHT) {
render = false; return false;
} }
return render; return this.hasDiscussions && this.showCommentButton;
}, },
}, },
methods: { methods: {
......
...@@ -67,6 +67,11 @@ export default { ...@@ -67,6 +67,11 @@ export default {
required: false, required: false,
default: false, default: false,
}, },
discussions: {
type: Array,
required: false,
default: () => [],
},
}, },
computed: { computed: {
...mapGetters(['isLoggedIn']), ...mapGetters(['isLoggedIn']),
...@@ -136,6 +141,7 @@ export default { ...@@ -136,6 +141,7 @@ export default {
:is-match-line="isMatchLine" :is-match-line="isMatchLine"
:is-context-line="isContentLine" :is-context-line="isContentLine"
:is-meta-line="isMetaLine" :is-meta-line="isMetaLine"
:discussions="discussions"
/> />
</td> </td>
</template> </template>
<script> <script>
import { mapState, mapGetters } from 'vuex'; import { mapState } from 'vuex';
import diffDiscussions from './diff_discussions.vue'; import diffDiscussions from './diff_discussions.vue';
import diffLineNoteForm from './diff_line_note_form.vue'; import diffLineNoteForm from './diff_line_note_form.vue';
...@@ -21,15 +21,16 @@ export default { ...@@ -21,15 +21,16 @@ export default {
type: Number, type: Number,
required: true, required: true,
}, },
discussions: {
type: Array,
required: false,
default: () => [],
},
}, },
computed: { computed: {
...mapState({ ...mapState({
diffLineCommentForms: state => state.diffs.diffLineCommentForms, diffLineCommentForms: state => state.diffs.diffLineCommentForms,
}), }),
...mapGetters('diffs', ['discussionsByLineCode']),
discussions() {
return this.discussionsByLineCode[this.line.lineCode] || [];
},
className() { className() {
return this.discussions.length ? '' : 'js-temp-notes-holder'; return this.discussions.length ? '' : 'js-temp-notes-holder';
}, },
......
...@@ -33,6 +33,11 @@ export default { ...@@ -33,6 +33,11 @@ export default {
required: false, required: false,
default: false, default: false,
}, },
discussions: {
type: Array,
required: false,
default: () => [],
},
}, },
data() { data() {
return { return {
...@@ -89,6 +94,7 @@ export default { ...@@ -89,6 +94,7 @@ export default {
:is-bottom="isBottom" :is-bottom="isBottom"
:is-hover="isHover" :is-hover="isHover"
:show-comment-button="true" :show-comment-button="true"
:discussions="discussions"
class="diff-line-num old_line" class="diff-line-num old_line"
/> />
<diff-table-cell <diff-table-cell
...@@ -98,6 +104,7 @@ export default { ...@@ -98,6 +104,7 @@ export default {
:line-type="newLineType" :line-type="newLineType"
:is-bottom="isBottom" :is-bottom="isBottom"
:is-hover="isHover" :is-hover="isHover"
:discussions="discussions"
class="diff-line-num new_line" class="diff-line-num new_line"
/> />
<td <td
......
...@@ -20,7 +20,11 @@ export default { ...@@ -20,7 +20,11 @@ export default {
}, },
}, },
computed: { computed: {
...mapGetters('diffs', ['commitId', 'discussionsByLineCode']), ...mapGetters('diffs', [
'commitId',
'shouldRenderInlineCommentRow',
'singleDiscussionByLineCode',
]),
...mapState({ ...mapState({
diffLineCommentForms: state => state.diffs.diffLineCommentForms, diffLineCommentForms: state => state.diffs.diffLineCommentForms,
}), }),
...@@ -34,18 +38,7 @@ export default { ...@@ -34,18 +38,7 @@ export default {
return window.gon.user_color_scheme; return window.gon.user_color_scheme;
}, },
}, },
methods: { methods: {},
shouldRenderCommentRow(line) {
if (this.diffLineCommentForms[line.lineCode]) return true;
const lineDiscussions = this.discussionsByLineCode[line.lineCode];
if (lineDiscussions === undefined) {
return false;
}
return lineDiscussions.every(discussion => discussion.expanded);
},
},
}; };
</script> </script>
...@@ -64,13 +57,15 @@ export default { ...@@ -64,13 +57,15 @@ export default {
:line="line" :line="line"
:is-bottom="index + 1 === diffLinesLength" :is-bottom="index + 1 === diffLinesLength"
:key="line.lineCode" :key="line.lineCode"
:discussions="singleDiscussionByLineCode(line.lineCode)"
/> />
<inline-diff-comment-row <inline-diff-comment-row
v-if="shouldRenderCommentRow(line)" v-if="shouldRenderInlineCommentRow(line)"
:diff-file-hash="diffFile.fileHash" :diff-file-hash="diffFile.fileHash"
:line="line" :line="line"
:line-index="index" :line-index="index"
:key="index" :key="index"
:discussions="singleDiscussionByLineCode(line.lineCode)"
/> />
</template> </template>
</tbody> </tbody>
......
<script> <script>
import { mapState, mapGetters } from 'vuex'; import { mapState } from 'vuex';
import diffDiscussions from './diff_discussions.vue'; import diffDiscussions from './diff_discussions.vue';
import diffLineNoteForm from './diff_line_note_form.vue'; import diffLineNoteForm from './diff_line_note_form.vue';
...@@ -21,48 +21,51 @@ export default { ...@@ -21,48 +21,51 @@ export default {
type: Number, type: Number,
required: true, required: true,
}, },
leftDiscussions: {
type: Array,
required: false,
default: () => [],
},
rightDiscussions: {
type: Array,
required: false,
default: () => [],
},
}, },
computed: { computed: {
...mapState({ ...mapState({
diffLineCommentForms: state => state.diffs.diffLineCommentForms, diffLineCommentForms: state => state.diffs.diffLineCommentForms,
}), }),
...mapGetters('diffs', ['discussionsByLineCode']),
leftLineCode() { leftLineCode() {
return this.line.left.lineCode; return this.line.left.lineCode;
}, },
rightLineCode() { rightLineCode() {
return this.line.right.lineCode; return this.line.right.lineCode;
}, },
hasDiscussion() {
const discussions = this.discussionsByLineCode;
return discussions[this.leftLineCode] || discussions[this.rightLineCode];
},
hasExpandedDiscussionOnLeft() { hasExpandedDiscussionOnLeft() {
const discussions = this.discussionsByLineCode[this.leftLineCode]; const discussions = this.leftDiscussions;
return discussions ? discussions.every(discussion => discussion.expanded) : false; return discussions ? discussions.every(discussion => discussion.expanded) : false;
}, },
hasExpandedDiscussionOnRight() { hasExpandedDiscussionOnRight() {
const discussions = this.discussionsByLineCode[this.rightLineCode]; const discussions = this.rightDiscussions;
return discussions ? discussions.every(discussion => discussion.expanded) : false; return discussions ? discussions.every(discussion => discussion.expanded) : false;
}, },
hasAnyExpandedDiscussion() { hasAnyExpandedDiscussion() {
return this.hasExpandedDiscussionOnLeft || this.hasExpandedDiscussionOnRight; return this.hasExpandedDiscussionOnLeft || this.hasExpandedDiscussionOnRight;
}, },
shouldRenderDiscussionsOnLeft() { shouldRenderDiscussionsOnLeft() {
return this.discussionsByLineCode[this.leftLineCode] && this.hasExpandedDiscussionOnLeft; return this.leftDiscussions && this.hasExpandedDiscussionOnLeft;
}, },
shouldRenderDiscussionsOnRight() { shouldRenderDiscussionsOnRight() {
return ( return this.rightDiscussions && this.hasExpandedDiscussionOnRight && this.line.right.type;
this.discussionsByLineCode[this.rightLineCode] && },
this.hasExpandedDiscussionOnRight && showRightSideCommentForm() {
this.line.right.type return this.line.right.type && this.diffLineCommentForms[this.rightLineCode];
);
}, },
className() { className() {
return this.hasDiscussion ? '' : 'js-temp-notes-holder'; return this.leftDiscussions.length > 0 || this.rightDiscussions.length > 0
? ''
: 'js-temp-notes-holder';
}, },
}, },
}; };
...@@ -80,13 +83,12 @@ export default { ...@@ -80,13 +83,12 @@ export default {
class="content" class="content"
> >
<diff-discussions <diff-discussions
v-if="discussionsByLineCode[leftLineCode].length" v-if="leftDiscussions.length"
:discussions="discussionsByLineCode[leftLineCode]" :discussions="leftDiscussions"
/> />
</div> </div>
<diff-line-note-form <diff-line-note-form
v-if="diffLineCommentForms[leftLineCode] && v-if="diffLineCommentForms[leftLineCode]"
diffLineCommentForms[leftLineCode]"
:diff-file-hash="diffFileHash" :diff-file-hash="diffFileHash"
:line="line.left" :line="line.left"
:note-target-line="line.left" :note-target-line="line.left"
...@@ -100,13 +102,12 @@ export default { ...@@ -100,13 +102,12 @@ export default {
class="content" class="content"
> >
<diff-discussions <diff-discussions
v-if="discussionsByLineCode[rightLineCode].length" v-if="rightDiscussions.length"
:discussions="discussionsByLineCode[rightLineCode]" :discussions="rightDiscussions"
/> />
</div> </div>
<diff-line-note-form <diff-line-note-form
v-if="diffLineCommentForms[rightLineCode] && v-if="showRightSideCommentForm"
diffLineCommentForms[rightLineCode] && line.right.type"
:diff-file-hash="diffFileHash" :diff-file-hash="diffFileHash"
:line="line.right" :line="line.right"
:note-target-line="line.right" :note-target-line="line.right"
......
...@@ -36,6 +36,16 @@ export default { ...@@ -36,6 +36,16 @@ export default {
required: false, required: false,
default: false, default: false,
}, },
leftDiscussions: {
type: Array,
required: false,
default: () => [],
},
rightDiscussions: {
type: Array,
required: false,
default: () => [],
},
}, },
data() { data() {
return { return {
...@@ -116,6 +126,7 @@ export default { ...@@ -116,6 +126,7 @@ export default {
:is-hover="isLeftHover" :is-hover="isLeftHover"
:show-comment-button="true" :show-comment-button="true"
:diff-view-type="parallelDiffViewType" :diff-view-type="parallelDiffViewType"
:discussions="leftDiscussions"
class="diff-line-num old_line" class="diff-line-num old_line"
/> />
<td <td
...@@ -136,6 +147,7 @@ export default { ...@@ -136,6 +147,7 @@ export default {
:is-hover="isRightHover" :is-hover="isRightHover"
:show-comment-button="true" :show-comment-button="true"
:diff-view-type="parallelDiffViewType" :diff-view-type="parallelDiffViewType"
:discussions="rightDiscussions"
class="diff-line-num new_line" class="diff-line-num new_line"
/> />
<td <td
......
...@@ -21,7 +21,11 @@ export default { ...@@ -21,7 +21,11 @@ export default {
}, },
}, },
computed: { computed: {
...mapGetters('diffs', ['commitId', 'discussionsByLineCode']), ...mapGetters('diffs', [
'commitId',
'singleDiscussionByLineCode',
'shouldRenderParallelCommentRow',
]),
...mapState({ ...mapState({
diffLineCommentForms: state => state.diffs.diffLineCommentForms, diffLineCommentForms: state => state.diffs.diffLineCommentForms,
}), }),
...@@ -51,32 +55,6 @@ export default { ...@@ -51,32 +55,6 @@ export default {
return window.gon.user_color_scheme; return window.gon.user_color_scheme;
}, },
}, },
methods: {
shouldRenderCommentRow(line) {
const leftLineCode = line.left.lineCode;
const rightLineCode = line.right.lineCode;
const discussions = this.discussionsByLineCode;
const leftDiscussions = discussions[leftLineCode];
const rightDiscussions = discussions[rightLineCode];
const hasDiscussion = leftDiscussions || rightDiscussions;
const hasExpandedDiscussionOnLeft = leftDiscussions
? leftDiscussions.every(discussion => discussion.expanded)
: false;
const hasExpandedDiscussionOnRight = rightDiscussions
? rightDiscussions.every(discussion => discussion.expanded)
: false;
if (hasDiscussion && (hasExpandedDiscussionOnLeft || hasExpandedDiscussionOnRight)) {
return true;
}
const hasCommentFormOnLeft = this.diffLineCommentForms[leftLineCode];
const hasCommentFormOnRight = this.diffLineCommentForms[rightLineCode];
return hasCommentFormOnLeft || hasCommentFormOnRight;
},
},
}; };
</script> </script>
...@@ -97,13 +75,17 @@ export default { ...@@ -97,13 +75,17 @@ export default {
:line="line" :line="line"
:is-bottom="index + 1 === diffLinesLength" :is-bottom="index + 1 === diffLinesLength"
:key="index" :key="index"
:left-discussions="singleDiscussionByLineCode(line.left.lineCode)"
:right-discussions="singleDiscussionByLineCode(line.right.lineCode)"
/> />
<parallel-diff-comment-row <parallel-diff-comment-row
v-if="shouldRenderCommentRow(line)" v-if="shouldRenderParallelCommentRow(line)"
:key="`dcr-${index}`" :key="`dcr-${index}`"
:line="line" :line="line"
:diff-file-hash="diffFile.fileHash" :diff-file-hash="diffFile.fileHash"
:line-index="index" :line-index="index"
:left-discussions="singleDiscussionByLineCode(line.left.lineCode)"
:right-discussions="singleDiscussionByLineCode(line.right.lineCode)"
/> />
</template> </template>
</tbody> </tbody>
......
...@@ -75,19 +75,21 @@ export const discussionsByLineCode = (state, getters, rootState, rootGetters) => ...@@ -75,19 +75,21 @@ export const discussionsByLineCode = (state, getters, rootState, rootGetters) =>
const isDiffDiscussion = note.diff_discussion; const isDiffDiscussion = note.diff_discussion;
const hasLineCode = note.line_code; const hasLineCode = note.line_code;
const isResolvable = note.resolvable; const isResolvable = note.resolvable;
const diffRefs = diffRefsByLineCode[note.line_code];
if (isDiffDiscussion && hasLineCode && isResolvable && diffRefs) { if (isDiffDiscussion && hasLineCode && isResolvable) {
const refs = convertObjectPropsToCamelCase(note.position.formatter); const diffRefs = diffRefsByLineCode[note.line_code];
const originalRefs = convertObjectPropsToCamelCase(note.original_position.formatter); if (diffRefs) {
const refs = convertObjectPropsToCamelCase(note.position.formatter);
const originalRefs = convertObjectPropsToCamelCase(note.original_position.formatter);
if (_.isEqual(refs, diffRefs) || _.isEqual(originalRefs, diffRefs)) { if (_.isEqual(refs, diffRefs) || _.isEqual(originalRefs, diffRefs)) {
const lineCode = note.line_code; const lineCode = note.line_code;
if (acc[lineCode]) { if (acc[lineCode]) {
acc[lineCode].push(note); acc[lineCode].push(note);
} else { } else {
acc[lineCode] = [note]; acc[lineCode] = [note];
}
} }
} }
} }
...@@ -96,6 +98,47 @@ export const discussionsByLineCode = (state, getters, rootState, rootGetters) => ...@@ -96,6 +98,47 @@ export const discussionsByLineCode = (state, getters, rootState, rootGetters) =>
}, {}); }, {});
}; };
export const singleDiscussionByLineCode = (state, getters) => lineCode => {
if (!lineCode) return [];
const discussions = getters.discussionsByLineCode;
return discussions[lineCode] || [];
};
export const shouldRenderParallelCommentRow = (state, getters) => line => {
const leftLineCode = line.left.lineCode;
const rightLineCode = line.right.lineCode;
const leftDiscussions = getters.singleDiscussionByLineCode(leftLineCode);
const rightDiscussions = getters.singleDiscussionByLineCode(rightLineCode);
const hasDiscussion = leftDiscussions.length || rightDiscussions.length;
const hasExpandedDiscussionOnLeft = leftDiscussions.length
? leftDiscussions.every(discussion => discussion.expanded)
: false;
const hasExpandedDiscussionOnRight = rightDiscussions.length
? rightDiscussions.every(discussion => discussion.expanded)
: false;
if (hasDiscussion && (hasExpandedDiscussionOnLeft || hasExpandedDiscussionOnRight)) {
return true;
}
const hasCommentFormOnLeft = state.diffLineCommentForms[leftLineCode];
const hasCommentFormOnRight = state.diffLineCommentForms[rightLineCode];
return hasCommentFormOnLeft || hasCommentFormOnRight;
};
export const shouldRenderInlineCommentRow = (state, getters) => line => {
if (state.diffLineCommentForms[line.lineCode]) return true;
const lineDiscussions = getters.singleDiscussionByLineCode(line.lineCode);
if (lineDiscussions.length === 0) {
return false;
}
return lineDiscussions.every(discussion => discussion.expanded);
};
// prevent babel-plugin-rewire from generating an invalid default during karma∂ tests // prevent babel-plugin-rewire from generating an invalid default during karma∂ tests
export const getDiffFileByHash = state => fileHash => export const getDiffFileByHash = state => fileHash =>
state.diffFiles.find(file => file.fileHash === fileHash); state.diffFiles.find(file => file.fileHash === fileHash);
......
import $ from 'jquery'; import $ from 'jquery';
import { parseQueryStringIntoObject } from '~/lib/utils/common_utils'; import { parseQueryStringIntoObject } from '~/lib/utils/common_utils';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import flash from '~/flash'; import createFlash from '~/flash';
import { __ } from '~/locale'; import { __ } from '~/locale';
export default class GpgBadges { export default class GpgBadges {
static fetch() { static fetch() {
const badges = $('.js-loading-gpg-badge');
const tag = $('.js-signature-container'); const tag = $('.js-signature-container');
if (tag.length === 0) {
return Promise.resolve();
}
const badges = $('.js-loading-gpg-badge');
badges.html('<i class="fa fa-spinner fa-spin"></i>'); badges.html('<i class="fa fa-spinner fa-spin"></i>');
const displayError = () => createFlash(__('An error occurred while loading commit signatures'));
const endpoint = tag.data('signaturesPath');
if (!endpoint) {
displayError();
return Promise.reject(new Error('Missing commit signatures endpoint!'));
}
const params = parseQueryStringIntoObject(tag.serialize()); const params = parseQueryStringIntoObject(tag.serialize());
return axios.get(tag.data('signaturesPath'), { params }) return axios
.then(({ data }) => { .get(endpoint, { params })
data.signatures.forEach((signature) => { .then(({ data }) => {
badges.filter(`[data-commit-sha="${signature.commit_sha}"]`).replaceWith(signature.html); data.signatures.forEach(signature => {
}); badges.filter(`[data-commit-sha="${signature.commit_sha}"]`).replaceWith(signature.html);
}) });
.catch(() => flash(__('An error occurred while loading commits'))); })
.catch(displayError);
} }
} }
...@@ -2,6 +2,7 @@ import Vue from 'vue'; ...@@ -2,6 +2,7 @@ import Vue from 'vue';
import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import BlobViewer from '~/blob/viewer/index'; import BlobViewer from '~/blob/viewer/index';
import initBlob from '~/pages/projects/init_blob'; import initBlob from '~/pages/projects/init_blob';
import GpgBadges from '~/gpg_badges';
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
new BlobViewer(); // eslint-disable-line no-new new BlobViewer(); // eslint-disable-line no-new
...@@ -26,4 +27,6 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -26,4 +27,6 @@ document.addEventListener('DOMContentLoaded', () => {
}, },
}); });
} }
GpgBadges.fetch();
}); });
...@@ -7,6 +7,7 @@ import TreeView from '~/tree'; ...@@ -7,6 +7,7 @@ import TreeView from '~/tree';
import BlobViewer from '~/blob/viewer/index'; import BlobViewer from '~/blob/viewer/index';
import Activities from '~/activities'; import Activities from '~/activities';
import { ajaxGet } from '~/lib/utils/common_utils'; import { ajaxGet } from '~/lib/utils/common_utils';
import GpgBadges from '~/gpg_badges';
import Star from '../../../star'; import Star from '../../../star';
import notificationsDropdown from '../../../notifications_dropdown'; import notificationsDropdown from '../../../notifications_dropdown';
...@@ -38,4 +39,6 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -38,4 +39,6 @@ document.addEventListener('DOMContentLoaded', () => {
$(treeSlider).waitForImages(() => { $(treeSlider).waitForImages(() => {
ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath); ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath);
}); });
GpgBadges.fetch();
}); });
...@@ -2,6 +2,7 @@ import $ from 'jquery'; ...@@ -2,6 +2,7 @@ import $ from 'jquery';
import Vue from 'vue'; import Vue from 'vue';
import initBlob from '~/blob_edit/blob_bundle'; import initBlob from '~/blob_edit/blob_bundle';
import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue'; import commitPipelineStatus from '~/projects/tree/components/commit_pipeline_status_component.vue';
import GpgBadges from '~/gpg_badges';
import TreeView from '../../../../tree'; import TreeView from '../../../../tree';
import ShortcutsNavigation from '../../../../shortcuts_navigation'; import ShortcutsNavigation from '../../../../shortcuts_navigation';
import BlobViewer from '../../../../blob/viewer'; import BlobViewer from '../../../../blob/viewer';
...@@ -14,7 +15,8 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -14,7 +15,8 @@ document.addEventListener('DOMContentLoaded', () => {
new BlobViewer(); // eslint-disable-line no-new new BlobViewer(); // eslint-disable-line no-new
new NewCommitForm($('.js-create-dir-form')); // eslint-disable-line no-new new NewCommitForm($('.js-create-dir-form')); // eslint-disable-line no-new
$('#tree-slider').waitForImages(() => $('#tree-slider').waitForImages(() =>
ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath)); ajaxGet(document.querySelector('.js-tree-content').dataset.logsPath),
);
initBlob(); initBlob();
const commitPipelineStatusEl = document.querySelector('.js-commit-pipeline-status'); const commitPipelineStatusEl = document.querySelector('.js-commit-pipeline-status');
...@@ -36,4 +38,6 @@ document.addEventListener('DOMContentLoaded', () => { ...@@ -36,4 +38,6 @@ document.addEventListener('DOMContentLoaded', () => {
}, },
}); });
} }
GpgBadges.fetch();
}); });
<script> <script>
import Visibility from 'visibilityjs'; import Visibility from 'visibilityjs';
import ciIcon from '~/vue_shared/components/ci_icon.vue'; import ciIcon from '~/vue_shared/components/ci_icon.vue';
import loadingIcon from '~/vue_shared/components/loading_icon.vue'; import loadingIcon from '~/vue_shared/components/loading_icon.vue';
import Poll from '~/lib/utils/poll'; import Poll from '~/lib/utils/poll';
import Flash from '~/flash'; import Flash from '~/flash';
import { s__, sprintf } from '~/locale'; import { s__, sprintf } from '~/locale';
import tooltip from '~/vue_shared/directives/tooltip'; import tooltip from '~/vue_shared/directives/tooltip';
import CommitPipelineService from '../services/commit_pipeline_service'; import CommitPipelineService from '../services/commit_pipeline_service';
export default { export default {
directives: { directives: {
tooltip, tooltip,
},
components: {
ciIcon,
loadingIcon,
},
props: {
endpoint: {
type: String,
required: true,
}, },
components: { /* This prop can be used to replace some of the `render_commit_status`
ciIcon,
loadingIcon,
},
props: {
endpoint: {
type: String,
required: true,
},
/* This prop can be used to replace some of the `render_commit_status`
used across GitLab, this way we could use this vue component and add a used across GitLab, this way we could use this vue component and add a
realtime status where it makes sense realtime status where it makes sense
realtime: { realtime: {
...@@ -29,76 +29,77 @@ ...@@ -29,76 +29,77 @@
required: false, required: false,
default: true, default: true,
}, */ }, */
},
data() {
return {
ciStatus: {},
isLoading: true,
};
},
computed: {
statusTitle() {
return sprintf(s__('Commits|Commit: %{commitText}'), { commitText: this.ciStatus.text });
}, },
data() { },
return { mounted() {
ciStatus: {}, this.service = new CommitPipelineService(this.endpoint);
isLoading: true, this.initPolling();
}; },
}, methods: {
computed: { successCallback(res) {
statusTitle() { const { pipelines } = res.data;
return sprintf(s__('Commits|Commit: %{commitText}'), { commitText: this.ciStatus.text }); if (pipelines.length > 0) {
}, // The pipeline entity always keeps the latest pipeline info on the `details.status`
this.ciStatus = pipelines[0].details.status;
}
this.isLoading = false;
}, },
mounted() { errorCallback() {
this.service = new CommitPipelineService(this.endpoint); this.ciStatus = {
this.initPolling(); text: 'not found',
icon: 'status_notfound',
group: 'notfound',
};
this.isLoading = false;
Flash(s__('Something went wrong on our end'));
}, },
methods: { initPolling() {
successCallback(res) { this.poll = new Poll({
const { pipelines } = res.data; resource: this.service,
if (pipelines.length > 0) { method: 'fetchData',
// The pipeline entity always keeps the latest pipeline info on the `details.status` successCallback: response => this.successCallback(response),
this.ciStatus = pipelines[0].details.status; errorCallback: this.errorCallback,
} });
this.isLoading = false;
}, if (!Visibility.hidden()) {
errorCallback() { this.isLoading = true;
this.ciStatus = { this.poll.makeRequest();
text: 'not found', } else {
icon: 'status_notfound', this.fetchPipelineCommitData();
group: 'notfound', }
};
this.isLoading = false;
Flash(s__('Something went wrong on our end'));
},
initPolling() {
this.poll = new Poll({
resource: this.service,
method: 'fetchData',
successCallback: response => this.successCallback(response),
errorCallback: this.errorCallback,
});
Visibility.change(() => {
if (!Visibility.hidden()) { if (!Visibility.hidden()) {
this.isLoading = true; this.poll.restart();
this.poll.makeRequest();
} else { } else {
this.fetchPipelineCommitData(); this.poll.stop();
} }
});
Visibility.change(() => {
if (!Visibility.hidden()) {
this.poll.restart();
} else {
this.poll.stop();
}
});
},
fetchPipelineCommitData() {
this.service.fetchData()
.then(this.successCallback)
.catch(this.errorCallback);
},
}, },
destroy() { fetchPipelineCommitData() {
this.poll.stop(); this.service
.fetchData()
.then(this.successCallback)
.catch(this.errorCallback);
}, },
}; },
destroy() {
this.poll.stop();
},
};
</script> </script>
<template> <template>
<div> <div class="ci-status-link">
<loading-icon <loading-icon
v-if="isLoading" v-if="isLoading"
label="Loading pipeline status" label="Loading pipeline status"
...@@ -113,6 +114,7 @@ ...@@ -113,6 +114,7 @@
:title="statusTitle" :title="statusTitle"
:aria-label="statusTitle" :aria-label="statusTitle"
:status="ciStatus" :status="ciStatus"
:size="24"
data-container="body" data-container="body"
/> />
</a> </a>
......
import Visibility from 'visibilityjs';
import axios from '../../lib/utils/axios_utils';
import Poll from '../../lib/utils/poll';
import * as types from './mutation_types';
export const setEndpoint = ({ commit }, endpoint) => commit(types.SET_ENDPOINT, endpoint);
export const requestReports = ({ commit }) => commit(types.REQUEST_REPORTS);
let eTagPoll;
export const clearEtagPoll = () => {
eTagPoll = null;
};
export const stopPolling = () => {
if (eTagPoll) eTagPoll.stop();
};
export const restartPolling = () => {
if (eTagPoll) eTagPoll.restart();
};
/**
* We need to poll the reports endpoint while they are being parsed in the Backend.
* This can take up to one minute.
*
* Poll.js will handle etag response.
* While http status code is 204, it means it's parsing, and we'll keep polling
* When http status code is 200, it means parsing is done, we can show the results & stop polling
* When http status code is 500, it means parsing went wrong and we stop polling
*/
export const fetchReports = ({ state, dispatch }) => {
dispatch('requestReports');
eTagPoll = new Poll({
resource: {
getReports(endpoint) {
return axios.get(endpoint);
},
},
data: state.endpoint,
method: 'getReports',
successCallback: ({ data }) => dispatch('receiveReportsSuccess', data),
errorCallback: () => dispatch('receiveReportsError'),
});
if (!Visibility.hidden()) {
eTagPoll.makeRequest();
}
Visibility.change(() => {
if (!Visibility.hidden()) {
dispatch('restartPolling');
} else {
dispatch('stopPolling');
}
});
};
export const receiveReportsSuccess = ({ commit }, response) =>
commit(types.RECEIVE_REPORTS_SUCCESS, response);
export const receiveReportsError = ({ commit }) => commit(types.RECEIVE_REPORTS_ERROR);
// prevent babel-plugin-rewire from generating an invalid default during karma tests
export default () => {};
import Vue from 'vue';
import Vuex from 'vuex';
import * as actions from './actions';
import mutations from './mutations';
import state from './state';
Vue.use(Vuex);
export default () => new Vuex.Store({
actions,
mutations,
state: state(),
});
export const SET_ENDPOINT = 'SET_ENDPOINT';
export const REQUEST_REPORTS = 'REQUEST_REPORTS';
export const RECEIVE_REPORTS_SUCCESS = 'RECEIVE_REPORTS_SUCCESS';
export const RECEIVE_REPORTS_ERROR = 'RECEIVE_REPORTS_ERROR';
/* eslint-disable no-param-reassign */
import * as types from './mutation_types';
export default {
[types.SET_ENDPOINT](state, endpoint) {
state.endpoint = endpoint;
},
[types.REQUEST_REPORTS](state) {
state.isLoading = true;
},
[types.RECEIVE_REPORTS_SUCCESS](state, response) {
state.isLoading = false;
state.summary.total = response.summary.total;
state.summary.resolved = response.summary.resolved;
state.summary.failed = response.summary.failed;
state.reports = response.suites;
},
[types.RECEIVE_REPORTS_ERROR](state) {
state.isLoading = false;
state.hasError = true;
},
};
export default () => ({
endpoint: null,
isLoading: false,
hasError: false,
summary: {
total: 0,
resolved: 0,
failed: 0,
},
/**
* Each report will have the following format:
* {
* name: {String},
* summary: {
* total: {Number},
* resolved: {Number},
* failed: {Number},
* },
* new_failures: {Array.<Object>},
* resolved_failures: {Array.<Object>},
* existing_failures: {Array.<Object>},
* }
*/
reports: [],
});
...@@ -13,12 +13,19 @@ ...@@ -13,12 +13,19 @@
* /> * />
*/ */
import tooltip from '../directives/tooltip'; import tooltip from '../directives/tooltip';
import Icon from '../components/icon.vue';
export default { export default {
name: 'ClipboardButton', name: 'ClipboardButton',
directives: { directives: {
tooltip, tooltip,
}, },
components: {
Icon,
},
props: { props: {
text: { text: {
type: String, type: String,
...@@ -58,10 +65,6 @@ export default { ...@@ -58,10 +65,6 @@ export default {
type="button" type="button"
class="btn" class="btn"
> >
<i <icon name="duplicate" />
aria-hidden="true"
class="fa fa-clipboard"
>
</i>
</button> </button>
</template> </template>
...@@ -294,6 +294,10 @@ ...@@ -294,6 +294,10 @@
.btn-clipboard { .btn-clipboard {
border: 0; border: 0;
padding: 0 5px; padding: 0 5px;
svg {
top: auto;
}
} }
.input-group-prepend, .input-group-prepend,
......
...@@ -209,7 +209,7 @@ ...@@ -209,7 +209,7 @@
> .ci-status-link, > .ci-status-link,
> .btn, > .btn,
> .commit-sha-group { > .commit-sha-group {
margin-left: $gl-padding-8; margin-left: $gl-padding;
} }
} }
...@@ -239,10 +239,6 @@ ...@@ -239,10 +239,6 @@
fill: $gl-text-color-secondary; fill: $gl-text-color-secondary;
} }
.fa-clipboard {
color: $gl-text-color-secondary;
}
:first-child { :first-child {
border-bottom-left-radius: $border-radius-default; border-bottom-left-radius: $border-radius-default;
border-top-left-radius: $border-radius-default; border-top-left-radius: $border-radius-default;
......
...@@ -51,7 +51,7 @@ module ButtonHelper ...@@ -51,7 +51,7 @@ module ButtonHelper
} }
content_tag :button, button_attributes do content_tag :button, button_attributes do
concat(icon('clipboard', 'aria-hidden': 'true')) unless hide_button_icon concat(sprite_icon('duplicate')) unless hide_button_icon
concat(button_text) concat(button_text)
end end
end end
......
...@@ -56,7 +56,7 @@ module CiStatusHelper ...@@ -56,7 +56,7 @@ module CiStatusHelper
status.humanize status.humanize
end end
def ci_icon_for_status(status) def ci_icon_for_status(status, size: 16)
if detailed_status?(status) if detailed_status?(status)
return sprite_icon(status.icon) return sprite_icon(status.icon)
end end
...@@ -85,7 +85,7 @@ module CiStatusHelper ...@@ -85,7 +85,7 @@ module CiStatusHelper
'status_canceled' 'status_canceled'
end end
sprite_icon(icon_name, size: 16) sprite_icon(icon_name, size: size)
end end
def pipeline_status_cache_key(pipeline_status) def pipeline_status_cache_key(pipeline_status)
...@@ -111,7 +111,8 @@ module CiStatusHelper ...@@ -111,7 +111,8 @@ module CiStatusHelper
'commit', 'commit',
commit.status(ref), commit.status(ref),
path, path,
tooltip_placement: tooltip_placement) tooltip_placement: tooltip_placement,
icon_size: 24)
end end
def render_pipeline_status(pipeline, tooltip_placement: 'left') def render_pipeline_status(pipeline, tooltip_placement: 'left')
...@@ -125,16 +126,16 @@ module CiStatusHelper ...@@ -125,16 +126,16 @@ module CiStatusHelper
Ci::Runner.instance_type.blank? Ci::Runner.instance_type.blank?
end end
def render_status_with_link(type, status, path = nil, tooltip_placement: 'left', cssclass: '', container: 'body') def render_status_with_link(type, status, path = nil, tooltip_placement: 'left', cssclass: '', container: 'body', icon_size: 16)
klass = "ci-status-link ci-status-icon-#{status.dasherize} #{cssclass}" klass = "ci-status-link ci-status-icon-#{status.dasherize} #{cssclass}"
title = "#{type.titleize}: #{ci_label_for_status(status)}" title = "#{type.titleize}: #{ci_label_for_status(status)}"
data = { toggle: 'tooltip', placement: tooltip_placement, container: container } data = { toggle: 'tooltip', placement: tooltip_placement, container: container }
if path if path
link_to ci_icon_for_status(status), path, link_to ci_icon_for_status(status, size: icon_size), path,
class: klass, title: title, data: data class: klass, title: title, data: data
else else
content_tag :span, ci_icon_for_status(status), content_tag :span, ci_icon_for_status(status, size: icon_size),
class: klass, title: title, data: data class: klass, title: title, data: data
end end
end end
......
...@@ -145,15 +145,14 @@ module CommitsHelper ...@@ -145,15 +145,14 @@ module CommitsHelper
person_name person_name
end end
options = { link_options = {
class: "commit-#{options[:source]}-link has-tooltip", class: "commit-#{options[:source]}-link"
title: source_email
} }
if user.nil? if user.nil?
mail_to(source_email, text, options) mail_to(source_email, text, link_options)
else else
link_to(text, user_path(user), options) link_to(text, user_path(user), link_options)
end end
end end
......
...@@ -25,7 +25,7 @@ module Projects ...@@ -25,7 +25,7 @@ module Projects
success success
rescue => e rescue => e
error("Error importing repository #{project.import_url} into #{project.full_path} - #{e.message}") error("Error importing repository #{project.safe_import_url} into #{project.full_path} - #{e.message}")
end end
private private
......
# frozen_string_literal: true
class ImportExportUploader < AttachmentUploader class ImportExportUploader < AttachmentUploader
EXTENSION_WHITELIST = %w[tar.gz].freeze EXTENSION_WHITELIST = %w[tar.gz].freeze
......
...@@ -18,7 +18,7 @@ ...@@ -18,7 +18,7 @@
- commit = blame_group[:commit] - commit = blame_group[:commit]
%td.blame-commit{ class: age_map_class(commit.committed_date, project_duration) } %td.blame-commit{ class: age_map_class(commit.committed_date, project_duration) }
.commit .commit
= author_avatar(commit, size: 36) = author_avatar(commit, size: 36, has_tooltip: false)
.commit-row-title .commit-row-title
%span.item-title.str-truncated-100 %span.item-title.str-truncated-100
= link_to_markdown commit.title, project_commit_path(@project, commit.id), class: "cdark", title: commit.title = link_to_markdown commit.title, project_commit_path(@project, commit.id), class: "cdark", title: commit.title
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
- page_title @blob.path, @ref - page_title @blob.path, @ref
.js-signature-container{ data: { 'signatures-path': namespace_project_signatures_path } }
%div{ class: container_class } %div{ class: container_class }
= render 'projects/last_push' = render 'projects/last_push'
......
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
%span.d-none.d-sm-inline authored %span.d-none.d-sm-inline authored
#{time_ago_with_tooltip(@commit.authored_date)} #{time_ago_with_tooltip(@commit.authored_date)}
%span= s_('ByAuthor|by') %span= s_('ByAuthor|by')
= author_avatar(@commit, size: 24) = author_avatar(@commit, size: 24, has_tooltip: false)
%strong %strong
= commit_author_link(@commit, avatar: true, size: 24) = commit_author_link(@commit, avatar: true, size: 24)
- if @commit.different_committer? - if @commit.different_committer?
......
...@@ -23,7 +23,7 @@ ...@@ -23,7 +23,7 @@
%li.commit.flex-row.js-toggle-container{ id: "commit-#{commit.short_id}" } %li.commit.flex-row.js-toggle-container{ id: "commit-#{commit.short_id}" }
.avatar-cell.d-none.d-sm-block .avatar-cell.d-none.d-sm-block
= author_avatar(commit, size: 36) = author_avatar(commit, size: 36, has_tooltip: false)
.commit-detail.flex-list .commit-detail.flex-list
.commit-content.qa-commit-content .commit-content.qa-commit-content
......
...@@ -8,6 +8,10 @@ ...@@ -8,6 +8,10 @@
= render partial: 'flash_messages', locals: { project: @project } = render partial: 'flash_messages', locals: { project: @project }
- if @project.repository_exists? && !@project.empty_repo?
- signatures_path = namespace_project_signatures_path(project_id: @project.path, id: @project.default_branch)
.js-signature-container{ data: { 'signatures-path': signatures_path } }
%div{ class: [container_class, ("limit-container-width" unless fluid_layout)] } %div{ class: [container_class, ("limit-container-width" unless fluid_layout)] }
= render "projects/last_push" = render "projects/last_push"
......
- @no_container = true - @no_container = true
- breadcrumb_title _("Repository") - breadcrumb_title _("Repository")
- @content_class = "limit-container-width" unless fluid_layout - @content_class = "limit-container-width" unless fluid_layout
- signatures_path = namespace_project_signatures_path(namespace_id: @project.namespace.path, project_id: @project.path, id: @ref)
- page_title @path.presence || _("Files"), @ref - page_title @path.presence || _("Files"), @ref
= content_for :meta_tags do = content_for :meta_tags do
= auto_discovery_link_tag(:atom, project_commits_url(@project, @ref, rss_url_options), title: "#{@project.name}:#{@ref} commits") = auto_discovery_link_tag(:atom, project_commits_url(@project, @ref, rss_url_options), title: "#{@project.name}:#{@ref} commits")
.js-signature-container{ data: { 'signatures-path': signatures_path } }
%div{ class: [(container_class), ("limit-container-width" unless fluid_layout)] } %div{ class: [(container_class), ("limit-container-width" unless fluid_layout)] }
= render 'projects/last_push' = render 'projects/last_push'
= render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id) = render 'projects/files', commit: @last_commit, project: @project, ref: @ref, content_url: project_tree_path(@project, @id)
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
.card-header .card-header
.float-right .float-right
%button.js-clipboard-trigger.btn.btn-sm{ title: t('sherlock.copy_to_clipboard'), type: :button } %button.js-clipboard-trigger.btn.btn-sm{ title: t('sherlock.copy_to_clipboard'), type: :button }
%i.fa.fa-clipboard = sprite_icon('duplicate')
%pre.hidden %pre.hidden
= @query.formatted_query = @query.formatted_query
%strong %strong
...@@ -42,7 +42,7 @@ ...@@ -42,7 +42,7 @@
.card-header .card-header
.float-right .float-right
%button.js-clipboard-trigger.btn.btn-sm{ title: t('sherlock.copy_to_clipboard'), type: :button } %button.js-clipboard-trigger.btn.btn-sm{ title: t('sherlock.copy_to_clipboard'), type: :button }
%i.fa.fa-clipboard = sprite_icon('duplicate')
%pre.hidden %pre.hidden
= @query.explain = @query.explain
%strong %strong
......
# frozen_string_literal: true
module EachShardWorker module EachShardWorker
extend ActiveSupport::Concern extend ActiveSupport::Concern
include ::Gitlab::Utils::StrongMemoize include ::Gitlab::Utils::StrongMemoize
......
# frozen_string_literal: true
class DeleteDiffFilesWorker class DeleteDiffFilesWorker
include ApplicationWorker include ApplicationWorker
......
# frozen_string_literal: true
module RepositoryCheck module RepositoryCheck
class DispatchWorker class DispatchWorker
include ApplicationWorker include ApplicationWorker
......
---
title: Remove tooltips from commit author avatar and name in commit lists
merge_request: 20674
author:
type: other
---
title: Adds Vuex store for reports section in MR widget
merge_request: 20709
author:
type: added
---
title: Sanitize git URL in import errors
merge_request:
author: Jamie Schembri
type: fixed
---
title: Replace 'Sidekiq::Testing.inline!' with 'perform_enqueued_jobs'
merge_request: 20768
author: "@blackst0ne"
type: other
---
title: Enable frozen string in newly added files to previously processed directories
merge_request: 20763
author: gfyoung
type: performance
---
title: Enable frozen strings in remaining lib/banzai/filter/*.rb files
merge_request: 20777
author:
type: performance
---
title: Enable frozen strings in lib/banzai/filter/*.rb
merge_request: 20775
author:
type: performance
---
title: Remove method instrumentation for Banzai filters and reference parsers
merge_request: 20770
author:
type: performance
---
title: Reduces the client side memory footprint on merge requests
merge_request: 20744
author:
type: performance
---
title: Display GPG status on repository and blob pages
merge_request: 20524
author:
type: changed
...@@ -58,20 +58,6 @@ def instrument_classes(instrumentation) ...@@ -58,20 +58,6 @@ def instrument_classes(instrumentation)
instrumentation.instrument_instance_methods(const) instrumentation.instrument_instance_methods(const)
end end
# Instruments all Banzai filters and reference parsers
{
Filter: Rails.root.join('lib', 'banzai', 'filter', '*.rb'),
ReferenceParser: Rails.root.join('lib', 'banzai', 'reference_parser', '*.rb')
}.each do |const_name, path|
Dir[path].each do |file|
klass = File.basename(file, File.extname(file)).camelize
const = Banzai.const_get(const_name).const_get(klass)
instrumentation.instrument_methods(const)
instrumentation.instrument_instance_methods(const)
end
end
instrumentation.instrument_methods(Banzai::Renderer) instrumentation.instrument_methods(Banzai::Renderer)
instrumentation.instrument_methods(Banzai::Querying) instrumentation.instrument_methods(Banzai::Querying)
...@@ -94,8 +80,6 @@ def instrument_classes(instrumentation) ...@@ -94,8 +80,6 @@ def instrument_classes(instrumentation)
instrumentation.instrument_instance_methods(RepositoryCheck::SingleRepositoryWorker) instrumentation.instrument_instance_methods(RepositoryCheck::SingleRepositoryWorker)
instrumentation.instrument_instance_methods(Rouge::Plugins::CommonMark)
instrumentation.instrument_instance_methods(Rouge::Plugins::Redcarpet)
instrumentation.instrument_instance_methods(Rouge::Formatters::HTMLGitlab) instrumentation.instrument_instance_methods(Rouge::Formatters::HTMLGitlab)
[:XML, :HTML].each do |namespace| [:XML, :HTML].each do |namespace|
......
...@@ -267,7 +267,6 @@ Omnibus GitLab 11.1. ...@@ -267,7 +267,6 @@ Omnibus GitLab 11.1.
Follow the steps below to configure verbose logging of GitLab Pages daemon. Follow the steps below to configure verbose logging of GitLab Pages daemon.
1. By default the daemon only logs with `INFO` level. 1. By default the daemon only logs with `INFO` level.
If you wish to make it log events with level `DEBUG` you must configure this in If you wish to make it log events with level `DEBUG` you must configure this in
`/etc/gitlab/gitlab.rb`: `/etc/gitlab/gitlab.rb`:
......
...@@ -66,7 +66,11 @@ future GitLab releases.** ...@@ -66,7 +66,11 @@ future GitLab releases.**
| **CI_JOB_MANUAL** | 8.12 | all | The flag to indicate that job was manually started | | **CI_JOB_MANUAL** | 8.12 | all | The flag to indicate that job was manually started |
| **CI_JOB_NAME** | 9.0 | 0.5 | The name of the job as defined in `.gitlab-ci.yml` | | **CI_JOB_NAME** | 9.0 | 0.5 | The name of the job as defined in `.gitlab-ci.yml` |
| **CI_JOB_STAGE** | 9.0 | 0.5 | The name of the stage as defined in `.gitlab-ci.yml` | | **CI_JOB_STAGE** | 9.0 | 0.5 | The name of the stage as defined in `.gitlab-ci.yml` |
<<<<<<< HEAD
| **CI_JOB_TOKEN** | 9.0 | 1.2 | Token used for authenticating with [GitLab Container Registry][registry], downloading [dependent repositories][dependent-repositories], authenticate with multi-project pipelines when [triggers][trigger-job-token] are involved, and for [downloading job artifacts][get-job-artifacts] | | **CI_JOB_TOKEN** | 9.0 | 1.2 | Token used for authenticating with [GitLab Container Registry][registry], downloading [dependent repositories][dependent-repositories], authenticate with multi-project pipelines when [triggers][trigger-job-token] are involved, and for [downloading job artifacts][get-job-artifacts] |
=======
| **CI_JOB_TOKEN** | 9.0 | 1.2 | Token used for authenticating with the [GitLab Container Registry][registry] and downloading [dependent repositories][dependent-repositories] |
>>>>>>> upstream/master
| **CI_JOB_URL** | 11.1 | 0.5 | Job details URL | | **CI_JOB_URL** | 11.1 | 0.5 | Job details URL |
| **CI_REPOSITORY_URL** | 9.0 | all | The URL to clone the Git repository | | **CI_REPOSITORY_URL** | 9.0 | all | The URL to clone the Git repository |
| **CI_RUNNER_DESCRIPTION** | 8.10 | 0.5 | The description of the runner as saved in GitLab | | **CI_RUNNER_DESCRIPTION** | 8.10 | 0.5 | The description of the runner as saved in GitLab |
...@@ -590,5 +594,9 @@ Below you can find supported syntax reference: ...@@ -590,5 +594,9 @@ Below you can find supported syntax reference:
[trigger-job-token]: ../triggers/README.md#ci-job-token [trigger-job-token]: ../triggers/README.md#ci-job-token
[gitlab-deploy-token]: ../../user/project/deploy_tokens/index.md#gitlab-deploy-token [gitlab-deploy-token]: ../../user/project/deploy_tokens/index.md#gitlab-deploy-token
[registry]: ../../user/project/container_registry.md [registry]: ../../user/project/container_registry.md
<<<<<<< HEAD
[dependent-repositories]: ../../user/project/new_ci_build_permissions_model.md#dependent-repositories [dependent-repositories]: ../../user/project/new_ci_build_permissions_model.md#dependent-repositories
[get-job-artifacts]: ../../api/jobs.html#get-job-artifacts [get-job-artifacts]: ../../api/jobs.html#get-job-artifacts
=======
[dependent-repositories]: ../../user/project/new_ci_build_permissions_model.md#dependent-repositories
>>>>>>> upstream/master
...@@ -591,7 +591,7 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac ...@@ -591,7 +591,7 @@ also be customized, and you can easily use a [custom buildpack](#custom-buildpac
| `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). | | `CANARY_ENABLED` | From GitLab 11.0, this variable can be used to define a [deploy policy for canary environments](#deploy-policy-for-canary-environments). |
| `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. | | `INCREMENTAL_ROLLOUT_ENABLED`| From GitLab 10.8, this variable can be used to enable an [incremental rollout](#incremental-rollout-to-production) of your application for the production environment. |
| `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. | | `TEST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `test` job. If the variable is present, the job will not be created. |
| `CODEQUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. | | `CODE_QUALITY_DISABLED` | From GitLab 11.0, this variable can be used to disable the `codequality` job. If the variable is present, the job will not be created. |
| `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. | | `SAST_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast` job. If the variable is present, the job will not be created. |
| `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. | | `DEPENDENCY_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `dependency_scanning` job. If the variable is present, the job will not be created. |
| `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. | | `CONTAINER_SCANNING_DISABLED` | From GitLab 11.0, this variable can be used to disable the `sast:container` job. If the variable is present, the job will not be created. |
......
...@@ -200,6 +200,12 @@ sudo apt-get install pgloader ...@@ -200,6 +200,12 @@ sudo apt-get install pgloader
sudo -u git cp config/database.yml.postgresql config/database.yml sudo -u git cp config/database.yml.postgresql config/database.yml
sudo -u git -H chmod o-rwx config/database.yml sudo -u git -H chmod o-rwx config/database.yml
``` ```
1. Install gems related to PostgreSQL
``` bash
sudo -u git -H rm .bundle/config
sudo -u git -H bundle install --deployment --without development test mysql aws kerberos
```
1. Run the following commands to prepare the schema: 1. Run the following commands to prepare the schema:
......
# frozen_string_literal: true
require 'uri' require 'uri'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# Issues, Merge Requests, Snippets, Commits and Commit Ranges share # Issues, Merge Requests, Snippets, Commits and Commit Ranges share
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class AsciiDocPostProcessingFilter < HTML::Pipeline::Filter class AsciiDocPostProcessingFilter < HTML::Pipeline::Filter
......
# frozen_string_literal: true
require 'uri' require 'uri'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class BlockquoteFenceFilter < HTML::Pipeline::TextFilter class BlockquoteFenceFilter < HTML::Pipeline::TextFilter
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that renders `color` followed by a color "chip". # HTML filter that renders `color` followed by a color "chip".
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces commit range references with links. # HTML filter that replaces commit range references with links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces commit references with links. # HTML filter that replaces commit references with links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces users' names and emails in commit trailers # HTML filter that replaces users' names and emails in commit trailers
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces :emoji: and unicode with images. # HTML filter that replaces :emoji: and unicode with images.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# The actual filter is implemented in the EE mixin # The actual filter is implemented in the EE mixin
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces external issue tracker references with links. # HTML filter that replaces external issue tracker references with links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML Filter to modify the attributes of external links # HTML Filter to modify the attributes of external links
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML Filter for parsing Gollum's tags in HTML. It's only parses the # HTML Filter for parsing Gollum's tags in HTML. It's only parses the
......
# frozen_string_literal: true
require 'erb' require 'erb'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that moves the value of image `src` attributes to `data-src` # HTML filter that moves the value of image `src` attributes to `data-src`
...@@ -5,7 +7,7 @@ module Banzai ...@@ -5,7 +7,7 @@ module Banzai
class ImageLazyLoadFilter < HTML::Pipeline::Filter class ImageLazyLoadFilter < HTML::Pipeline::Filter
def call def call
doc.xpath('descendant-or-self::img').each do |img| doc.xpath('descendant-or-self::img').each do |img|
img['class'] ||= '' << 'lazy' img.add_class('lazy')
img['data-src'] = img['src'] img['data-src'] = img['src']
img['src'] = LazyImageTagHelper.placeholder_image img['src'] = LazyImageTagHelper.placeholder_image
end end
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that wraps links around inline images. # HTML filter that wraps links around inline images.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class InlineDiffFilter < HTML::Pipeline::Filter class InlineDiffFilter < HTML::Pipeline::Filter
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class IssuableReferenceFilter < AbstractReferenceFilter class IssuableReferenceFilter < AbstractReferenceFilter
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that appends state information to issuable links. # HTML filter that appends state information to issuable links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces issue references with links. References to # HTML filter that replaces issue references with links. References to
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces label references with links. # HTML filter that replaces label references with links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class MarkdownFilter < HTML::Pipeline::TextFilter class MarkdownFilter < HTML::Pipeline::TextFilter
......
# frozen_string_literal: true
require 'uri' require 'uri'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces merge request references with links. References # HTML filter that replaces merge request references with links. References
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class MermaidFilter < HTML::Pipeline::Filter class MermaidFilter < HTML::Pipeline::Filter
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces milestone references with links. # HTML filter that replaces milestone references with links.
......
# frozen_string_literal: true
require "nokogiri" require "nokogiri"
require "asciidoctor-plantuml/plantuml" require "asciidoctor-plantuml/plantuml"
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that removes references to records that the current user does # HTML filter that removes references to records that the current user does
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# Base class for GitLab Flavored Markdown reference filters. # Base class for GitLab Flavored Markdown reference filters.
......
# frozen_string_literal: true
require 'uri' require 'uri'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# Sanitize HTML # Sanitize HTML
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that sets dir="auto" for RTL languages support # HTML filter that sets dir="auto" for RTL languages support
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces snippet references with links. References to # HTML filter that replaces snippet references with links. References to
......
# frozen_string_literal: true
require 'rouge/plugins/common_mark' require 'rouge/plugins/common_mark'
require 'rouge/plugins/redcarpet' require 'rouge/plugins/redcarpet'
...@@ -15,7 +17,7 @@ module Banzai ...@@ -15,7 +17,7 @@ module Banzai
end end
def highlight_node(node) def highlight_node(node)
css_classes = 'code highlight js-syntax-highlight' css_classes = +'code highlight js-syntax-highlight'
lang = node.attr('lang') lang = node.attr('lang')
retried = false retried = false
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that adds an anchor child element to all Headers in a # HTML filter that adds an anchor child element to all Headers in a
...@@ -19,7 +21,7 @@ module Banzai ...@@ -19,7 +21,7 @@ module Banzai
def call def call
return doc if context[:no_header_anchors] return doc if context[:no_header_anchors]
result[:toc] = "" result[:toc] = +""
headers = Hash.new(0) headers = Hash.new(0)
header_root = current_header = HeaderNode.new header_root = current_header = HeaderNode.new
......
# frozen_string_literal: true
require 'task_list/filter' require 'task_list/filter'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# HTML filter that replaces user or group references with links. # HTML filter that replaces user or group references with links.
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
# Find every image that isn't already wrapped in an `a` tag, and that has # Find every image that isn't already wrapped in an `a` tag, and that has
......
# frozen_string_literal: true
require 'uri' require 'uri'
module Banzai module Banzai
......
# frozen_string_literal: true
module Banzai module Banzai
module Filter module Filter
class YamlFrontMatterFilter < HTML::Pipeline::Filter class YamlFrontMatterFilter < HTML::Pipeline::Filter
......
...@@ -39,7 +39,7 @@ class Feature ...@@ -39,7 +39,7 @@ class Feature
# Flipper creates on-memory features when asked for a not-yet-created one. # Flipper creates on-memory features when asked for a not-yet-created one.
# If we want to check if a feature has been actually set, we look for it # If we want to check if a feature has been actually set, we look for it
# on the persisted features list. # on the persisted features list.
persisted_names.include?(feature.name) persisted_names.include?(feature.name.to_s)
end end
def enabled?(key, thing = nil) def enabled?(key, thing = nil)
......
...@@ -542,10 +542,14 @@ msgstr "" ...@@ -542,10 +542,14 @@ msgstr ""
msgid "An error occurred while importing project: ${details}" msgid "An error occurred while importing project: ${details}"
msgstr "" msgstr ""
<<<<<<< HEAD
msgid "An error occurred while initializing path locks" msgid "An error occurred while initializing path locks"
msgstr "" msgstr ""
msgid "An error occurred while loading commits" msgid "An error occurred while loading commits"
=======
msgid "An error occurred while loading commit signatures"
>>>>>>> upstream/master
msgstr "" msgstr ""
msgid "An error occurred while loading diff" msgid "An error occurred while loading diff"
......
...@@ -238,6 +238,5 @@ def check_author_link(email, author) ...@@ -238,6 +238,5 @@ def check_author_link(email, author)
author_link = find('.commit-author-link') author_link = find('.commit-author-link')
expect(author_link['href']).to eq(user_path(author)) expect(author_link['href']).to eq(user_path(author))
expect(author_link['title']).to eq(email)
expect(find('.commit-author-name').text).to eq(author.name) expect(find('.commit-author-name').text).to eq(author.name)
end end
...@@ -7,7 +7,7 @@ describe 'GPG signed commits', :js do ...@@ -7,7 +7,7 @@ describe 'GPG signed commits', :js do
user = create :user, email: 'unrelated.user@example.org' user = create :user, email: 'unrelated.user@example.org'
project.add_maintainer(user) project.add_maintainer(user)
Sidekiq::Testing.inline! do perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user create :gpg_key, key: GpgHelpers::User1.public_key, user: user
end end
...@@ -21,7 +21,7 @@ describe 'GPG signed commits', :js do ...@@ -21,7 +21,7 @@ describe 'GPG signed commits', :js do
end end
# user changes his email which makes the gpg key verified # user changes his email which makes the gpg key verified
Sidekiq::Testing.inline! do perform_enqueued_jobs do
user.skip_reconfirmation! user.skip_reconfirmation!
user.update!(email: GpgHelpers::User1.emails.first) user.update!(email: GpgHelpers::User1.emails.first)
end end
...@@ -48,7 +48,7 @@ describe 'GPG signed commits', :js do ...@@ -48,7 +48,7 @@ describe 'GPG signed commits', :js do
end end
# user adds the gpg key which makes the signature valid # user adds the gpg key which makes the signature valid
Sidekiq::Testing.inline! do perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user create :gpg_key, key: GpgHelpers::User1.public_key, user: user
end end
...@@ -66,7 +66,7 @@ describe 'GPG signed commits', :js do ...@@ -66,7 +66,7 @@ describe 'GPG signed commits', :js do
end end
let(:user_1_key) do let(:user_1_key) do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User1.public_key, user: user_1 create :gpg_key, key: GpgHelpers::User1.public_key, user: user_1
end end
end end
...@@ -79,7 +79,7 @@ describe 'GPG signed commits', :js do ...@@ -79,7 +79,7 @@ describe 'GPG signed commits', :js do
end end
let(:user_2_key) do let(:user_2_key) do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
create :gpg_key, key: GpgHelpers::User2.public_key, user: user_2 create :gpg_key, key: GpgHelpers::User2.public_key, user: user_2
end end
end end
......
...@@ -121,6 +121,8 @@ describe ButtonHelper do ...@@ -121,6 +121,8 @@ describe ButtonHelper do
end end
describe 'clipboard_button' do describe 'clipboard_button' do
include IconsHelper
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { build_stubbed(:project) } let(:project) { build_stubbed(:project) }
...@@ -145,7 +147,7 @@ describe ButtonHelper do ...@@ -145,7 +147,7 @@ describe ButtonHelper do
expect(element.attr('data-clipboard-text')).to eq(nil) expect(element.attr('data-clipboard-text')).to eq(nil)
expect(element.inner_text).to eq("") expect(element.inner_text).to eq("")
expect(element).to have_selector('.fa.fa-clipboard') expect(element.to_html).to include sprite_icon('duplicate')
end end
end end
...@@ -178,7 +180,7 @@ describe ButtonHelper do ...@@ -178,7 +180,7 @@ describe ButtonHelper do
context 'with `hide_button_icon` attribute provided' do context 'with `hide_button_icon` attribute provided' do
it 'shows copy to clipboard button without tooltip support' do it 'shows copy to clipboard button without tooltip support' do
expect(element(hide_button_icon: true)).not_to have_selector('.fa.fa-clipboard') expect(element(hide_button_icon: true).to_html).not_to include sprite_icon('duplicate')
end end
end end
end end
......
...@@ -50,7 +50,11 @@ describe('DiffLineGutterContent', () => { ...@@ -50,7 +50,11 @@ describe('DiffLineGutterContent', () => {
it('should return discussions for the given lineCode', () => { it('should return discussions for the given lineCode', () => {
const { lineCode } = getDiffFileMock().highlightedDiffLines[1]; const { lineCode } = getDiffFileMock().highlightedDiffLines[1];
const component = createComponent({ lineCode, showCommentButton: true }); const component = createComponent({
lineCode,
showCommentButton: true,
discussions: getDiscussionsMockData(),
});
setDiscussions(component); setDiscussions(component);
......
import MockAdapter from 'axios-mock-adapter'; import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import GpgBadges from '~/gpg_badges'; import GpgBadges from '~/gpg_badges';
import { TEST_HOST } from 'spec/test_constants';
describe('GpgBadges', () => { describe('GpgBadges', () => {
let mock; let mock;
const dummyCommitSha = 'n0m0rec0ffee'; const dummyCommitSha = 'n0m0rec0ffee';
const dummyBadgeHtml = 'dummy html'; const dummyBadgeHtml = 'dummy html';
const dummyResponse = { const dummyResponse = {
signatures: [{ signatures: [
commit_sha: dummyCommitSha, {
html: dummyBadgeHtml, commit_sha: dummyCommitSha,
}], html: dummyBadgeHtml,
},
],
}; };
const dummyUrl = `${TEST_HOST}/dummy/signatures`;
beforeEach(() => { beforeEach(() => {
mock = new MockAdapter(axios); mock = new MockAdapter(axios);
setFixtures(` setFixtures(`
<form <form
class="commits-search-form js-signature-container" data-signatures-path="/hello" action="/hello" class="commits-search-form js-signature-container" data-signatures-path="${dummyUrl}" action="${dummyUrl}"
method="get"> method="get">
<input name="utf8" type="hidden" value="✓"> <input name="utf8" type="hidden" value="✓">
<input type="search" name="search" id="commits-search"class="form-control search-text-input input-short"> <input type="search" name="search" id="commits-search"class="form-control search-text-input input-short">
...@@ -32,25 +36,55 @@ describe('GpgBadges', () => { ...@@ -32,25 +36,55 @@ describe('GpgBadges', () => {
mock.restore(); mock.restore();
}); });
it('displays a loading spinner', (done) => { it('does not make a request if there is no container element', done => {
mock.onGet('/hello').reply(200); setFixtures('');
spyOn(axios, 'get');
GpgBadges.fetch().then(() => { GpgBadges.fetch()
expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null); .then(() => {
const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin'); expect(axios.get).not.toHaveBeenCalled();
expect(spinners.length).toBe(1); })
done(); .then(done)
}).catch(done.fail); .catch(done.fail);
}); });
it('replaces the loading spinner', (done) => { it('throws an error if the endpoint is missing', done => {
mock.onGet('/hello').reply(200, dummyResponse); setFixtures('<div class="js-signature-container"></div>');
spyOn(axios, 'get');
GpgBadges.fetch().then(() => { GpgBadges.fetch()
expect(document.querySelector('.js-loading-gpg-badge')).toBe(null); .then(() => done.fail('Expected error to be thrown'))
const parentContainer = document.querySelector('.parent-container'); .catch(error => {
expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml); expect(error.message).toBe('Missing commit signatures endpoint!');
done(); expect(axios.get).not.toHaveBeenCalled();
}).catch(done.fail); })
.then(done)
.catch(done.fail);
});
it('displays a loading spinner', done => {
mock.onGet(dummyUrl).replyOnce(200);
GpgBadges.fetch()
.then(() => {
expect(document.querySelector('.js-loading-gpg-badge:empty')).toBe(null);
const spinners = document.querySelectorAll('.js-loading-gpg-badge i.fa.fa-spinner.fa-spin');
expect(spinners.length).toBe(1);
done();
})
.catch(done.fail);
});
it('replaces the loading spinner', done => {
mock.onGet(dummyUrl).replyOnce(200, dummyResponse);
GpgBadges.fetch()
.then(() => {
expect(document.querySelector('.js-loading-gpg-badge')).toBe(null);
const parentContainer = document.querySelector('.parent-container');
expect(parentContainer.innerHTML.trim()).toEqual(dummyBadgeHtml);
done();
})
.catch(done.fail);
}); });
}); });
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import {
setEndpoint,
requestReports,
fetchReports,
stopPolling,
clearEtagPoll,
receiveReportsSuccess,
receiveReportsError,
} from '~/reports/store/actions';
import state from '~/reports/store/state';
import * as types from '~/reports/store/mutation_types';
import testAction from 'spec/helpers/vuex_action_helper';
import { TEST_HOST } from 'spec/test_constants';
// Specs for the reports Vuex actions: endpoint setup, polling-based fetch,
// and the success/error receive actions. Each spec uses `testAction` to
// assert the exact mutations committed and actions dispatched.
describe('Reports Store Actions', () => {
  let mockedState;

  beforeEach(() => {
    mockedState = state();
  });

  describe('setEndpoint', () => {
    it('should commit SET_ENDPOINT mutation', done => {
      testAction(
        setEndpoint,
        'endpoint.json',
        mockedState,
        [{ type: types.SET_ENDPOINT, payload: 'endpoint.json' }],
        [],
        done,
      );
    });
  });

  describe('requestReports', () => {
    it('should commit REQUEST_REPORTS mutation', done => {
      testAction(requestReports, null, mockedState, [{ type: types.REQUEST_REPORTS }], [], done);
    });
  });

  describe('fetchReports', () => {
    // Single source of truth for the polled endpoint; used both to configure
    // the state and to register the axios mock handler.
    const endpoint = `${TEST_HOST}/endpoint.json`;
    let mock;

    beforeEach(() => {
      mockedState.endpoint = endpoint;
      mock = new MockAdapter(axios);
    });

    afterEach(() => {
      mock.restore();
      // fetchReports starts an ETag poll; stop it so it does not leak into
      // other specs.
      stopPolling();
      clearEtagPoll();
    });

    describe('success', () => {
      it('dispatches requestReports and receiveReportsSuccess', done => {
        mock
          .onGet(endpoint)
          .replyOnce(200, { summary: {}, suites: [{ name: 'rspec' }] });

        testAction(
          fetchReports,
          null,
          mockedState,
          [],
          [
            {
              type: 'requestReports',
            },
            {
              payload: { summary: {}, suites: [{ name: 'rspec' }] },
              type: 'receiveReportsSuccess',
            },
          ],
          done,
        );
      });
    });

    describe('error', () => {
      beforeEach(() => {
        mock.onGet(endpoint).reply(500);
      });

      it('dispatches requestReports and receiveReportsError', done => {
        testAction(
          fetchReports,
          null,
          mockedState,
          [],
          [
            {
              type: 'requestReports',
            },
            {
              type: 'receiveReportsError',
            },
          ],
          done,
        );
      });
    });
  });

  describe('receiveReportsSuccess', () => {
    it('should commit RECEIVE_REPORTS_SUCCESS mutation', done => {
      testAction(
        receiveReportsSuccess,
        { summary: {} },
        mockedState,
        [{ type: types.RECEIVE_REPORTS_SUCCESS, payload: { summary: {} } }],
        [],
        done,
      );
    });
  });

  describe('receiveReportsError', () => {
    it('should commit RECEIVE_REPORTS_ERROR mutation', done => {
      testAction(
        receiveReportsError,
        null,
        mockedState,
        [{ type: types.RECEIVE_REPORTS_ERROR }],
        [],
        done,
      );
    });
  });
});
import state from '~/reports/store/state';
import mutations from '~/reports/store/mutations';
import * as types from '~/reports/store/mutation_types';
// Specs for the reports Vuex mutations: each block applies one mutation to a
// fresh state object and asserts the resulting state fields.
describe('Reports Store Mutations', () => {
  let testState;

  beforeEach(() => {
    testState = state();
  });

  describe('SET_ENDPOINT', () => {
    it('should set endpoint', () => {
      mutations[types.SET_ENDPOINT](testState, 'endpoint.json');

      expect(testState.endpoint).toEqual('endpoint.json');
    });
  });

  describe('REQUEST_REPORTS', () => {
    it('should set isLoading to true', () => {
      mutations[types.REQUEST_REPORTS](testState);

      expect(testState.isLoading).toEqual(true);
    });
  });

  describe('RECEIVE_REPORTS_SUCCESS', () => {
    // Builds one failed-test entry; the three fixture lists differ only in
    // execution time.
    const makeFailure = executionTime => ({
      name: 'StringHelper#concatenate when a is git and b is lab returns summary',
      execution_time: executionTime,
      system_output: "Failure/Error: is_expected.to eq('gitlab')",
    });

    const mockedResponse = {
      summary: {
        total: 14,
        resolved: 0,
        failed: 7,
      },
      suites: [
        {
          name: 'build:linux',
          summary: {
            total: 2,
            resolved: 0,
            failed: 1,
          },
          new_failures: [makeFailure(0.0092435)],
          resolved_failures: [makeFailure(0.009235)],
          existing_failures: [makeFailure(1232.08)],
        },
      ],
    };

    beforeEach(() => {
      mutations[types.RECEIVE_REPORTS_SUCCESS](testState, mockedResponse);
    });

    it('should reset isLoading', () => {
      expect(testState.isLoading).toEqual(false);
    });

    it('should set summary counts', () => {
      expect(testState.summary.total).toEqual(mockedResponse.summary.total);
      expect(testState.summary.resolved).toEqual(mockedResponse.summary.resolved);
      expect(testState.summary.failed).toEqual(mockedResponse.summary.failed);
    });

    it('should set reports', () => {
      expect(testState.reports).toEqual(mockedResponse.suites);
    });
  });

  describe('RECEIVE_REPORTS_ERROR', () => {
    beforeEach(() => {
      mutations[types.RECEIVE_REPORTS_ERROR](testState);
    });

    it('should reset isLoading', () => {
      expect(testState.isLoading).toEqual(false);
    });

    it('should set hasError to true', () => {
      expect(testState.hasError).toEqual(true);
    });
  });
});
...@@ -21,7 +21,7 @@ describe('clipboard button', () => { ...@@ -21,7 +21,7 @@ describe('clipboard button', () => {
it('renders a button for clipboard', () => { it('renders a button for clipboard', () => {
expect(vm.$el.tagName).toEqual('BUTTON'); expect(vm.$el.tagName).toEqual('BUTTON');
expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me'); expect(vm.$el.getAttribute('data-clipboard-text')).toEqual('copy me');
expect(vm.$el.querySelector('i').className).toEqual('fa fa-clipboard'); expect(vm.$el).toHaveSpriteIcon('duplicate');
}); });
it('should have a tooltip with default values', () => { it('should have a tooltip with default values', () => {
......
...@@ -7,6 +7,20 @@ describe Banzai::Filter::ImageLazyLoadFilter do ...@@ -7,6 +7,20 @@ describe Banzai::Filter::ImageLazyLoadFilter do
%(<img src="#{path}" />) %(<img src="#{path}" />)
end end
def image_with_class(path, class_attr = nil)
%(<img src="#{path}" class="#{class_attr}"/>)
end
it 'adds a class attribute' do
doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
expect(doc.at_css('img')['class']).to eq 'lazy'
end
it 'appends to the current class attribute' do
doc = filter(image_with_class('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg', 'test'))
expect(doc.at_css('img')['class']).to eq 'test lazy'
end
it 'transforms the image src to a data-src' do it 'transforms the image src to a data-src' do
doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg')) doc = filter(image('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'))
expect(doc.at_css('img')['data-src']).to eq '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' expect(doc.at_css('img')['data-src']).to eq '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg'
......
...@@ -39,18 +39,36 @@ describe Feature do ...@@ -39,18 +39,36 @@ describe Feature do
end end
describe '.persisted?' do describe '.persisted?' do
it 'returns true for a persisted feature' do context 'when the feature is persisted' do
Feature::FlipperFeature.create!(key: 'foo') it 'returns true when feature name is a string' do
Feature::FlipperFeature.create!(key: 'foo')
feature = double(:feature, name: 'foo')
expect(described_class.persisted?(feature)).to eq(true)
end
it 'returns true when feature name is a symbol' do
Feature::FlipperFeature.create!(key: 'foo')
feature = double(:feature, name: 'foo') feature = double(:feature, name: :foo)
expect(described_class.persisted?(feature)).to eq(true) expect(described_class.persisted?(feature)).to eq(true)
end
end end
it 'returns false for a feature that is not persisted' do context 'when the feature is not persisted' do
feature = double(:feature, name: 'foo') it 'returns false when feature name is a string' do
feature = double(:feature, name: 'foo')
expect(described_class.persisted?(feature)).to eq(false)
end
expect(described_class.persisted?(feature)).to eq(false) it 'returns false when feature name is a symbol' do
feature = double(:feature, name: :bar)
expect(described_class.persisted?(feature)).to eq(false)
end
end end
end end
......
...@@ -65,7 +65,7 @@ describe Gitlab::HashedStorage::Migrator do ...@@ -65,7 +65,7 @@ describe Gitlab::HashedStorage::Migrator do
end end
it 'migrate project' do it 'migrate project' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
subject.migrate(project) subject.migrate(project)
end end
......
...@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do ...@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnServices, :migration, :sidekiq do
end end
it 'correctly processes services' do it 'correctly processes services' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(services_table.where(confidential_note_events: nil).count).to eq 4 expect(services_table.where(confidential_note_events: nil).count).to eq 4
expect(services_table.where(confidential_note_events: true).count).to eq 1 expect(services_table.where(confidential_note_events: true).count).to eq 1
......
...@@ -44,7 +44,7 @@ describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do ...@@ -44,7 +44,7 @@ describe MigrateStageIdReferenceInBackground, :migration, :sidekiq do
end end
it 'schedules background migrations' do it 'schedules background migrations' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(jobs.where(stage_id: nil).count).to eq 5 expect(jobs.where(stage_id: nil).count).to eq 5
migrate! migrate!
......
...@@ -34,7 +34,7 @@ describe MigrateStagesStatuses, :sidekiq, :migration do ...@@ -34,7 +34,7 @@ describe MigrateStagesStatuses, :sidekiq, :migration do
end end
it 'correctly migrates stages statuses' do it 'correctly migrates stages statuses' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(stages.where(status: nil).count).to eq 3 expect(stages.where(status: nil).count).to eq 3
migrate! migrate!
......
...@@ -38,7 +38,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do ...@@ -38,7 +38,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do
end end
it 'migrates the LDAP identities' do it 'migrates the LDAP identities' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
migrate! migrate!
identities.where(id: 1..4).each do |identity| identities.where(id: 1..4).each do |identity|
expect(identity.extern_uid).to eq("uid=foo #{identity.id},ou=people,dc=example,dc=com") expect(identity.extern_uid).to eq("uid=foo #{identity.id},ou=people,dc=example,dc=com")
...@@ -47,7 +47,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do ...@@ -47,7 +47,7 @@ describe NormalizeLdapExternUids, :migration, :sidekiq do
end end
it 'does not modify non-LDAP identities' do it 'does not modify non-LDAP identities' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
migrate! migrate!
identity = identities.last identity = identities.last
expect(identity.extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ") expect(identity.extern_uid).to eq(" uid = foo 5, ou = People, dc = example, dc = com ")
......
...@@ -20,7 +20,7 @@ describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do ...@@ -20,7 +20,7 @@ describe ScheduleCreateGpgKeySubkeysFromGpgKeys, :migration, :sidekiq do
end end
it 'schedules background migrations' do it 'schedules background migrations' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(GpgKeySubkey.count).to eq(0) expect(GpgKeySubkey.count).to eq(0)
migrate! migrate!
......
...@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do ...@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrations, :migration, :sidekiq do
end end
it 'schedules background migrations' do it 'schedules background migrations' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL' non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3 expect(merge_request_diffs.where(non_empty).count).to eq 3
......
...@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do ...@@ -33,7 +33,7 @@ describe ScheduleMergeRequestDiffMigrationsTakeTwo, :migration, :sidekiq do
end end
it 'migrates the data' do it 'migrates the data' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL' non_empty = 'st_commits IS NOT NULL OR st_diffs IS NOT NULL'
expect(merge_request_diffs.where(non_empty).count).to eq 3 expect(merge_request_diffs.where(non_empty).count).to eq 3
......
...@@ -53,7 +53,7 @@ describe ScheduleMergeRequestLatestMergeRequestDiffIdMigrations, :migration, :si ...@@ -53,7 +53,7 @@ describe ScheduleMergeRequestLatestMergeRequestDiffIdMigrations, :migration, :si
end end
it 'schedules background migrations' do it 'schedules background migrations' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 3 expect(merge_requests_table.where(latest_merge_request_diff_id: nil).count).to eq 3
migrate! migrate!
......
...@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do ...@@ -31,7 +31,7 @@ describe ScheduleSetConfidentialNoteEventsOnWebhooks, :migration, :sidekiq do
end end
it 'correctly processes web hooks' do it 'correctly processes web hooks' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 4 expect(web_hooks_table.where(confidential_note_events: nil).count).to eq 4
expect(web_hooks_table.where(confidential_note_events: true).count).to eq 1 expect(web_hooks_table.where(confidential_note_events: true).count).to eq 1
......
...@@ -179,7 +179,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -179,7 +179,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end end
it 'migrates data to object storage' do it 'migrates data to object storage' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
subject subject
build_trace_chunk.reload build_trace_chunk.reload
...@@ -201,7 +201,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do ...@@ -201,7 +201,7 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
end end
it 'does not migrate data to object storage' do it 'does not migrate data to object storage' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
data_store = build_trace_chunk.data_store data_store = build_trace_chunk.data_store
subject subject
......
...@@ -22,7 +22,7 @@ describe SpamLog do ...@@ -22,7 +22,7 @@ describe SpamLog do
spam_log = build(:spam_log) spam_log = build(:spam_log)
user = spam_log.user user = spam_log.user
Sidekiq::Testing.inline! do perform_enqueued_jobs do
spam_log.remove_user(deleted_by: admin) spam_log.remove_user(deleted_by: admin)
end end
......
...@@ -102,7 +102,7 @@ describe API::ProjectImport do ...@@ -102,7 +102,7 @@ describe API::ProjectImport do
it 'correctly overrides params during the import' do it 'correctly overrides params during the import' do
override_params = { 'description' => 'Hello world' } override_params = { 'description' => 'Hello world' }
Sidekiq::Testing.inline! do perform_enqueued_jobs do
post api('/projects/import', user), post api('/projects/import', user),
path: 'test-import', path: 'test-import',
file: fixture_file_upload(file), file: fixture_file_upload(file),
......
...@@ -1100,7 +1100,7 @@ describe API::Users do ...@@ -1100,7 +1100,7 @@ describe API::Users do
end end
it "deletes user" do it "deletes user" do
Sidekiq::Testing.inline! { delete api("/users/#{user.id}", admin) } perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204) expect(response).to have_gitlab_http_status(204)
expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound expect { User.find(user.id) }.to raise_error ActiveRecord::RecordNotFound
...@@ -1112,30 +1112,30 @@ describe API::Users do ...@@ -1112,30 +1112,30 @@ describe API::Users do
end end
it "does not delete for unauthenticated user" do it "does not delete for unauthenticated user" do
Sidekiq::Testing.inline! { delete api("/users/#{user.id}") } perform_enqueued_jobs { delete api("/users/#{user.id}") }
expect(response).to have_gitlab_http_status(401) expect(response).to have_gitlab_http_status(401)
end end
it "is not available for non admin users" do it "is not available for non admin users" do
Sidekiq::Testing.inline! { delete api("/users/#{user.id}", user) } perform_enqueued_jobs { delete api("/users/#{user.id}", user) }
expect(response).to have_gitlab_http_status(403) expect(response).to have_gitlab_http_status(403)
end end
it "returns 404 for non-existing user" do it "returns 404 for non-existing user" do
Sidekiq::Testing.inline! { delete api("/users/999999", admin) } perform_enqueued_jobs { delete api("/users/999999", admin) }
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
expect(json_response['message']).to eq('404 User Not Found') expect(json_response['message']).to eq('404 User Not Found')
end end
it "returns a 404 for invalid ID" do it "returns a 404 for invalid ID" do
Sidekiq::Testing.inline! { delete api("/users/ASDF", admin) } perform_enqueued_jobs { delete api("/users/ASDF", admin) }
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
context "hard delete disabled" do context "hard delete disabled" do
it "moves contributions to the ghost user" do it "moves contributions to the ghost user" do
Sidekiq::Testing.inline! { delete api("/users/#{user.id}", admin) } perform_enqueued_jobs { delete api("/users/#{user.id}", admin) }
expect(response).to have_gitlab_http_status(204) expect(response).to have_gitlab_http_status(204)
expect(issue.reload).to be_persisted expect(issue.reload).to be_persisted
...@@ -1145,7 +1145,7 @@ describe API::Users do ...@@ -1145,7 +1145,7 @@ describe API::Users do
context "hard delete enabled" do context "hard delete enabled" do
it "removes contributions" do it "removes contributions" do
Sidekiq::Testing.inline! { delete api("/users/#{user.id}?hard_delete=true", admin) } perform_enqueued_jobs { delete api("/users/#{user.id}?hard_delete=true", admin) }
expect(response).to have_gitlab_http_status(204) expect(response).to have_gitlab_http_status(204)
expect(Issue.exists?(issue.id)).to be_falsy expect(Issue.exists?(issue.id)).to be_falsy
......
...@@ -49,7 +49,7 @@ describe Groups::DestroyService do ...@@ -49,7 +49,7 @@ describe Groups::DestroyService do
context 'Sidekiq inline' do context 'Sidekiq inline' do
before do before do
# Run sidekiq immediately to check that renamed dir will be removed # Run sidekiq immediately to check that renamed dir will be removed
Sidekiq::Testing.inline! { destroy_group(group, user, async) } perform_enqueued_jobs { destroy_group(group, user, async) }
end end
it 'verifies that paths have been deleted' do it 'verifies that paths have been deleted' do
......
...@@ -28,7 +28,7 @@ describe Projects::CreateFromTemplateService do ...@@ -28,7 +28,7 @@ describe Projects::CreateFromTemplateService do
context 'the result project' do context 'the result project' do
before do before do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
@project = subject.execute @project = subject.execute
end end
......
...@@ -45,18 +45,18 @@ describe Projects::DestroyService do ...@@ -45,18 +45,18 @@ describe Projects::DestroyService do
shared_examples 'handles errors thrown during async destroy' do |error_message| shared_examples 'handles errors thrown during async destroy' do |error_message|
it 'does not allow the error to bubble up' do it 'does not allow the error to bubble up' do
expect do expect do
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
end.not_to raise_error end.not_to raise_error
end end
it 'unmarks the project as "pending deletion"' do it 'unmarks the project as "pending deletion"' do
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
expect(project.reload.pending_delete).to be(false) expect(project.reload.pending_delete).to be(false)
end end
it 'stores an error message in `projects.delete_error`' do it 'stores an error message in `projects.delete_error`' do
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
expect(project.reload.delete_error).to be_present expect(project.reload.delete_error).to be_present
expect(project.delete_error).to include(error_message) expect(project.delete_error).to include(error_message)
...@@ -66,7 +66,7 @@ describe Projects::DestroyService do ...@@ -66,7 +66,7 @@ describe Projects::DestroyService do
context 'Sidekiq inline' do context 'Sidekiq inline' do
before do before do
# Run sidekiq immediatly to check that renamed repository will be removed # Run sidekiq immediatly to check that renamed repository will be removed
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
end end
context 'when has remote mirrors' do context 'when has remote mirrors' do
...@@ -110,7 +110,7 @@ describe Projects::DestroyService do ...@@ -110,7 +110,7 @@ describe Projects::DestroyService do
end end
it 'keeps project team intact upon an error' do it 'keeps project team intact upon an error' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
begin begin
destroy_project(project, user, {}) destroy_project(project, user, {})
rescue ::Redis::CannotConnectError rescue ::Redis::CannotConnectError
...@@ -128,7 +128,7 @@ describe Projects::DestroyService do ...@@ -128,7 +128,7 @@ describe Projects::DestroyService do
before do before do
project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE) project.project_feature.update_attribute("issues_access_level", ProjectFeature::PRIVATE)
# Run sidekiq immediately to check that renamed repository will be removed # Run sidekiq immediately to check that renamed repository will be removed
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
end end
it_behaves_like 'deleting the project' it_behaves_like 'deleting the project'
...@@ -172,7 +172,7 @@ describe Projects::DestroyService do ...@@ -172,7 +172,7 @@ describe Projects::DestroyService do
it 'allows error to bubble up and rolls back project deletion' do it 'allows error to bubble up and rolls back project deletion' do
expect do expect do
Sidekiq::Testing.inline! { destroy_project(project, user, {}) } perform_enqueued_jobs { destroy_project(project, user, {}) }
end.to raise_error(Exception, 'Other error message') end.to raise_error(Exception, 'Other error message')
expect(project.reload.pending_delete).to be(false) expect(project.reload.pending_delete).to be(false)
......
...@@ -35,7 +35,7 @@ describe Projects::HousekeepingService do ...@@ -35,7 +35,7 @@ describe Projects::HousekeepingService do
allow(subject).to receive(:gc_period).and_return(1) allow(subject).to receive(:gc_period).and_return(1)
project.increment_pushes_since_gc project.increment_pushes_since_gc
Sidekiq::Testing.inline! do perform_enqueued_jobs do
expect { subject.execute }.to change { project.pushes_since_gc }.to(0) expect { subject.execute }.to change { project.pushes_since_gc }.to(0)
end end
end end
......
...@@ -69,7 +69,7 @@ describe Projects::ImportService do ...@@ -69,7 +69,7 @@ describe Projects::ImportService do
result = subject.execute result = subject.execute
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - The repository could not be created." expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - The repository could not be created."
end end
context 'when repository creation succeeds' do context 'when repository creation succeeds' do
...@@ -141,7 +141,7 @@ describe Projects::ImportService do ...@@ -141,7 +141,7 @@ describe Projects::ImportService do
result = subject.execute result = subject.execute
expect(result[:status]).to eq :error expect(result[:status]).to eq :error
expect(result[:message]).to eq "Error importing repository #{project.import_url} into #{project.full_path} - Failed to import the repository" expect(result[:message]).to eq "Error importing repository #{project.safe_import_url} into #{project.full_path} - Failed to import the repository"
end end
context 'when repository import scheduled' do context 'when repository import scheduled' do
......
...@@ -173,7 +173,7 @@ describe Users::DestroyService do ...@@ -173,7 +173,7 @@ describe Users::DestroyService do
describe "user personal's repository removal" do describe "user personal's repository removal" do
before do before do
Sidekiq::Testing.inline! { service.execute(user) } perform_enqueued_jobs { service.execute(user) }
end end
context 'legacy storage' do context 'legacy storage' do
......
...@@ -13,7 +13,7 @@ describe StorageMigratorWorker do ...@@ -13,7 +13,7 @@ describe StorageMigratorWorker do
end end
it 'migrates projects in the specified range' do it 'migrates projects in the specified range' do
Sidekiq::Testing.inline! do perform_enqueued_jobs do
worker.perform(ids.min, ids.max) worker.perform(ids.min, ids.max)
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment