Commit cfff3b09 authored by Nick Thomas

Merge branch 'ph/31406/fetchWidgetDataAsync' into 'master'

Fetch initial merge request widget data async

Closes #31406

See merge request gitlab-org/gitlab!20719
parents 1109cbe5 2662b4b0
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
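// Skeleton placeholder shown while the merge request widget data is fetched asynchronously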
export default {
components: {
GlSkeletonLoader,
},
};
</script>
<template>
<div class="prepend-top-default">
<div class="mr-widget-heading p-3">
<gl-skeleton-loader :width="577" :height="12">
<rect width="86" height="12" rx="2" />
<rect x="96" width="300" height="12" rx="2" />
</gl-skeleton-loader>
</div>
<div class="mr-widget-heading mr-widget-workflow p-3">
<gl-skeleton-loader :width="577" :height="72">
<rect width="120" height="12" rx="2" />
<rect y="20" width="300" height="12" rx="2" />
<rect y="40" width="60" height="12" rx="2" />
<rect y="40" x="68" width="100" height="12" rx="2" />
<rect y="60" width="40" height="12" rx="2" />
</gl-skeleton-loader>
</div>
</div>
</template>
......@@ -7,6 +7,7 @@ import MRWidgetStore from 'ee_else_ce/vue_merge_request_widget/stores/mr_widget_
import MRWidgetService from 'ee_else_ce/vue_merge_request_widget/services/mr_widget_service';
import stateMaps from 'ee_else_ce/vue_merge_request_widget/stores/state_maps';
import createFlash from '../flash';
import Loading from './components/loading.vue';
import WidgetHeader from './components/mr_widget_header.vue';
import WidgetMergeHelp from './components/mr_widget_merge_help.vue';
import MrWidgetPipelineContainer from './components/mr_widget_pipeline_container.vue';
......@@ -44,6 +45,7 @@ export default {
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
name: 'MRWidget',
components: {
Loading,
'mr-widget-header': WidgetHeader,
'mr-widget-merge-help': WidgetMergeHelp,
MrWidgetPipelineContainer,
......@@ -80,12 +82,12 @@ export default {
},
},
data() {
const store = new MRWidgetStore(this.mrData || window.gl.mrWidgetData);
const service = this.createService(store);
const store = this.mrData && new MRWidgetStore(this.mrData);
return {
mr: store,
state: store.state,
service,
state: store && store.state,
service: store && this.createService(store),
};
},
computed: {
......@@ -133,29 +135,58 @@ export default {
}
},
},
created() {
this.initPolling();
this.bindEventHubListeners();
eventHub.$on('mr.discussion.updated', this.checkStatus);
},
mounted() {
this.setFaviconHelper();
this.initDeploymentsPolling();
if (this.shouldRenderMergedPipeline) {
this.initPostMergeDeploymentsPolling();
if (gon && gon.features && gon.features.asyncMrWidget) {
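// Feature-flagged path: fetch the cached and fresh widget payloads in parallel,
// then initialize the widget from the merged response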
MRWidgetService.fetchInitialData()
.then(({ data }) => this.initWidget(data))
.catch(() =>
createFlash(__('Unable to load the merge request widget. Try reloading the page.')),
);
} else {
this.initWidget();
}
},
beforeDestroy() {
eventHub.$off('mr.discussion.updated', this.checkStatus);
if (this.pollingInterval) {
this.pollingInterval.destroy();
}
if (this.deploymentsInterval) {
this.deploymentsInterval.destroy();
}
if (this.postMergeDeploymentsInterval) {
this.postMergeDeploymentsInterval.destroy();
}
},
methods: {
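// Creates or updates the store and service once widget data is available, whether it
// was passed in synchronously via props or fetched asynchronously in mounted()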
initWidget(data = {}) {
if (this.mr) {
this.mr.setData({ ...window.gl.mrWidgetData, ...data });
} else {
this.mr = new MRWidgetStore({ ...window.gl.mrWidgetData, ...data });
}
if (!this.state) {
this.state = this.mr.state;
}
if (!this.service) {
this.service = this.createService(this.mr);
}
this.setFaviconHelper();
this.initDeploymentsPolling();
if (this.shouldRenderMergedPipeline) {
this.initPostMergeDeploymentsPolling();
}
this.initPolling();
this.bindEventHubListeners();
eventHub.$on('mr.discussion.updated', this.checkStatus);
},
getServiceEndpoints(store) {
return {
mergePath: store.mergePath,
......@@ -319,7 +350,7 @@ export default {
};
</script>
<template>
<div class="mr-state-widget prepend-top-default">
<div v-if="mr" class="mr-state-widget prepend-top-default">
<mr-widget-header :mr="mr" />
<mr-widget-pipeline-container
v-if="shouldRenderPipelines"
......@@ -377,4 +408,5 @@ export default {
:is-post-merge="true"
/>
</div>
<loading v-else />
</template>
......@@ -61,4 +61,11 @@ export default class MRWidgetService {
static fetchMetrics(metricsUrl) {
return axios.get(`${metricsUrl}.json`);
}
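// Fetches the cached and non-cached widget payloads in parallel and merges them
// into a single data object used to initialize the widget store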
static fetchInitialData() {
return Promise.all([
axios.get(window.gl.mrWidgetData.merge_request_cached_widget_path),
axios.get(window.gl.mrWidgetData.merge_request_widget_path),
]).then(axios.spread((res, cachedRes) => ({ data: Object.assign(res.data, cachedRes.data) })));
}
}
......@@ -51,6 +51,10 @@
position: relative;
border: 1px solid $border-color;
border-radius: $border-radius-default;
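// Render the skeleton loader as a block element within the widget container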
.gl-skeleton-loader {
display: block;
}
}
.mr-widget-extension {
......
......@@ -25,6 +25,7 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, @project.group)
push_frontend_feature_flag(:release_search_filter, @project, default_enabled: true)
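# Expose the async_mr_widget flag so the frontend can opt into the async initial fetch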
push_frontend_feature_flag(:async_mr_widget, @project)
end
around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :discussions]
......
......@@ -3,6 +3,9 @@
class MergeRequestWidgetEntity < Grape::Entity
include RequestAwareEntity
expose :id
expose :iid
expose :source_project_full_path do |merge_request|
merge_request.source_project&.full_path
end
......@@ -65,6 +68,8 @@ class MergeRequestWidgetEntity < Grape::Entity
end
def as_json(options = {})
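# When the async widget is enabled, poll data is fetched from the dedicated poll
# endpoints instead of being merged into this entity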
return super(options) if Feature.enabled?(:async_mr_widget)
super(options)
.merge(MergeRequestPollCachedWidgetEntity.new(object, **@options.opts_hash).as_json(options))
.merge(MergeRequestPollWidgetEntity.new(object, **@options.opts_hash).as_json(options))
......
---
title: Fetches initial merge request widget data async
merge_request: 20719
author:
type: changed
......@@ -43,7 +43,7 @@ export default {
return this.mr.hasApprovalsAvailable && this.mr.state !== 'nothingToMerge';
},
shouldRenderCodeQuality() {
const { codeclimate } = this.mr;
const { codeclimate } = this.mr || {};
return codeclimate && codeclimate.head_path && codeclimate.base_path;
},
shouldRenderLicenseReport() {
......@@ -67,7 +67,7 @@ export default {
);
},
shouldRenderPerformance() {
const { performance } = this.mr;
const { performance } = this.mr || {};
return performance && performance.head_path && performance.base_path;
},
shouldRenderSecurityReport() {
......@@ -149,15 +149,18 @@ export default {
return (gl && gl.mrWidgetData && gl.mrWidgetData.license_management_comparison_path) || null;
},
},
created() {
if (this.shouldRenderCodeQuality) {
watch: {
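// With async widget data these flags can flip after mount, so fetch the
// reports from watchers instead of created()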
shouldRenderCodeQuality(newVal) {
if (newVal) {
this.fetchCodeQuality();
}
if (this.shouldRenderPerformance) {
},
shouldRenderPerformance(newVal) {
if (newVal) {
this.fetchPerformance();
}
},
},
methods: {
getServiceEndpoints(store) {
const base = CEWidgetOptions.methods.getServiceEndpoints(store);
......@@ -223,7 +226,7 @@ export default {
};
</script>
<template>
<div class="mr-state-widget prepend-top-default">
<div v-if="mr" class="mr-state-widget prepend-top-default">
<mr-widget-header :mr="mr" />
<mr-widget-pipeline-container
v-if="shouldRenderPipelines"
......@@ -366,4 +369,5 @@ export default {
:is-post-merge="true"
/>
</div>
<loading v-else />
</template>
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import mrWidgetOptions from 'ee/vue_merge_request_widget/mr_widget_options.vue';
import MRWidgetService from 'ee/vue_merge_request_widget/services/mr_widget_service';
import MRWidgetStore from 'ee/vue_merge_request_widget/stores/mr_widget_store';
import filterByKey from 'ee/vue_shared/security_reports/store/utils/filter_by_key';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { TEST_HOST } from 'spec/test_constants';
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import state from 'ee/vue_shared/security_reports/store/state';
import mockData, {
baseIssues,
headIssues,
......@@ -47,21 +45,19 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
delete mrWidgetOptions.extends.el; // Prevent component mounting
gon.features = { asyncMrWidget: true };
Component = Vue.extend(mrWidgetOptions);
mock = new MockAdapter(axios);
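// Stub both endpoints requested by the async initial widget fetch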
mock.onGet(mockData.merge_request_widget_path).reply(() => [200, gl.mrWidgetData]);
mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, gl.mrWidgetData]);
});
afterEach(() => {
vm.$destroy();
mock.restore();
if (Component.mr) {
// Clean security reports state
Component.mr.sast = state().sast;
Component.mr.sastContainer = state().sastContainer;
Component.mr.dast = state().dast;
Component.mr.dependencyScanning = state().dependencyScanning;
}
gon.features = {};
});
describe('security widget', () => {
......@@ -74,9 +70,6 @@ describe('ee merge request widget options', () => {
},
vulnerability_feedback_path: 'vulnerability_feedback_path',
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
......@@ -85,7 +78,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, sastHeadAllIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(vm.$el.querySelector('.js-sast-widget').textContent.trim()).toContain(
'SAST is loading',
......@@ -98,7 +91,7 @@ describe('ee merge request widget options', () => {
mock.onGet('path.json').reply(200, sastIssuesBase);
mock.onGet('head_path.json').reply(200, sastIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -120,7 +113,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, sastBaseAllIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('renders no new vulnerabilities message', done => {
......@@ -142,7 +135,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, []);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -164,7 +157,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(500, []);
mock.onGet('vulnerability_feedback_path').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render error indicator', done => {
......@@ -188,9 +181,6 @@ describe('ee merge request widget options', () => {
},
vulnerability_feedback_path: 'vulnerability_feedback_path',
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
......@@ -199,7 +189,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, sastIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(
removeBreakLine(vm.$el.querySelector('.js-dependency-scanning-widget').textContent),
......@@ -213,7 +203,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, sastIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -236,7 +226,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, sastBaseAllIssues);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('renders no new vulnerabilities message', done => {
......@@ -259,7 +249,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(200, []);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -282,7 +272,7 @@ describe('ee merge request widget options', () => {
mock.onGet('head_path.json').reply(500, []);
mock.onGet('vulnerability_feedback_path').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render error indicator', done => {
......@@ -300,25 +290,28 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
gl.mrWidgetData = {
...mockData,
codeclimate: {
head_path: 'head.json',
base_path: 'base.json',
},
codeclimate: {},
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
it('should render loading indicator', () => {
it('should render loading indicator', done => {
mock.onGet('head.json').reply(200, headIssues);
mock.onGet('base.json').reply(200, baseIssues);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
vm.mr.codeclimate = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.$nextTick(() => {
expect(
removeBreakLine(vm.$el.querySelector('.js-codequality-widget').textContent),
).toContain('Loading codeclimate report');
done();
});
});
});
......@@ -326,7 +319,14 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(200, headIssues);
mock.onGet('base.json').reply(200, baseIssues);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.codeclimate = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.codeclimate = gl.mrWidgetData.codeclimate;
// mock worker response
spyOn(MRWidgetStore, 'doCodeClimateComparison').and.callFake(() =>
......@@ -384,7 +384,13 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(200, []);
mock.onGet('base.json').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.codeclimate = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.codeclimate = gl.mrWidgetData.codeclimate;
// mock worker response
spyOn(MRWidgetStore, 'doCodeClimateComparison').and.callFake(() =>
......@@ -415,7 +421,13 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(200, headIssues);
mock.onGet('base.json').reply(200, baseIssues);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.codeclimate = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.codeclimate = gl.mrWidgetData.codeclimate;
// mock worker rejection
spyOn(MRWidgetStore, 'doCodeClimateComparison').and.callFake(() => Promise.reject());
......@@ -437,7 +449,13 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(500, []);
mock.onGet('base.json').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.codeclimate = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.codeclimate = gl.mrWidgetData.codeclimate;
});
it('should render error indicator', done => {
......@@ -457,25 +475,28 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
gl.mrWidgetData = {
...mockData,
performance: {
head_path: 'head.json',
base_path: 'base.json',
},
performance: {},
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
it('should render loading indicator', () => {
it('should render loading indicator', done => {
mock.onGet('head.json').reply(200, headPerformance);
mock.onGet('base.json').reply(200, basePerformance);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
vm.mr.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.$nextTick(() => {
expect(
removeBreakLine(vm.$el.querySelector('.js-performance-widget').textContent),
).toContain('Loading performance report');
done();
});
});
});
......@@ -483,7 +504,13 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(200, headPerformance);
mock.onGet('base.json').reply(200, basePerformance);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.performance = gl.mrWidgetData.performance;
});
it('should render provided data', done => {
......@@ -534,8 +561,15 @@ describe('ee merge request widget options', () => {
beforeEach(done => {
mock.onGet('head.json').reply(200, []);
mock.onGet('base.json').reply(200, []);
vm = mountComponent(Component);
// wait for network request from component created() method
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.performance = gl.mrWidgetData.performance;
// wait for network request from component watch update method
setTimeout(done, 0);
});
......@@ -562,7 +596,13 @@ describe('ee merge request widget options', () => {
beforeEach(() => {
mock.onGet('head.json').reply(500, []);
mock.onGet('base.json').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.performance = gl.mrWidgetData.performance;
});
it('should render error indicator', done => {
......@@ -588,9 +628,6 @@ describe('ee merge request widget options', () => {
},
vulnerability_feedback_path: 'vulnerability_feedback_path',
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
......@@ -599,7 +636,7 @@ describe('ee merge request widget options', () => {
mock.onGet('sast-container-base.json').reply(200, dockerBaseReport);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(removeBreakLine(vm.$el.querySelector('.js-sast-container').textContent)).toContain(
'Container scanning is loading',
......@@ -613,7 +650,7 @@ describe('ee merge request widget options', () => {
mock.onGet('sast-container-base.json').reply(200, dockerBaseReport);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -635,7 +672,7 @@ describe('ee merge request widget options', () => {
mock.onGet('sast-container-base.json').reply(500, {});
mock.onGet('vulnerability_feedback_path').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render error indicator', done => {
......@@ -659,9 +696,6 @@ describe('ee merge request widget options', () => {
},
vulnerability_feedback_path: 'vulnerability_feedback_path',
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
});
describe('when it is loading', () => {
......@@ -670,7 +704,7 @@ describe('ee merge request widget options', () => {
mock.onGet('dast_base.json').reply(200, dastBase);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(vm.$el.querySelector('.js-dast-widget').textContent.trim()).toContain(
'DAST is loading',
......@@ -684,7 +718,7 @@ describe('ee merge request widget options', () => {
mock.onGet('dast_base.json').reply(200, dastBase);
mock.onGet('vulnerability_feedback_path').reply(200, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render provided data', done => {
......@@ -705,7 +739,7 @@ describe('ee merge request widget options', () => {
mock.onGet('dast_base.json').reply(500, {});
mock.onGet('vulnerability_feedback_path').reply(500, []);
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
});
it('should render error indicator', done => {
......@@ -735,9 +769,7 @@ describe('ee merge request widget options', () => {
},
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(vm.$el.querySelector('.license-report-widget')).not.toBeNull();
});
......@@ -748,9 +780,7 @@ describe('ee merge request widget options', () => {
license_management: {},
};
Component.mr = new MRWidgetStore(gl.mrWidgetData);
Component.service = new MRWidgetService({});
vm = mountComponent(Component);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
expect(vm.$el.querySelector('.license-report-widget')).toBeNull();
});
......
# frozen_string_literal: true
require 'spec_helper'
describe MergeRequestPollWidgetEntity do
include ProjectForksHelper
set(:user) { create(:user) }
set(:project) { create :project, :repository }
set(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:request) { double('request', current_user: user) }
before do
project.add_developer(user)
end
subject(:entity) do
described_class.new(merge_request, current_user: user, request: request)
end
describe 'Merge Trains' do
let!(:merge_train) { create(:merge_train, merge_request: merge_request) }
before do
stub_licensed_features(merge_pipelines: true, merge_trains: true)
project.update!(merge_pipelines_enabled: true)
end
it 'has merge train entity' do
expect(subject.as_json).to include(:merge_trains_count)
expect(subject.as_json).to include(:merge_train_index)
end
context 'when the merge train feature is disabled' do
before do
stub_feature_flags(merge_trains_enabled: false)
end
it 'does not have merge trains count' do
expect(subject.as_json).not_to include(:merge_trains_count)
end
end
context 'when the merge request is not on a merge train' do
let!(:merge_train) { }
it 'does not have merge train index' do
expect(subject.as_json).not_to include(:merge_train_index)
end
end
end
end
......@@ -198,38 +198,6 @@ describe MergeRequestWidgetEntity do
expect(subject.as_json).to include(:pipeline_id)
end
describe 'Merge Trains' do
let!(:merge_train) { create(:merge_train, merge_request: merge_request) }
before do
stub_licensed_features(merge_pipelines: true, merge_trains: true)
project.update!(merge_pipelines_enabled: true)
end
it 'has merge train entity' do
expect(subject.as_json).to include(:merge_trains_count)
expect(subject.as_json).to include(:merge_train_index)
end
context 'when the merge train feature is disabled' do
before do
stub_feature_flags(merge_trains_enabled: false)
end
it 'does not have merge trains count' do
expect(subject.as_json).not_to include(:merge_trains_count)
end
end
context 'when the merge request is not on a merge train' do
let!(:merge_train) { }
it 'does not have merge train index' do
expect(subject.as_json).not_to include(:merge_train_index)
end
end
end
describe 'blocking merge requests' do
set(:merge_request_block) { create(:merge_request_block, blocked_merge_request: merge_request) }
......
......@@ -18760,6 +18760,9 @@ msgstr ""
msgid "Unable to load the diff. %{button_try_again}"
msgstr ""
msgid "Unable to load the merge request widget. Try reloading the page."
msgstr ""
msgid "Unable to resolve"
msgstr ""
......
......@@ -1073,7 +1073,7 @@ describe Projects::MergeRequestsController do
end
it 'renders MergeRequest as JSON' do
expect(json_response.keys).to include('id', 'iid', 'description')
expect(json_response.keys).to include('id', 'iid')
end
end
......@@ -1107,7 +1107,7 @@ describe Projects::MergeRequestsController do
it 'renders MergeRequest as JSON' do
subject
expect(json_response.keys).to include('id', 'iid', 'description')
expect(json_response.keys).to include('id', 'iid')
end
end
......
......@@ -222,6 +222,7 @@ export default {
plain_diff_path: '/root/acets-app/merge_requests/22.diff',
merge_request_basic_path: '/root/acets-app/merge_requests/22.json?serializer=basic',
merge_request_widget_path: '/root/acets-app/merge_requests/22/widget.json',
merge_request_cached_widget_path: '/cached.json',
merge_check_path: '/root/acets-app/merge_requests/22/merge_check',
ci_environments_status_url: '/root/acets-app/merge_requests/22/ci_environments_status',
project_archived: false,
......
import Vue from 'vue';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import mrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
import notify from '~/lib/utils/notify';
......@@ -17,6 +19,7 @@ const returnPromise = data =>
describe('mrWidgetOptions', () => {
let vm;
let mock;
let MrWidgetOptions;
const COLLABORATION_MESSAGE = 'Allows commits from members who can merge to the target branch';
......@@ -25,6 +28,13 @@ describe('mrWidgetOptions', () => {
// Prevent component mounting
delete mrWidgetOptions.el;
gl.mrWidgetData = { ...mockData };
gon.features = { asyncMrWidget: true };
mock = new MockAdapter(axios);
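// Stub the widget endpoints hit by MRWidgetService.fetchInitialData()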
mock.onGet(mockData.merge_request_widget_path).reply(() => [200, { ...mockData }]);
mock.onGet(mockData.merge_request_cached_widget_path).reply(() => [200, { ...mockData }]);
MrWidgetOptions = Vue.extend(mrWidgetOptions);
vm = mountComponent(MrWidgetOptions, {
mrData: { ...mockData },
......@@ -33,6 +43,9 @@ describe('mrWidgetOptions', () => {
afterEach(() => {
vm.$destroy();
mock.restore();
gl.mrWidgetData = {};
gon.features = {};
});
describe('data', () => {
......@@ -308,17 +321,16 @@ describe('mrWidgetOptions', () => {
});
describe('bindEventHubListeners', () => {
it('should bind eventHub listeners', () => {
it('should bind eventHub listeners', done => {
spyOn(vm, 'checkStatus').and.returnValue(() => {});
spyOn(vm.service, 'checkStatus').and.returnValue(returnPromise(mockData));
spyOn(vm, 'fetchActionsContent');
spyOn(vm.mr, 'setData');
spyOn(vm, 'resumePolling');
spyOn(vm, 'stopPolling');
spyOn(eventHub, '$on');
vm.bindEventHubListeners();
spyOn(eventHub, '$on').and.callThrough();
setTimeout(() => {
eventHub.$emit('SetBranchRemoveFlag', ['flag']);
expect(vm.mr.isRemovingSourceBranch).toEqual('flag');
......@@ -361,6 +373,9 @@ describe('mrWidgetOptions', () => {
listenersWithServiceRequest.FetchActionsContent();
expect(vm.fetchActionsContent).toHaveBeenCalled();
done();
});
});
});
......@@ -451,22 +466,30 @@ describe('mrWidgetOptions', () => {
});
describe('resumePolling', () => {
it('should call stopTimer on pollingInterval', () => {
it('should call stopTimer on pollingInterval', done => {
setTimeout(() => {
spyOn(vm.pollingInterval, 'resume');
vm.resumePolling();
expect(vm.pollingInterval.resume).toHaveBeenCalled();
done();
});
});
});
describe('stopPolling', () => {
it('should call stopTimer on pollingInterval', () => {
it('should call stopTimer on pollingInterval', done => {
setTimeout(() => {
spyOn(vm.pollingInterval, 'stopTimer');
vm.stopPolling();
expect(vm.pollingInterval.stopTimer).toHaveBeenCalled();
done();
});
});
});
});
......
# frozen_string_literal: true
require 'spec_helper'
describe MergeRequestPollCachedWidgetEntity do
include ProjectForksHelper
let(:project) { create :project, :repository }
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
subject do
described_class.new(resource, request: request).as_json
end
it 'has the latest sha of the target branch' do
is_expected.to include(:target_branch_sha)
end
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true,
diverged_commits_count: 10)
expect(subject[:diverged_commits_count]).to eq(10)
end
end
context 'when MR is not open' do
it 'returns 0' do
allow(resource).to receive_messages(open?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
context 'when MR is not diverging' do
it 'returns 0' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
end
describe 'diff_head_sha' do
before do
allow(resource).to receive(:diff_head_sha) { 'sha' }
end
context 'when diff head commit is empty' do
it 'returns nil' do
allow(resource).to receive(:diff_head_sha) { '' }
expect(subject[:diff_head_sha]).to be_nil
end
end
context 'when diff head commit present' do
it 'returns diff head commit short id' do
expect(subject[:diff_head_sha]).to eq('sha')
end
end
end
describe 'metrics' do
context 'when metrics record exists with merged data' do
before do
resource.mark_as_merged!
resource.metrics.update!(merged_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(resource.metrics.merged_by_id)
end
end
context 'when metrics record exists with closed data' do
before do
resource.close!
resource.metrics.update!(latest_closed_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(resource.metrics.latest_closed_by_id)
end
end
context 'when metrics does not exists' do
before do
resource.mark_as_merged!
resource.metrics.destroy!
resource.reload
end
context 'when events exists' do
let!(:closed_event) { create(:event, :closed, project: project, target: resource) }
let!(:merge_event) { create(:event, :merged, project: project, target: resource) }
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from events record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(merge_event.author_id)
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(closed_event.author_id)
expect(subject.dig(:metrics, :merged_at).to_s)
.to eq(merge_event.updated_at.to_s)
expect(subject.dig(:metrics, :closed_at).to_s)
.to eq(closed_event.updated_at.to_s)
end
end
context 'when events does not exists' do
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
end
end
end
describe 'commits_without_merge_commits' do
def find_matching_commit(short_id)
resource.commits.find { |c| c.short_id == short_id }
end
it 'does not include merge commits' do
commits_in_widget = subject[:commits_without_merge_commits]
expect(commits_in_widget.length).to be < resource.commits.length
expect(commits_in_widget.length).to eq(resource.commits.without_merge_commits.length)
commits_in_widget.each do |c|
expect(find_matching_commit(c[:short_id]).merge_commit?).to eq(false)
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_truthy
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
end
end
end
describe 'attributes for squash commit message' do
context 'when merge request is mergeable' do
before do
stub_const('MergeRequestDiff::COMMITS_SAFE_SIZE', 20)
end
it 'has default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message])
.to eq(resource.default_squash_commit_message)
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
context 'when merge request is not mergeable' do
before do
allow(resource).to receive(:mergeable?).and_return(false)
end
it 'does not have default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message]).to eq(nil)
expect(subject[:commits_without_merge_commits]).to eq(nil)
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe MergeRequestPollWidgetEntity do
include ProjectForksHelper
let(:project) { create :project, :repository }
let(:resource) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
subject do
described_class.new(resource, request: request).as_json
end
it 'has default_merge_commit_message_with_description' do
expect(subject[:default_merge_commit_message_with_description])
.to eq(resource.default_merge_commit_message(include_description: true))
end
describe 'merge_pipeline' do
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when is merged' do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
before do
project.add_maintainer(user)
end
it 'returns merge_pipeline' do
pipeline.reload
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: request)
.as_json
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
end
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
end
end
end
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
project.add_developer(user)
expect(subject[:new_blob_path])
.to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
end
end
context 'when user cannot push to project' do
it 'returns nil' do
expect(subject[:new_blob_path]).to be_nil
end
end
end
describe 'exposed_artifacts_path' do
context 'when merge request has exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_present
end
end
context 'when merge request has no exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_nil
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to eq('merge_when_pipeline_succeeds')
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_strategy]).to be_nil
end
end
context 'when head pipeline is running' do
before do
create(:ci_pipeline, :running, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_pipeline_succeeds])
end
end
context 'when head pipeline is finished' do
before do
create(:ci_pipeline, :success, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to be_empty
end
end
end
describe 'pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
before do
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(result)
end
context 'when user has access to pipelines' do
let(:result) { true }
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
end
context 'when is not up to date' do
it 'returns nil' do
pipeline.update(sha: "not up to date")
expect(subject[:pipeline]).to eq(nil)
end
end
end
context 'when user does not have access to pipelines' do
let(:result) { false }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
end
end
end
......@@ -15,10 +15,6 @@ describe MergeRequestWidgetEntity do
described_class.new(resource, request: request).as_json
end
it 'has the latest sha of the target branch' do
is_expected.to include(:target_branch_sha)
end
describe 'source_project_full_path' do
it 'includes the full path of the source project' do
expect(subject[:source_project_full_path]).to be_present
......@@ -47,156 +43,6 @@ describe MergeRequestWidgetEntity do
end
end
describe 'pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.source_branch, sha: resource.source_branch_sha, head_pipeline_of: resource) }
before do
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).and_call_original
allow_any_instance_of(MergeRequestPresenter).to receive(:can?).with(user, :read_pipeline, anything).and_return(result)
end
context 'when user has access to pipelines' do
let(:result) { true }
context 'when is up to date' do
let(:req) { double('request', current_user: user, project: project) }
it 'returns pipeline' do
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: req)
.as_json
expect(subject[:pipeline]).to eq(pipeline_payload)
end
end
context 'when is not up to date' do
it 'returns nil' do
pipeline.update(sha: "not up to date")
expect(subject[:pipeline]).to eq(nil)
end
end
end
context 'when user does not have access to pipelines' do
let(:result) { false }
it 'does not have pipeline' do
expect(subject[:pipeline]).to eq(nil)
end
end
end
describe 'merge_pipeline' do
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when is merged' do
let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
before do
project.add_maintainer(user)
end
it 'returns merge_pipeline' do
pipeline.reload
pipeline_payload = PipelineDetailsEntity
.represent(pipeline, request: request)
.as_json
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
context 'when user cannot read pipelines on target project' do
before do
project.add_guest(user)
end
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
end
end
end
describe 'metrics' do
context 'when metrics record exists with merged data' do
before do
resource.mark_as_merged!
resource.metrics.update!(merged_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(resource.metrics.merged_by_id)
end
end
context 'when metrics record exists with closed data' do
before do
resource.close!
resource.metrics.update!(latest_closed_by: user)
end
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from metrics record' do
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(resource.metrics.latest_closed_by_id)
end
end
context 'when metrics does not exists' do
before do
resource.mark_as_merged!
resource.metrics.destroy!
resource.reload
end
context 'when events exists' do
let!(:closed_event) { create(:event, :closed, project: project, target: resource) }
let!(:merge_event) { create(:event, :merged, project: project, target: resource) }
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
it 'returns values from events record' do
expect(subject.dig(:metrics, :merged_by, :id))
.to eq(merge_event.author_id)
expect(subject.dig(:metrics, :closed_by, :id))
.to eq(closed_event.author_id)
expect(subject.dig(:metrics, :merged_at).to_s)
.to eq(merge_event.updated_at.to_s)
expect(subject.dig(:metrics, :closed_at).to_s)
.to eq(closed_event.updated_at.to_s)
end
end
context 'when events does not exists' do
it 'matches merge request metrics schema' do
expect(subject[:metrics].with_indifferent_access)
.to match_schema('entities/merge_request_metrics')
end
end
end
end
it 'has email_patches_path' do
expect(subject[:email_patches_path])
.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch")
......@@ -207,100 +53,6 @@ describe MergeRequestWidgetEntity do
.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.diff")
end
it 'has default_merge_commit_message_with_description' do
expect(subject[:default_merge_commit_message_with_description])
.to eq(resource.default_merge_commit_message(include_description: true))
end
describe 'attributes for squash commit message' do
context 'when merge request is mergeable' do
before do
stub_const('MergeRequestDiff::COMMITS_SAFE_SIZE', 20)
end
it 'has default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message])
.to eq(resource.default_squash_commit_message)
expect(subject[:commits_without_merge_commits].size).to eq(12)
end
end
context 'when merge request is not mergeable' do
before do
allow(resource).to receive(:mergeable?).and_return(false)
end
it 'does not have default_squash_commit_message and commits_without_merge_commits' do
expect(subject[:default_squash_commit_message]).to eq(nil)
expect(subject[:commits_without_merge_commits]).to eq(nil)
end
end
end
describe 'new_blob_path' do
context 'when user can push to project' do
it 'returns path' do
project.add_developer(user)
expect(subject[:new_blob_path])
.to eq("/#{resource.project.full_path}/new/#{resource.source_branch}")
end
end
context 'when user cannot push to project' do
it 'returns nil' do
expect(subject[:new_blob_path]).to be_nil
end
end
end
describe 'diff_head_sha' do
before do
allow(resource).to receive(:diff_head_sha) { 'sha' }
end
context 'when diff head commit is empty' do
it 'returns nil' do
allow(resource).to receive(:diff_head_sha) { '' }
expect(subject[:diff_head_sha]).to be_nil
end
end
context 'when diff head commit present' do
it 'returns diff head commit short id' do
expect(subject[:diff_head_sha]).to eq('sha')
end
end
end
describe 'diverged_commits_count' do
context 'when MR open and its diverging' do
it 'returns diverged commits count' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: true,
diverged_commits_count: 10)
expect(subject[:diverged_commits_count]).to eq(10)
end
end
context 'when MR is not open' do
it 'returns 0' do
allow(resource).to receive_messages(open?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
context 'when MR is not diverging' do
it 'returns 0' do
allow(resource).to receive_messages(open?: true, diverged_from_target_branch?: false)
expect(subject[:diverged_commits_count]).to be_zero
end
end
end
describe 'when source project is deleted' do
let(:project) { create(:project, :repository) }
let(:forked_project) { fork_project(project) }
......@@ -316,88 +68,4 @@ describe MergeRequestWidgetEntity do
expect(entity[:rebase_path]).to be_nil
end
end
describe 'commits_without_merge_commits' do
def find_matching_commit(short_id)
resource.commits.find { |c| c.short_id == short_id }
end
it 'does not include merge commits' do
commits_in_widget = subject[:commits_without_merge_commits]
expect(commits_in_widget.length).to be < resource.commits.length
expect(commits_in_widget.length).to eq(resource.commits.without_merge_commits.length)
commits_in_widget.each do |c|
expect(find_matching_commit(c[:short_id]).merge_commit?).to eq(false)
end
end
end
describe 'auto merge' do
context 'when auto merge is enabled' do
let(:resource) { create(:merge_request, :merge_when_pipeline_succeeds) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_truthy
expect(subject[:auto_merge_strategy]).to eq('merge_when_pipeline_succeeds')
end
end
context 'when auto merge is not enabled' do
let(:resource) { create(:merge_request) }
it 'returns auto merge related information' do
expect(subject[:auto_merge_enabled]).to be_falsy
expect(subject[:auto_merge_strategy]).to be_nil
end
end
context 'when head pipeline is running' do
before do
create(:ci_pipeline, :running, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to eq(%w[merge_when_pipeline_succeeds])
end
end
context 'when head pipeline is finished' do
before do
create(:ci_pipeline, :success, project: project,
ref: resource.source_branch,
sha: resource.diff_head_sha)
resource.update_head_pipeline
end
it 'returns available auto merge strategies' do
expect(subject[:available_auto_merge_strategies]).to be_empty
end
end
end
describe 'exposed_artifacts_path' do
context 'when merge request has exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_present
end
end
context 'when merge request has no exposed artifacts' do
before do
expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
end
it 'set the path to poll data' do
expect(subject[:exposed_artifacts_path]).to be_nil
end
end
end
end