Commit ba907426 authored by Phil Hughes

group jobs into stages

parent 50985f54
...@@ -230,12 +230,12 @@ const Api = { ...@@ -230,12 +230,12 @@ const Api = {
return axios.get(url, { params }); return axios.get(url, { params });
}, },
pipelineJobs(projectPath, pipelineId) { pipelineJobs(projectPath, pipelineId, params = {}) {
const url = Api.buildUrl(this.pipelineJobsPath) const url = Api.buildUrl(this.pipelineJobsPath)
.replace(':id', encodeURIComponent(projectPath)) .replace(':id', encodeURIComponent(projectPath))
.replace(':pipeline_id', pipelineId); .replace(':pipeline_id', pipelineId);
return axios.get(url); return axios.get(url, { params });
}, },
buildUrl(url) { buildUrl(url) {
......
...@@ -28,12 +28,20 @@ export const receiveJobsError = ({ commit }) => { ...@@ -28,12 +28,20 @@ export const receiveJobsError = ({ commit }) => {
}; };
// Commits the fetched jobs into the store (stage-grouping happens in the mutation).
export const receiveJobsSuccess = ({ commit }, data) => commit(types.RECEIVE_JOBS_SUCCESS, data);
// Fetches all jobs for the latest pipeline, walking pagination by re-dispatching
// itself with the `x-next-page` header until the API reports no further page.
// `page` defaults to '1' so existing `dispatch('fetchJobs')` callers still work.
export const fetchJobs = ({ dispatch, state, rootState }, page = '1') => {
  dispatch('requestJobs');

  Api.pipelineJobs(rootState.currentProjectId, state.latestPipeline.id, {
    page,
  })
    .then(({ data, headers }) => {
      // NOTE(review): header names are assumed lower-cased by axios — confirm.
      const nextPage = headers && headers['x-next-page'];

      dispatch('receiveJobsSuccess', data);

      if (nextPage) {
        dispatch('fetchJobs', nextPage);
      }
    })
    .catch(() => dispatch('receiveJobsError'));
};
......
// True once the pipeline request has finished and produced a pipeline.
// (The `import/prefer-default-export` disable is no longer needed now that
// this module exports more than one getter.)
export const hasLatestPipeline = state => !state.isLoadingPipeline && !!state.latestPipeline;
// Flattens every stage's job list down to just the jobs whose status is
// 'failed', preserving stage order then job order within each stage.
export const failedJobs = state => {
  const collected = [];

  state.stages.forEach(stage => {
    collected.push(...stage.jobs.filter(job => job.status === 'failed'));
  });

  return collected;
};
...@@ -26,12 +26,28 @@ export default { ...@@ -26,12 +26,28 @@ export default {
}, },
[types.RECEIVE_JOBS_SUCCESS](state, jobs) { [types.RECEIVE_JOBS_SUCCESS](state, jobs) {
state.isLoadingJobs = false; state.isLoadingJobs = false;
state.jobs = jobs.map(job => ({
state.stages = jobs.reduce((acc, job) => {
let stage = acc.find(s => s.title === job.stage);
if (!stage) {
stage = {
title: job.stage,
jobs: [],
};
acc.push(stage);
}
stage.jobs = stage.jobs.concat({
id: job.id, id: job.id,
name: job.name, name: job.name,
status: job.status, status: job.status,
stage: job.stage, stage: job.stage,
duration: job.duration, duration: job.duration,
})); });
return acc;
}, state.stages);
}, },
}; };
// Factory producing the initial state for the IDE pipelines store module.
// `stages` replaces the former flat `jobs` array now that jobs are grouped.
const createState = () => ({
  isLoadingPipeline: false,
  isLoadingJobs: false,
  latestPipeline: null,
  stages: [],
});

export default createState;
...@@ -51,4 +51,11 @@ export const jobs = [ ...@@ -51,4 +51,11 @@ export const jobs = [
stage: 'test', stage: 'test',
duration: 1, duration: 1,
}, },
{
id: 4,
name: 'test 3',
status: 'failed',
stage: 'build',
duration: 1,
},
]; ];
...@@ -182,13 +182,21 @@ describe('IDE pipelines actions', () => { ...@@ -182,13 +182,21 @@ describe('IDE pipelines actions', () => {
}); });
describe('fetchJobs', () => { describe('fetchJobs', () => {
let page = '';
// Each spec needs a pipeline selected, since fetchJobs reads its id.
beforeEach(() => {
  mockedState.latestPipeline = pipelines[0];
});
describe('success', () => { describe('success', () => {
beforeEach(() => { beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines\/(.*)\/jobs/).replyOnce(200, jobs); mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines\/(.*)\/jobs/).replyOnce(() => [
200,
jobs,
{
'x-next-page': page,
},
]);
}); });
it('dispatches request', done => { it('dispatches request', done => {
...@@ -213,12 +221,51 @@ describe('IDE pipelines actions', () => { ...@@ -213,12 +221,51 @@ describe('IDE pipelines actions', () => {
); );
}); });
// With `page` set to '2' the mocked response advertises a next page, so
// fetchJobs should dispatch itself once more after the first success.
it('dispatches twice for both pages', done => {
page = '2';
testAction(
fetchJobs,
null,
mockedState,
[],
[
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
{ type: 'fetchJobs', payload: '2' },
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
],
done,
);
});
it('calls axios with correct URL', () => {
  const apiSpy = spyOn(axios, 'get').and.callThrough();

  fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });

  // The default page '1' must be forwarded as a query param.
  expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
    params: { page: '1' },
  });
});
// Verifies the explicit page argument reaches axios: first the default
// page '1', then page '2' when passed through the action payload.
it('calls axios with page next page', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '1' },
});
page = '2';
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState }, page);
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '2' },
});
}); });
}); });
......
...@@ -37,4 +37,35 @@ describe('IDE pipeline getters', () => { ...@@ -37,4 +37,35 @@ describe('IDE pipeline getters', () => {
expect(getters.hasLatestPipeline(mockedState)).toBe(true); expect(getters.hasLatestPipeline(mockedState)).toBe(true);
}); });
}); });
describe('failedJobs', () => {
  it('returns array of failed jobs', () => {
    mockedState.stages = [
      {
        title: 'test',
        jobs: [{ id: 1, status: 'failed' }, { id: 2, status: 'success' }],
      },
      {
        title: 'build',
        jobs: [{ id: 3, status: 'failed' }, { id: 4, status: 'failed' }],
      },
    ];

    const failed = getters.failedJobs(mockedState);

    // Three of the four jobs above are failed; ids must come out in order.
    expect(failed.length).toBe(3);
    expect(failed).toEqual([
      { id: 1, status: jasmine.anything() },
      { id: 3, status: jasmine.anything() },
      { id: 4, status: jasmine.anything() },
    ]);
  });
});
}); });
...@@ -72,19 +72,49 @@ describe('IDE pipelines mutations', () => { ...@@ -72,19 +72,49 @@ describe('IDE pipelines mutations', () => {
expect(mockedState.isLoadingJobs).toBe(false); expect(mockedState.isLoadingJobs).toBe(false);
}); });
it('sets stages', () => {
  mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);

  // The fixture jobs span exactly two stages: 'test' and 'build'.
  expect(mockedState.stages.length).toBe(2);
  expect(mockedState.stages).toEqual([
    {
      title: 'test',
      jobs: jasmine.anything(),
    },
    {
      title: 'build',
      jobs: jasmine.anything(),
    },
  ]);
});
it('sets jobs in stages', () => {
  mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);

  // Fixture has three 'test' jobs and one 'build' job.
  expect(mockedState.stages[0].jobs.length).toBe(3);
  expect(mockedState.stages[1].jobs.length).toBe(1);
  expect(mockedState.stages).toEqual([
    {
      title: jasmine.anything(),
      jobs: jobs.filter(job => job.stage === 'test').map(job => ({
        id: job.id,
        name: job.name,
        status: job.status,
        stage: job.stage,
        duration: job.duration,
      })),
    },
    {
      title: jasmine.anything(),
      jobs: jobs.filter(job => job.stage === 'build').map(job => ({
        id: job.id,
        name: job.name,
        status: job.status,
        stage: job.stage,
        duration: job.duration,
      })),
    },
  ]);
});
}); });
}); });
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.