Commit c771b661 authored by Sarah GP

Destructure all the functions

Also applies linting fixes
parent a5fe808a
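
The change is mechanical but worth naming: callbacks that took a whole object and dotted into it now destructure just the fields they use, and `.map(...).flat()` chains become a single `.flatMap(...)`. A minimal sketch of both patterns, on made-up data:

```javascript
// Made-up links; only the shape matters here.
const links = [
  { source: 'build_job', target: 'test_job' },
  { source: 'test_job', target: 'deploy_job' },
];

// Before: take the whole object and reach into it.
const before = links.map(link => link.source);

// After: destructure only what the callback needs.
const after = links.map(({ source }) => source);

console.log(before.join() === after.join()); // true
```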
@@ -40,11 +40,6 @@ export default {
       })
       .catch(reportFailure);
   },
-  computed: {
-    shouldDisplayGraph() {
-      return !this.showFailureAlert;
-    },
-  },
   methods: {
     drawGraph(data) {
       return data;
...
-import * as d3 from 'd3';
 import { sankey, sankeyLeft } from 'd3-sankey';
 import { uniqWith, isEqual } from 'lodash';

-/**
+/*
   The following functions are the main engine in transforming the data as
   received from the endpoint into the format the d3 graph expects.
@@ -36,16 +35,14 @@ import { uniqWith, isEqual } from 'lodash';
   stages.groups.name -> target
   stages.groups.needs.each -> source (source is the name of the group, not the parallel job)
   10 -> value (constant)
-**/
+*/

 export const createNodes = data => {
-  return data
-    .map(({ groups }, idx, stages) => {
-      return groups.map(group => {
-        return { ...group, category: stages[idx].name };
-      });
-    })
-    .flat();
+  return data.flatMap(({ groups, name }) => {
+    return groups.map(group => {
+      return { ...group, category: name };
+    });
+  });
 };

 export const createNodeDict = nodes => {
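
To see what the rewritten `createNodes` produces, here is a sketch on an assumed two-stage payload (field names follow the diff; the payload itself is invented):

```javascript
const stages = [
  { name: 'build', groups: [{ name: 'build_job', needs: [] }] },
  {
    name: 'test',
    groups: [
      { name: 'test_job_a', needs: ['build_job'] },
      { name: 'test_job_b', needs: ['build_job'] },
    ],
  },
];

// Same body as the new createNodes: flatten every stage's groups into one
// array, tagging each group with its stage name as `category`.
const nodes = stages.flatMap(({ groups, name }) =>
  groups.map(group => ({ ...group, category: name })),
);

console.log(nodes.map(({ category, name }) => `${category}/${name}`));
// ['build/build_job', 'test/test_job_a', 'test/test_job_b']
```

Destructuring `name` in the callback is what lets the old `(groups, idx, stages) => stages[idx].name` indexing disappear.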
@@ -110,7 +107,7 @@ export const getAllAncestors = (nodes, nodeDict) => {
 };

 export const filterByAncestors = (links, nodeDict) =>
-  links.filter(link => {
+  links.filter(({ target, source }) => {
     /*
       for every link, check out its target
@@ -122,12 +119,12 @@ export const filterByAncestors = (links, nodeDict) =>
       then we drop this link
     */

-    const targetNode = link.target;
+    const targetNode = target;
     const targetNodeNeeds = nodeDict[targetNode].needs;
-    const targetNodeNeedsMinusSource = targetNodeNeeds.filter(need => need !== link.source);
+    const targetNodeNeedsMinusSource = targetNodeNeeds.filter(need => need !== source);
     const allAncestors = getAllAncestors(targetNodeNeedsMinusSource, nodeDict);

-    return !allAncestors.includes(link.source);
+    return !allAncestors.includes(source);
   });

 export const parseData = data => {
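
The `filterByAncestors` hunk above drops redundant links: if a link's source is already reachable through the target's other `needs`, the direct edge adds nothing. A self-contained sketch, using a simplified stand-in for `getAllAncestors` (the real one is outside this hunk):

```javascript
// Simplified stand-in: recursively collect every name reachable via `needs`.
const getAllAncestors = (names, nodeDict) =>
  names.flatMap(name => {
    const { needs = [] } = nodeDict[name] || {};
    return [...needs, ...getAllAncestors(needs, nodeDict)];
  });

const nodeDict = {
  job1: { name: 'job1', needs: [] },
  job2: { name: 'job2', needs: ['job1'] },
  job4: { name: 'job4', needs: ['job2', 'job1'] },
};

const allLinks = [
  { source: 'job1', target: 'job2' },
  { source: 'job2', target: 'job4' },
  { source: 'job1', target: 'job4' }, // redundant: job1 already reaches job4 via job2
];

const filterByAncestors = (links, dict) =>
  links.filter(({ target, source }) => {
    const needsMinusSource = dict[target].needs.filter(need => need !== source);
    return !getAllAncestors(needsMinusSource, dict).includes(source);
  });

console.log(filterByAncestors(allLinks, nodeDict));
// [{ source: 'job1', target: 'job2' }, { source: 'job2', target: 'job4' }]
```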
@@ -139,14 +136,14 @@ export const parseData = data => {
   return { nodes, links };
 };

-/**
+/*
   createSankey calls the d3 layout to generate the relationships and positioning
   values for the nodes and links in the graph.
-**/
+*/

 export const createSankey = ({ width, height, nodeWidth, nodePadding, paddingForLabels }) => {
   const sankeyGenerator = sankey()
-    .nodeId(d => d.name)
+    .nodeId(({ name }) => name)
     .nodeAlign(sankeyLeft)
     .nodeWidth(nodeWidth)
     .nodePadding(nodePadding)
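
For context, the generator being tweaked above is plain d3-sankey; roughly how it gets driven, with arbitrary dimensions (the real extent math sits in the elided lines):

```javascript
import { sankey, sankeyLeft } from 'd3-sankey';

const sankeyGenerator = sankey()
  .nodeId(({ name }) => name) // nodes are keyed by name, per the diff
  .nodeAlign(sankeyLeft)
  .nodeWidth(16) // arbitrary values for the sketch
  .nodePadding(25)
  .extent([[0, 0], [600, 300]]);

// d3-sankey computes x0/x1/y0/y1 plus a `layer` index on every node;
// getMaxNodes below depends on that `layer` value.
const { nodes } = sankeyGenerator({
  nodes: [{ name: 'build_job' }, { name: 'test_job' }],
  links: [{ source: 'build_job', target: 'test_job', value: 10 }],
});

console.log(nodes.map(({ name, layer }) => ({ name, layer })));
// [{ name: 'build_job', layer: 0 }, { name: 'test_job', layer: 1 }]
```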
@@ -161,17 +158,17 @@ export const createSankey = ({ width, height, nodeWidth, nodePadding, paddingFor
     });
 };

-/**
+/*
   The number of nodes in the most populous generation drives the height of the graph.
-**/
+*/

 export const getMaxNodes = nodes => {
-  const counts = nodes.reduce((acc, currentNode) => {
-    if (!acc[currentNode.layer]) {
-      acc[currentNode.layer] = 0;
+  const counts = nodes.reduce((acc, { layer }) => {
+    if (!acc[layer]) {
+      acc[layer] = 0;
     }

-    acc[currentNode.layer] += 1;
+    acc[layer] += 1;
     return acc;
   }, []);
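
Traced on a tiny input, the destructured reduce builds a sparse array of per-layer counts, and the max becomes the graph's driving dimension (data is illustrative):

```javascript
// `layer` is assigned by d3-sankey during createSankey.
const nodes = [
  { name: 'build_job', layer: 0 },
  { name: 'test_job_a', layer: 1 },
  { name: 'test_job_b', layer: 1 },
];

const counts = nodes.reduce((acc, { layer }) => {
  if (!acc[layer]) {
    acc[layer] = 0;
  }
  acc[layer] += 1;
  return acc;
}, []);

console.log(counts); // [1, 2]
console.log(Math.max(...counts)); // 2 -> layer 1 is the most populous generation
```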
@@ -179,11 +176,11 @@ export const getMaxNodes = nodes => {
   return Math.max(...counts);
 };

-/**
+/*
   Because we cannot know if a node is part of a relationship until after we
   generate the links with createSankey, this function is used after the first call
   to find nodes that have no relations.
-**/
+*/

 export const removeOrphanNodes = sankeyfiedNodes => {
   return sankeyfiedNodes.filter(node => node.sourceLinks.length || node.targetLinks.length);
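
A quick sketch of the orphan filter on invented data; after `createSankey`, every node carries `sourceLinks` and `targetLinks` arrays, and orphans are the nodes where both are empty:

```javascript
const sankeyfiedNodes = [
  { name: 'build_job', sourceLinks: [{}], targetLinks: [] },
  { name: 'lonely_job', sourceLinks: [], targetLinks: [] }, // no relations
];

const connected = sankeyfiedNodes.filter(
  node => node.sourceLinks.length || node.targetLinks.length,
);

console.log(connected.map(({ name }) => name)); // ['build_job']
```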
...
-/**
+/*
   It is important that the simple base include parallel jobs
   as well as non-parallel jobs with spaces in the name to prevent
   us relying on spaces as an indicator.
-**/
+*/
 export default {
   stages: [
     {
...
@@ -8,7 +8,7 @@ import {
   getMaxNodes,
 } from '~/pipelines/components/dag/utils';
-import mockGraphData from './mock-data.js';
+import mockGraphData from './mock_data';

 describe('DAG visualization parsing utilities', () => {
   const { nodes, nodeDict } = createNodesStructure(mockGraphData.stages);
@@ -71,7 +71,7 @@ describe('DAG visualization parsing utilities', () => {
   const dedupedLinks = [{ source: 'job1', target: 'job2' }, { source: 'job2', target: 'job4' }];

-  const nodeDict = {
+  const nodeLookup = {
     job1: {
       name: 'job1',
     },
@@ -87,14 +87,12 @@ describe('DAG visualization parsing utilities', () => {
   };

   it('dedupes links', () => {
-    expect(filterByAncestors(allLinks, nodeDict)).toMatchObject(dedupedLinks);
+    expect(filterByAncestors(allLinks, nodeLookup)).toMatchObject(dedupedLinks);
   });
 });

 describe('parseData parent function', () => {
   it('returns an object containing a list of nodes and links', () => {
-    const parsed = parseData(mockGraphData.stages);
-
     // an array of nodes exists and the values are defined
     expect(parsed).toHaveProperty('nodes');
     expect(Array.isArray(parsed.nodes)).toBe(true);
...