Commit c771b661 authored by Sarah GP

Destructure all the functions

Also applies linting fixes
parent a5fe808a
@@ -40,11 +40,6 @@ export default {
       })
       .catch(reportFailure);
   },
-  computed: {
-    shouldDisplayGraph() {
-      return !this.showFailureAlert;
-    },
-  },
   methods: {
     drawGraph(data) {
       return data;
......
-import * as d3 from 'd3';
 import { sankey, sankeyLeft } from 'd3-sankey';
 import { uniqWith, isEqual } from 'lodash';
 
-/**
+/*
   The following functions are the main engine in transforming the data as
   received from the endpoint into the format the d3 graph expects.
 
   Input is of the form:
   [stages]
     stages: {name, groups}
     groups: [{ name, size, jobs }]
       name is a group name; in the case that the group has one job, it is
         also the job name
       size is the number of parallel jobs
       jobs: [{ name, needs}]
         job name is either the same as the group name or group x/y
 
   Output is of the form:
   { nodes: [node], links: [link] }
     node: { name, category }, + unused info passed through
     link: { source, target, value }, with source & target being node names
       and value being a constant
 
   We create nodes, create links, and then dedupe the links, so that in the case where
   job 4 depends on job 1 and job 2, and job 2 depends on job 1, we show only a single link
   from job 1 to job 2 then another from job 2 to job 4.
 
   CREATE NODES
   stage.name -> node.category
   stage.group.name -> node.name (this is the group name if there are parallel jobs)
   stage.group.jobs -> node.jobs
   stage.group.size -> node.size
 
   CREATE LINKS
   stages.groups.name -> target
   stages.groups.needs.each -> source (source is the name of the group, not the parallel job)
   10 -> value (constant)
-**/
+*/
 export const createNodes = data => {
-  return data
-    .map(({ groups }, idx, stages) => {
-      return groups.map(group => {
-        return { ...group, category: stages[idx].name };
-      });
-    })
-    .flat();
+  return data.flatMap(({ groups, name }) => {
+    return groups.map(group => {
+      return { ...group, category: name };
+    });
+  });
 };
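To make the shapes documented in the comment and handled by createNodes concrete, here is a small sketch; the stage and job names below are invented for illustration and are not taken from the commit.

// Illustrative input only, following the documented { name, groups } shape.
const exampleStages = [
  {
    name: 'build',
    groups: [{ name: 'build_job', size: 1, jobs: [{ name: 'build_job', needs: [] }] }],
  },
  {
    name: 'test',
    groups: [{ name: 'rspec', size: 2, jobs: [{ name: 'rspec 1/2' }, { name: 'rspec 2/2' }] }],
  },
];

// createNodes(exampleStages) flattens every group into a node and tags it with
// its stage name as `category`, producing roughly:
// [
//   { name: 'build_job', size: 1, jobs: [...], category: 'build' },
//   { name: 'rspec', size: 2, jobs: [...], category: 'test' },
// ]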
 export const createNodeDict = nodes => {
@@ -110,24 +107,24 @@ export const getAllAncestors = (nodes, nodeDict) => {
 };
 
 export const filterByAncestors = (links, nodeDict) =>
-  links.filter(link => {
+  links.filter(({ target, source }) => {
     /*
     for every link, check out it's target
     for every target, get the target node's needs
     then drop the current link source from that list
     call a function to get all ancestors, recursively
     is the current link's source in the list of all parents?
     then we drop this link
     */
-    const targetNode = link.target;
+    const targetNode = target;
     const targetNodeNeeds = nodeDict[targetNode].needs;
-    const targetNodeNeedsMinusSource = targetNodeNeeds.filter(need => need !== link.source);
+    const targetNodeNeedsMinusSource = targetNodeNeeds.filter(need => need !== source);
     const allAncestors = getAllAncestors(targetNodeNeedsMinusSource, nodeDict);
-    return !allAncestors.includes(link.source);
+    return !allAncestors.includes(source);
   });
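A worked example of the ancestor check, with invented job names; the needs arrays assume the lookup shape that nodeDict is accessed with above (nodeDict[name].needs).

// Mirrors the dedupe case described in the module comment:
// job2 needs job1, and job4 needs both job1 and job2.
const exampleLinks = [
  { source: 'job1', target: 'job2', value: 10 },
  { source: 'job1', target: 'job4', value: 10 },
  { source: 'job2', target: 'job4', value: 10 },
];

const exampleNodeDict = {
  job1: { name: 'job1', needs: [] },
  job2: { name: 'job2', needs: ['job1'] },
  job4: { name: 'job4', needs: ['job1', 'job2'] },
};

// For the job1 -> job4 link: job4's needs minus the source are ['job2'], whose
// ancestors include job1, so that link is dropped. filterByAncestors should
// therefore keep only job1 -> job2 and job2 -> job4.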
 export const parseData = data => {
@@ -139,14 +136,14 @@ export const parseData = data => {
   return { nodes, links };
 };
 
-/**
+/*
   createSankey calls the d3 layout to generate the relationships and positioning
   values for the nodes and links in the graph.
-**/
+*/
 export const createSankey = ({ width, height, nodeWidth, nodePadding, paddingForLabels }) => {
   const sankeyGenerator = sankey()
-    .nodeId(d => d.name)
+    .nodeId(({ name }) => name)
     .nodeAlign(sankeyLeft)
     .nodeWidth(nodeWidth)
     .nodePadding(nodePadding)
@@ -161,17 +158,17 @@ export const createSankey = ({ width, height, nodeWidth, nodePadding, paddingFor
   });
 };
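A hedged usage sketch of the options object createSankey expects. The numbers are arbitrary placeholders, and the exact return value sits in the collapsed part of the diff; the comment above says only that the layout produces positioning for the parsed nodes and links.

// Placeholder dimensions, not values used by the real component.
const exampleLayoutSettings = {
  width: 1000,
  height: 300,
  nodeWidth: 16,
  nodePadding: 25,
  paddingForLabels: 100,
};

// Yields a d3-sankey layout configured to identify nodes by name and
// left-align them; presumably the { nodes, links } from parseData is what
// gets run through it.
const exampleLayout = createSankey(exampleLayoutSettings);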
 
-/**
+/*
   The number of nodes in the most populous generation drives the height of the graph.
-**/
+*/
 export const getMaxNodes = nodes => {
-  const counts = nodes.reduce((acc, currentNode) => {
-    if (!acc[currentNode.layer]) {
-      acc[currentNode.layer] = 0;
+  const counts = nodes.reduce((acc, { layer }) => {
+    if (!acc[layer]) {
+      acc[layer] = 0;
     }
-    acc[currentNode.layer] += 1;
+    acc[layer] += 1;
     return acc;
   }, []);
@@ -179,11 +176,11 @@ export const getMaxNodes = nodes => {
   return Math.max(...counts);
 };
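A small invented example of the per-generation count; the node names are made up, and `layer` is the column index assigned by the sankey layout.

// Hypothetical nodes after layout.
const exampleLaidOutNodes = [
  { name: 'build', layer: 0 },
  { name: 'test 1/2', layer: 1 },
  { name: 'test 2/2', layer: 1 },
  { name: 'deploy', layer: 2 },
];

// getMaxNodes(exampleLaidOutNodes) tallies nodes per layer ([1, 2, 1]) and
// returns 2, the size of the most populous generation.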
 
-/**
+/*
   Because we cannot know if a node is part of a relationship until after we
   generate the links with createSankey, this function is used after the first call
   to find nodes that have no relations.
-**/
+*/
 export const removeOrphanNodes = sankeyfiedNodes => {
   return sankeyfiedNodes.filter(node => node.sourceLinks.length || node.targetLinks.length);
......
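To illustrate the orphan check above: node names and link placeholders below are invented, but the sourceLinks/targetLinks arrays are the ones the sankey layout attaches to each node.

// A node with both arrays empty participates in no relationship and is dropped.
const exampleConnected = { name: 'build', sourceLinks: [{}], targetLinks: [] };
const exampleOrphan = { name: 'lint', sourceLinks: [], targetLinks: [] };

// removeOrphanNodes([exampleConnected, exampleOrphan]) keeps only the
// connected node.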
-/**
+/*
   It is important that the simple base include parallel jobs
   as well as non-parallel jobs with spaces in the name to prevent
   us relying on spaces as an indicator.
-**/
+*/
 export default {
   stages: [
     {
......
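The comment above implies the fixture needs at least two kinds of group entries, sketched here with invented names; the real fixture contents sit behind the collapsed lines.

// A parallel group: the group name differs from its individual job names.
const exampleParallelGroup = {
  name: 'rspec',
  size: 2,
  jobs: [{ name: 'rspec 1/2' }, { name: 'rspec 2/2' }],
};

// A non-parallel job whose name contains spaces, so spaces alone cannot be
// used to detect parallelism.
const exampleSpacedGroup = {
  name: 'jest with coverage',
  size: 1,
  jobs: [{ name: 'jest with coverage' }],
};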
@@ -8,7 +8,7 @@ import {
   getMaxNodes,
 } from '~/pipelines/components/dag/utils';
-import mockGraphData from './mock-data.js';
+import mockGraphData from './mock_data';
 
 describe('DAG visualization parsing utilities', () => {
   const { nodes, nodeDict } = createNodesStructure(mockGraphData.stages);
@@ -71,7 +71,7 @@ describe('DAG visualization parsing utilities', () => {
     const dedupedLinks = [{ source: 'job1', target: 'job2' }, { source: 'job2', target: 'job4' }];
 
-    const nodeDict = {
+    const nodeLookup = {
       job1: {
         name: 'job1',
       },
@@ -87,14 +87,12 @@ describe('DAG visualization parsing utilities', () => {
     };
 
     it('dedupes links', () => {
-      expect(filterByAncestors(allLinks, nodeDict)).toMatchObject(dedupedLinks);
+      expect(filterByAncestors(allLinks, nodeLookup)).toMatchObject(dedupedLinks);
     });
   });
 
   describe('parseData parent function', () => {
     it('returns an object containing a list of nodes and links', () => {
-      const parsed = parseData(mockGraphData.stages);
 
       // an array of nodes exist and the values are defined
       expect(parsed).toHaveProperty('nodes');
       expect(Array.isArray(parsed.nodes)).toBe(true);
......