import {
  createNodeDict,
  makeLinksFromNodes,
  filterByAncestors,
  parseData,
  removeOrphanNodes,
  getMaxNodes,
} from '~/pipelines/components/parsing_utils';
import { createSankey } from '~/pipelines/components/dag/drawing_utils';

import { mockParsedGraphQLNodes } from './mock_data';

describe('DAG visualization parsing utilities', () => {
  // Fixtures shared by the specs below, all derived from the mock GraphQL nodes
  const nodeDict = createNodeDict(mockParsedGraphQLNodes);
  const unfilteredLinks = makeLinksFromNodes(mockParsedGraphQLNodes, nodeDict);
  const parsed = parseData(mockParsedGraphQLNodes);

  describe('makeLinksFromNodes', () => {
    it('returns the expected link structure', () => {
      expect(unfilteredLinks[0]).toHaveProperty('source', 'build_a');
      expect(unfilteredLinks[0]).toHaveProperty('target', 'test_a');
      expect(unfilteredLinks[0]).toHaveProperty('value', 10);
    });
  });

  describe('filterByAncestors', () => {
    const allLinks = [
      { source: 'job1', target: 'job4' },
      { source: 'job1', target: 'job2' },
      { source: 'job2', target: 'job4' },
    ];

    const dedupedLinks = [{ source: 'job1', target: 'job2' }, { source: 'job2', target: 'job4' }];

    const nodeLookup = {
      job1: {
        name: 'job1',
      },
      job2: {
        name: 'job2',
        needs: ['job1'],
      },
      job4: {
        name: 'job4',
        needs: ['job1', 'job2'],
        category: 'build',
      },
    };

    it('dedupes links', () => {
      expect(filterByAncestors(allLinks, nodeLookup)).toMatchObject(dedupedLinks);
    });
  });

  describe('parseData parent function', () => {
    it('returns an object containing a list of nodes and links', () => {
      // an array of nodes exists and the values are defined
      expect(parsed).toHaveProperty('nodes');
      expect(Array.isArray(parsed.nodes)).toBe(true);
      expect(parsed.nodes.filter(Boolean)).not.toHaveLength(0);

      // an array of links exists and the values are defined
      expect(parsed).toHaveProperty('links');
      expect(Array.isArray(parsed.links)).toBe(true);
      expect(parsed.links.filter(Boolean)).not.toHaveLength(0);
    });
  });

  describe('removeOrphanNodes', () => {
    it('removes sankey nodes that have no needs and are not needed', () => {
      const layoutSettings = {
        width: 200,
        height: 200,
        nodeWidth: 10,
        nodePadding: 20,
        paddingForLabels: 100,
      };

      const sankeyLayout = createSankey(layoutSettings)(parsed);
      const cleanedNodes = removeOrphanNodes(sankeyLayout.nodes);

      /*
        These lengths are determined by the mock data.
        If the data changes, the numbers may also change.
      */
      expect(parsed.nodes).toHaveLength(21);
      expect(cleanedNodes).toHaveLength(12);
    });
  });

  describe('getMaxNodes', () => {
    it('returns the number of nodes in the most populous generation', () => {
      const layerNodes = [
        { layer: 0 },
        { layer: 0 },
        { layer: 1 },
        { layer: 1 },
        { layer: 0 },
        { layer: 3 },
        { layer: 2 },
        { layer: 4 },
        { layer: 1 },
        { layer: 3 },
        { layer: 4 },
      ];

      // layers 0 and 1 each contain three nodes, the largest generations above
      expect(getMaxNodes(layerNodes)).toBe(3);
    });
  });
});