!function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.dagre=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
/*
Copyright (c) 2012-2014 Chris Pettitt

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

module.exports = {
  graphlib: require("./lib/graphlib"),

  layout: require("./lib/layout"),
  debug: require("./lib/debug"),
  util: {
    time: require("./lib/util").time,
    notime: require("./lib/util").notime
  },
  version: require("./lib/version")
};

},{"./lib/debug":6,"./lib/graphlib":7,"./lib/layout":9,"./lib/util":29,"./lib/version":30}],2:[function(require,module,exports){
"use strict";

var _ = require("./lodash"),
    greedyFAS = require("./greedy-fas");

module.exports = {
  run: run,
  undo: undo
};

function run(g) {
  var fas = (g.graph().acyclicer === "greedy"
                ? greedyFAS(g, weightFn(g))
                : dfsFAS(g));
  _.each(fas, function(e) {
    var label = g.edge(e);
    g.removeEdge(e);
    label.forwardName = e.name;
    label.reversed = true;
    g.setEdge(e.w, e.v, label, _.uniqueId("rev"));
  });

  function weightFn(g) {
    return function(e) {
      return g.edge(e).weight;
    };
  }
}

function dfsFAS(g) {
  var fas = [],
      stack = {},
      visited = {};

  function dfs(v) {
    if (_.has(visited, v)) {
      return;
    }
    visited[v] = true;
    stack[v] = true;
    _.each(g.outEdges(v), function(e) {
      if (_.has(stack, e.w)) {
        fas.push(e);
      } else {
        dfs(e.w);
      }
    });
    delete stack[v];
  }

  _.each(g.nodes(), dfs);
  return fas;
}

function undo(g) {
  _.each(g.edges(), function(e) {
    var label = g.edge(e);
    if (label.reversed) {
      g.removeEdge(e);

      var forwardName = label.forwardName;
      delete label.reversed;
      delete label.forwardName;
      g.setEdge(e.w, e.v, label, forwardName);
    }
  });
}

},{"./greedy-fas":8,"./lodash":10}],3:[function(require,module,exports){
var _ = require("./lodash"),
    util = require("./util");

module.exports = addBorderSegments;

function addBorderSegments(g) {
  function dfs(v) {
    var children = g.children(v),
        node = g.node(v);
    if (children.length) {
      _.each(children, dfs);
    }

    if (_.has(node, "minRank")) {
      node.borderLeft = [];
      node.borderRight = [];
      for (var rank = node.minRank, maxRank = node.maxRank + 1;
           rank < maxRank;
           ++rank) {
        addBorderNode(g, "borderLeft", "_bl", v, node, rank);
        addBorderNode(g, "borderRight", "_br", v, node, rank);
      }
    }
  }

  _.each(g.children(), dfs);
}

function addBorderNode(g, prop, prefix, sg, sgNode, rank) {
  var label = { width: 0, height: 0, rank: rank },
      prev = sgNode[prop][rank - 1],
      curr = util.addDummyNode(g, "border", label, prefix);
  sgNode[prop][rank] = curr;
  g.setParent(curr, sg);
  if (prev) {
    g.setEdge(prev, curr, { weight: 1 });
  }
}

},{"./lodash":10,"./util":29}],4:[function(require,module,exports){
"use strict";

var _ = require("./lodash");

module.exports = {
  adjust: adjust,
  undo: undo
};

function adjust(g) {
  var rankDir = g.graph().rankdir.toLowerCase();
  if (rankDir === "lr" || rankDir === "rl") {
    swapWidthHeight(g);
  }
}

function undo(g) {
  var rankDir = g.graph().rankdir.toLowerCase();
  if (rankDir === "bt" || rankDir === "rl") {
    reverseY(g);
  }

  if (rankDir === "lr" || rankDir === "rl") {
    swapXY(g);
    swapWidthHeight(g);
  }
}

function swapWidthHeight(g) {
  _.each(g.nodes(), function(v) { swapWidthHeightOne(g.node(v)); });
  _.each(g.edges(), function(e) { swapWidthHeightOne(g.edge(e)); });
}

function swapWidthHeightOne(attrs) {
  var w = attrs.width;
  attrs.width = attrs.height;
  attrs.height = w;
}

function reverseY(g) {
  _.each(g.nodes(), function(v) { reverseYOne(g.node(v)); });

  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    _.each(edge.points, reverseYOne);
    if (_.has(edge, "y")) {
      reverseYOne(edge);
    }
  });
}

function reverseYOne(attrs) {
  attrs.y = -attrs.y;
}

function swapXY(g) {
  _.each(g.nodes(), function(v) { swapXYOne(g.node(v)); });

  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    _.each(edge.points, swapXYOne);
    if (_.has(edge, "x")) {
      swapXYOne(edge);
    }
  });
}

function swapXYOne(attrs) {
  var x = attrs.x;
  attrs.x = attrs.y;
  attrs.y = x;
}

},{"./lodash":10}],5:[function(require,module,exports){
/*
 * Simple doubly linked list implementation derived from Cormen, et al.,
 * "Introduction to Algorithms".
 */

module.exports = List;

function List() {
  var sentinel = {};
  sentinel._next = sentinel._prev = sentinel;
  this._sentinel = sentinel;
}

List.prototype.dequeue = function() {
  var sentinel = this._sentinel,
      entry = sentinel._prev;
  if (entry !== sentinel) {
    unlink(entry);
    return entry;
  }
};

List.prototype.enqueue = function(entry) {
  var sentinel = this._sentinel;
  if (entry._prev && entry._next) {
    unlink(entry);
  }
  entry._next = sentinel._next;
  sentinel._next._prev = entry;
  sentinel._next = entry;
  entry._prev = sentinel;
};

List.prototype.toString = function() {
  var strs = [],
      sentinel = this._sentinel,
      curr = sentinel._prev;
  while (curr !== sentinel) {
    strs.push(JSON.stringify(curr, filterOutLinks));
    curr = curr._prev;
  }
  return "[" + strs.join(", ") + "]";
};

function unlink(entry) {
  entry._prev._next = entry._next;
  entry._next._prev = entry._prev;
  delete entry._next;
  delete entry._prev;
}

function filterOutLinks(k, v) {
  if (k !== "_next" && k !== "_prev") {
    return v;
  }
}

},{}],6:[function(require,module,exports){
var _ = require("./lodash"),
    util = require("./util"),
    Graph = require("./graphlib").Graph;

module.exports = {
  debugOrdering: debugOrdering
};

/* istanbul ignore next */
function debugOrdering(g) {
  var layerMatrix = util.buildLayerMatrix(g);

  var h = new Graph({ compound: true, multigraph: true }).setGraph({});

  _.each(g.nodes(), function(v) {
    h.setNode(v, { label: v });
    h.setParent(v, "layer" + g.node(v).rank);
  });

  _.each(g.edges(), function(e) {
    h.setEdge(e.v, e.w, {}, e.name);
  });

  _.each(layerMatrix, function(layer, i) {
    var layerV = "layer" + i;
    h.setNode(layerV, { rank: "same" });
    _.reduce(layer, function(u, v) {
      h.setEdge(u, v, { style: "invis" });
      return v;
    });
  });

  return h;
}

},{"./graphlib":7,"./lodash":10,"./util":29}],7:[function(require,module,exports){
/* global window */

var graphlib;

if (require) {
  try {
    graphlib = require("graphlib");
  } catch (e) {}
}

if (!graphlib) {
  graphlib = window.graphlib;
}

module.exports = graphlib;

},{"graphlib":undefined}],8:[function(require,module,exports){
var _ = require("./lodash"),
    Graph = require("./graphlib").Graph,
    List = require("./data/list");

/*
 * A greedy heuristic for finding a feedback arc set for a graph. A feedback
 * arc set is a set of edges that can be removed to make a graph acyclic.
 * The algorithm comes from: P. Eades, X. Lin, and W. F. Smyth, "A fast and
 * effective heuristic for the feedback arc set problem." This implementation
 * adjusts that from the paper to allow for weighted edges.
 */
module.exports = greedyFAS;

var DEFAULT_WEIGHT_FN = _.constant(1);

function greedyFAS(g, weightFn) {
  if (g.nodeCount() <= 1) {
    return [];
  }
  var state = buildState(g, weightFn || DEFAULT_WEIGHT_FN);
  var results = doGreedyFAS(state.graph, state.buckets, state.zeroIdx);

  // Expand multi-edges
  return _.flatten(_.map(results, function(e) {
    return g.outEdges(e.v, e.w);
  }), true);
}

function doGreedyFAS(g, buckets, zeroIdx) {
  var results = [],
      sources = buckets[buckets.length - 1],
      sinks = buckets[0];

  var entry;
  while (g.nodeCount()) {
    while ((entry = sinks.dequeue())) { removeNode(g, buckets, zeroIdx, entry); }
    while ((entry = sources.dequeue())) { removeNode(g, buckets, zeroIdx, entry); }
    if (g.nodeCount()) {
      for (var i = buckets.length - 2; i > 0; --i) {
        entry = buckets[i].dequeue();
        if (entry) {
          results = results.concat(removeNode(g, buckets, zeroIdx, entry, true));
          break;
        }
      }
    }
  }

  return results;
}

function removeNode(g, buckets, zeroIdx, entry, collectPredecessors) {
  var results = collectPredecessors ? [] : undefined;

  _.each(g.inEdges(entry.v), function(edge) {
    var weight = g.edge(edge),
        uEntry = g.node(edge.v);

    if (collectPredecessors) {
      results.push({ v: edge.v, w: edge.w });
    }

    uEntry.out -= weight;
    assignBucket(buckets, zeroIdx, uEntry);
  });

  _.each(g.outEdges(entry.v), function(edge) {
    var weight = g.edge(edge),
        w = edge.w,
        wEntry = g.node(w);
    wEntry.in -= weight;
    assignBucket(buckets, zeroIdx, wEntry);
  });

  g.removeNode(entry.v);

  return results;
}

function buildState(g, weightFn) {
  var fasGraph = new Graph(),
      maxIn = 0,
      maxOut = 0;

  _.each(g.nodes(), function(v) {
    fasGraph.setNode(v, { v: v, in: 0, out: 0 });
  });

  // Aggregate weights on nodes, but also sum the weights across multi-edges
  // into a single edge for the fasGraph.
  _.each(g.edges(), function(e) {
    var prevWeight = fasGraph.edge(e.v, e.w) || 0,
        weight = weightFn(e),
        edgeWeight = prevWeight + weight;
    fasGraph.setEdge(e.v, e.w, edgeWeight);
    maxOut = Math.max(maxOut, fasGraph.node(e.v).out += weight);
    maxIn = Math.max(maxIn, fasGraph.node(e.w).in += weight);
  });

  var buckets = _.range(maxOut + maxIn + 3).map(function() { return new List(); });
  var zeroIdx = maxIn + 1;

  _.each(fasGraph.nodes(), function(v) {
    assignBucket(buckets, zeroIdx, fasGraph.node(v));
  });

  return { graph: fasGraph, buckets: buckets, zeroIdx: zeroIdx };
}

function assignBucket(buckets, zeroIdx, entry) {
  if (!entry.out) {
    buckets[0].enqueue(entry);
  } else if (!entry.in) {
    buckets[buckets.length - 1].enqueue(entry);
  } else {
    buckets[entry.out - entry.in + zeroIdx].enqueue(entry);
  }
}

},{"./data/list":5,"./graphlib":7,"./lodash":10}],9:[function(require,module,exports){
"use strict";

var _ = require("./lodash"),
    acyclic = require("./acyclic"),
    normalize = require("./normalize"),
    rank = require("./rank"),
    normalizeRanks = require("./util").normalizeRanks,
    parentDummyChains = require("./parent-dummy-chains"),
    removeEmptyRanks = require("./util").removeEmptyRanks,
    nestingGraph = require("./nesting-graph"),
    addBorderSegments = require("./add-border-segments"),
    coordinateSystem = require("./coordinate-system"),
    order = require("./order"),
    position = require("./position"),
    util = require("./util"),
    Graph = require("./graphlib").Graph;

module.exports = layout;

function layout(g, opts) {
  var time = opts && opts.debugTiming ? util.time : util.notime;
  time("layout", function() {
    var layoutGraph = time(" buildLayoutGraph",
                           function() { return buildLayoutGraph(g); });
    time(" runLayout", function() { runLayout(layoutGraph, time); });
    time(" updateInputGraph", function() { updateInputGraph(g, layoutGraph); });
  });
}

function runLayout(g, time) {
  time(" makeSpaceForEdgeLabels", function() { makeSpaceForEdgeLabels(g); });
  time(" removeSelfEdges", function() { removeSelfEdges(g); });
  time(" acyclic", function() { acyclic.run(g); });
  time(" nestingGraph.run", function() { nestingGraph.run(g); });
  time(" rank", function() { rank(util.asNonCompoundGraph(g)); });
  time(" injectEdgeLabelProxies", function() { injectEdgeLabelProxies(g); });
  time(" removeEmptyRanks", function() { removeEmptyRanks(g); });
  time(" nestingGraph.cleanup", function() { nestingGraph.cleanup(g); });
  time(" normalizeRanks", function() { normalizeRanks(g); });
  time(" assignRankMinMax", function() { assignRankMinMax(g); });
  time(" removeEdgeLabelProxies", function() { removeEdgeLabelProxies(g); });
  time(" normalize.run", function() { normalize.run(g); });
  time(" parentDummyChains", function() { parentDummyChains(g); });
  time(" addBorderSegments", function() { addBorderSegments(g); });
  time(" order", function() { order(g); });
  time(" insertSelfEdges", function() { insertSelfEdges(g); });
  time(" adjustCoordinateSystem", function() { coordinateSystem.adjust(g); });
  time(" position", function() { position(g); });
  time(" positionSelfEdges", function() { positionSelfEdges(g); });
  time(" removeBorderNodes", function() { removeBorderNodes(g); });
  time(" normalize.undo", function() { normalize.undo(g); });
  time(" fixupEdgeLabelCoords", function() { fixupEdgeLabelCoords(g); });
  time(" undoCoordinateSystem", function() { coordinateSystem.undo(g); });
  time(" translateGraph", function() { translateGraph(g); });
  time(" assignNodeIntersects", function() { assignNodeIntersects(g); });
  time(" reversePoints", function() { reversePointsForReversedEdges(g); });
  time(" acyclic.undo", function() { acyclic.undo(g); });
}

/*
 * Copies final layout information from the layout graph back to the input
 * graph. This process only copies whitelisted attributes from the layout graph
 * to the input graph, so it serves as a good place to determine what
 * attributes can influence layout.
 */
function updateInputGraph(inputGraph, layoutGraph) {
  _.each(inputGraph.nodes(), function(v) {
    var inputLabel = inputGraph.node(v),
        layoutLabel = layoutGraph.node(v);

    if (inputLabel) {
      inputLabel.x = layoutLabel.x;
      inputLabel.y = layoutLabel.y;

      if (layoutGraph.children(v).length) {
        inputLabel.width = layoutLabel.width;
        inputLabel.height = layoutLabel.height;
      }
    }
  });

  _.each(inputGraph.edges(), function(e) {
    var inputLabel = inputGraph.edge(e),
        layoutLabel = layoutGraph.edge(e);

    inputLabel.points = layoutLabel.points;
    if (_.has(layoutLabel, "x")) {
      inputLabel.x = layoutLabel.x;
      inputLabel.y = layoutLabel.y;
    }
  });

  inputGraph.graph().width = layoutGraph.graph().width;
  inputGraph.graph().height = layoutGraph.graph().height;
}

var graphNumAttrs = ["nodesep", "edgesep", "ranksep", "marginx", "marginy"],
    graphDefaults = { ranksep: 50, edgesep: 20, nodesep: 50, rankdir: "tb" },
    graphAttrs = ["acyclicer", "ranker", "rankdir", "align"],
    nodeNumAttrs = ["width", "height"],
    nodeDefaults = { width: 0, height: 0 },
    edgeNumAttrs = ["minlen", "weight", "width", "height", "labeloffset"],
    edgeDefaults = {
      minlen: 1, weight: 1, width: 0, height: 0,
      labeloffset: 10, labelpos: "r"
    },
    edgeAttrs = ["labelpos"];

/*
 * Constructs a new graph from the input graph, which can be used for layout.
 * This process copies only whitelisted attributes from the input graph to the
 * layout graph. Thus this function serves as a good place to determine what
 * attributes can influence layout.
 */
function buildLayoutGraph(inputGraph) {
  var g = new Graph({ multigraph: true, compound: true }),
      graph = canonicalize(inputGraph.graph());

  g.setGraph(_.merge({},
    graphDefaults,
    selectNumberAttrs(graph, graphNumAttrs),
    _.pick(graph, graphAttrs)));

  _.each(inputGraph.nodes(), function(v) {
    var node = canonicalize(inputGraph.node(v));
    g.setNode(v, _.defaults(selectNumberAttrs(node, nodeNumAttrs), nodeDefaults));
    g.setParent(v, inputGraph.parent(v));
  });

  _.each(inputGraph.edges(), function(e) {
    var edge = canonicalize(inputGraph.edge(e));
    g.setEdge(e, _.merge({},
      edgeDefaults,
      selectNumberAttrs(edge, edgeNumAttrs),
      _.pick(edge, edgeAttrs)));
  });

  return g;
}

/*
 * This idea comes from the Gansner paper: to account for edge labels in our
 * layout we split each rank in half by doubling minlen and halving ranksep.
 * Then we can place labels at these mid-points between nodes.
 *
 * We also add some minimal padding to the width to push the label for the edge
 * away from the edge itself a bit.
 */
function makeSpaceForEdgeLabels(g) {
  var graph = g.graph();
  graph.ranksep /= 2;
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    edge.minlen *= 2;
    if (edge.labelpos.toLowerCase() !== "c") {
      if (graph.rankdir === "TB" || graph.rankdir === "BT") {
        edge.width += edge.labeloffset;
      } else {
        edge.height += edge.labeloffset;
      }
    }
  });
}

/*
 * Creates temporary dummy nodes that capture the rank in which each edge's
 * label is going to, if it has one of non-zero width and height. We do this
 * so that we can safely remove empty ranks while preserving balance for the
 * label's position.
 */
function injectEdgeLabelProxies(g) {
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    if (edge.width && edge.height) {
      var v = g.node(e.v),
          w = g.node(e.w),
          label = { rank: (w.rank - v.rank) / 2 + v.rank, e: e };
      util.addDummyNode(g, "edge-proxy", label, "_ep");
    }
  });
}

function assignRankMinMax(g) {
  var maxRank = 0;
  _.each(g.nodes(), function(v) {
    var node = g.node(v);
    if (node.borderTop) {
      node.minRank = g.node(node.borderTop).rank;
      node.maxRank = g.node(node.borderBottom).rank;
      maxRank = _.max(maxRank, node.maxRank);
    }
  });
  g.graph().maxRank = maxRank;
}

function removeEdgeLabelProxies(g) {
  _.each(g.nodes(), function(v) {
    var node = g.node(v);
    if (node.dummy === "edge-proxy") {
      g.edge(node.e).labelRank = node.rank;
      g.removeNode(v);
    }
  });
}

function translateGraph(g) {
  var minX = Number.POSITIVE_INFINITY,
      maxX = 0,
      minY = Number.POSITIVE_INFINITY,
      maxY = 0,
      graphLabel = g.graph(),
      marginX = graphLabel.marginx || 0,
      marginY = graphLabel.marginy || 0;

  function getExtremes(attrs) {
    var x = attrs.x,
        y = attrs.y,
        w = attrs.width,
        h = attrs.height;
    minX = Math.min(minX, x - w / 2);
    maxX = Math.max(maxX, x + w / 2);
    minY = Math.min(minY, y - h / 2);
    maxY = Math.max(maxY, y + h / 2);
  }

  _.each(g.nodes(), function(v) { getExtremes(g.node(v)); });
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    if (_.has(edge, "x")) {
      getExtremes(edge);
    }
  });

  minX -= marginX;
  minY -= marginY;

  _.each(g.nodes(), function(v) {
    var node = g.node(v);
    node.x -= minX;
    node.y -= minY;
  });

  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    _.each(edge.points, function(p) {
      p.x -= minX;
      p.y -= minY;
    });
    if (_.has(edge, "x")) { edge.x -= minX; }
    if (_.has(edge, "y")) { edge.y -= minY; }
  });

  graphLabel.width = maxX - minX + marginX;
  graphLabel.height = maxY - minY + marginY;
}

function assignNodeIntersects(g) {
  _.each(g.edges(), function(e) {
    var edge = g.edge(e),
        nodeV = g.node(e.v),
        nodeW = g.node(e.w),
        p1, p2;
    if (!edge.points) {
      edge.points = [];
      p1 = nodeW;
      p2 = nodeV;
    } else {
      p1 = edge.points[0];
      p2 = edge.points[edge.points.length - 1];
    }
    edge.points.unshift(util.intersectRect(nodeV, p1));
    edge.points.push(util.intersectRect(nodeW, p2));
  });
}

function fixupEdgeLabelCoords(g) {
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    if (_.has(edge, "x")) {
      if (edge.labelpos === "l" || edge.labelpos === "r") {
        edge.width -= edge.labeloffset;
      }
      switch (edge.labelpos) {
        case "l": edge.x -= edge.width / 2 + edge.labeloffset; break;
        case "r": edge.x += edge.width / 2 + edge.labeloffset; break;
      }
    }
  });
}

function reversePointsForReversedEdges(g) {
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    if (edge.reversed) {
      edge.points.reverse();
    }
  });
}

function removeBorderNodes(g) {
  _.each(g.nodes(), function(v) {
    if (g.children(v).length) {
      var node = g.node(v),
          t = g.node(node.borderTop),
          b = g.node(node.borderBottom),
          l = g.node(_.last(node.borderLeft)),
          r = g.node(_.last(node.borderRight));

      node.width = Math.abs(r.x - l.x);
      node.height = Math.abs(b.y - t.y);
      node.x = l.x + node.width / 2;
      node.y = t.y + node.height / 2;
    }
  });

  _.each(g.nodes(), function(v) {
    if (g.node(v).dummy === "border") {
      g.removeNode(v);
    }
  });
}

function removeSelfEdges(g) {
  _.each(g.edges(), function(e) {
    if (e.v === e.w) {
      var node = g.node(e.v);
      if (!node.selfEdges) {
        node.selfEdges = [];
      }
      node.selfEdges.push({ e: e, label: g.edge(e) });
      g.removeEdge(e);
    }
  });
}

function insertSelfEdges(g) {
  var layers = util.buildLayerMatrix(g);
  _.each(layers, function(layer) {
    var orderShift = 0;
    _.each(layer, function(v, i) {
      var node = g.node(v);
      node.order = i + orderShift;
      _.each(node.selfEdges, function(selfEdge) {
        util.addDummyNode(g, "selfedge", {
          width: selfEdge.label.width,
          height: selfEdge.label.height,
          rank: node.rank,
          order: i + (++orderShift),
          e: selfEdge.e,
          label: selfEdge.label
        }, "_se");
      });
      delete node.selfEdges;
    });
  });
}

function positionSelfEdges(g) {
  _.each(g.nodes(), function(v) {
    var node = g.node(v);
    if (node.dummy === "selfedge") {
      var selfNode = g.node(node.e.v),
          x = selfNode.x + selfNode.width / 2,
          y = selfNode.y,
          dx = node.x - x,
          dy = selfNode.height / 2;
      g.setEdge(node.e, node.label);
      g.removeNode(v);
      node.label.points = [
        { x: x + 2 * dx / 3, y: y - dy },
        { x: x + 5 * dx / 6, y: y - dy },
        { x: x + dx , y: y },
        { x: x + 5 * dx / 6, y: y + dy },
        { x: x + 2 * dx / 3, y: y + dy },
      ];
      node.label.x = node.x;
      node.label.y = node.y;
    }
  });
}

function selectNumberAttrs(obj, attrs) {
  return _.mapValues(_.pick(obj, attrs), Number);
}

function canonicalize(attrs) {
  var newAttrs = {};
  _.each(attrs, function(v, k) {
    newAttrs[k.toLowerCase()] = v;
  });
  return newAttrs;
}

},{"./acyclic":2,"./add-border-segments":3,"./coordinate-system":4,"./graphlib":7,"./lodash":10,"./nesting-graph":11,"./normalize":12,"./order":17,"./parent-dummy-chains":22,"./position":24,"./rank":26,"./util":29}],10:[function(require,module,exports){
/* global window */

var lodash;

if (require) {
  try {
    lodash = require("lodash");
  } catch (e) {}
}

if (!lodash) {
  lodash = window._;
}

module.exports = lodash;

},{"lodash":undefined}],11:[function(require,module,exports){
var _ = require("./lodash"),
    util = require("./util");

module.exports = {
  run: run,
  cleanup: cleanup
};

/*
 * A nesting graph creates dummy nodes for the tops and bottoms of subgraphs,
 * adds appropriate edges to ensure that all cluster nodes are placed between
 * these boundaries, and ensures that the graph is connected.
 *
 * In addition we ensure, through the use of the minlen property, that nodes
 * and subgraph border nodes do not end up on the same rank.
 *
 * Preconditions:
 *
 * 1. Input graph is a DAG
 * 2. Nodes in the input graph have a minlen attribute
 *
 * Postconditions:
 *
 * 1. Input graph is connected.
 * 2. Dummy nodes are added for the tops and bottoms of subgraphs.
 * 3. The minlen attribute for nodes is adjusted to ensure nodes do not
 *    get placed on the same rank as subgraph border nodes.
 *
 * The nesting graph idea comes from Sander, "Layout of Compound Directed
 * Graphs."
 */
function run(g) {
  var root = util.addDummyNode(g, "root", {}, "_root"),
      depths = treeDepths(g),
      height = _.max(depths) - 1,
      nodeSep = 2 * height + 1;

  g.graph().nestingRoot = root;

  // Multiply minlen by nodeSep to align nodes on non-border ranks.
  _.each(g.edges(), function(e) { g.edge(e).minlen *= nodeSep; });

  // Calculate a weight that is sufficient to keep subgraphs vertically compact
  var weight = sumWeights(g) + 1;

  // Create border nodes and link them up
  _.each(g.children(), function(child) {
    dfs(g, root, nodeSep, weight, height, depths, child);
  });

  // Save the multiplier for node layers for later removal of empty border
  // layers.
  g.graph().nodeRankFactor = nodeSep;
}

function dfs(g, root, nodeSep, weight, height, depths, v) {
  var children = g.children(v);
  if (!children.length) {
    if (v !== root) {
      g.setEdge(root, v, { weight: 0, minlen: nodeSep });
    }
    return;
  }

  var top = util.addBorderNode(g, "_bt"),
      bottom = util.addBorderNode(g, "_bb"),
      label = g.node(v);

  g.setParent(top, v);
  label.borderTop = top;
  g.setParent(bottom, v);
  label.borderBottom = bottom;

  _.each(children, function(child) {
    dfs(g, root, nodeSep, weight, height, depths, child);

    var childNode = g.node(child),
        childTop = childNode.borderTop ? childNode.borderTop : child,
        childBottom = childNode.borderBottom ? childNode.borderBottom : child,
        thisWeight = childNode.borderTop ? weight : 2 * weight,
        minlen = childTop !== childBottom ? 1 : height - depths[v] + 1;

    g.setEdge(top, childTop, {
      weight: thisWeight,
      minlen: minlen,
      nestingEdge: true
    });

    g.setEdge(childBottom, bottom, {
      weight: thisWeight,
      minlen: minlen,
      nestingEdge: true
    });
  });

  if (!g.parent(v)) {
    g.setEdge(root, top, { weight: 0, minlen: height + depths[v] });
  }
}

function treeDepths(g) {
  var depths = {};
  function dfs(v, depth) {
    var children = g.children(v);
    if (children && children.length) {
      _.each(children, function(child) {
        dfs(child, depth + 1);
      });
    }
    depths[v] = depth;
  }
  _.each(g.children(), function(v) { dfs(v, 1); });
  return depths;
}

function sumWeights(g) {
  return _.reduce(g.edges(), function(acc, e) {
    return acc + g.edge(e).weight;
  }, 0);
}

function cleanup(g) {
  var graphLabel = g.graph();
  g.removeNode(graphLabel.nestingRoot);
  delete graphLabel.nestingRoot;
  _.each(g.edges(), function(e) {
    var edge = g.edge(e);
    if (edge.nestingEdge) {
      g.removeEdge(e);
    }
  });
}

},{"./lodash":10,"./util":29}],12:[function(require,module,exports){
"use strict";

var _ = require("./lodash"),
    util = require("./util");

module.exports = {
  run: run,
  undo: undo
};

/*
 * Breaks any long edges in the graph into short segments that span 1 layer
 * each. This operation is undoable with the denormalize function.
 *
 * Pre-conditions:
 *
 * 1. The input graph is a DAG.
 * 2. Each node in the graph has a "rank" property.
 *
 * Post-condition:
 *
 * 1. All edges in the graph have a length of 1.
 * 2. Dummy nodes are added where edges have been split into segments.
 * 3. The graph is augmented with a "dummyChains" attribute which contains
 *    the first dummy in each chain of dummy nodes produced.
 */
function run(g) {
  g.graph().dummyChains = [];
  _.each(g.edges(), function(edge) { normalizeEdge(g, edge); });
}

function normalizeEdge(g, e) {
  var v = e.v,
      vRank = g.node(v).rank,
      w = e.w,
      wRank = g.node(w).rank,
      name = e.name,
      edgeLabel = g.edge(e),
      labelRank = edgeLabel.labelRank;

  if (wRank === vRank + 1) return;

  g.removeEdge(e);

  var dummy, attrs, i;
  for (i = 0, ++vRank; vRank < wRank; ++i, ++vRank) {
    edgeLabel.points = [];
    attrs = {
      width: 0, height: 0,
      edgeLabel: edgeLabel, edgeObj: e,
      rank: vRank
    };
    dummy = util.addDummyNode(g, "edge", attrs, "_d");
    if (vRank === labelRank) {
      attrs.width = edgeLabel.width;
      attrs.height = edgeLabel.height;
      attrs.dummy = "edge-label";
      attrs.labelpos = edgeLabel.labelpos;
    }
    g.setEdge(v, dummy, { weight: edgeLabel.weight }, name);
    if (i === 0) {
      g.graph().dummyChains.push(dummy);
    }
    v = dummy;
  }

  g.setEdge(v, w, { weight: edgeLabel.weight }, name);
}

function undo(g) {
  _.each(g.graph().dummyChains, function(v) {
    var node = g.node(v),
        origLabel = node.edgeLabel,
        w;
    g.setEdge(node.edgeObj, origLabel);
    while (node.dummy) {
      w = g.successors(v)[0];
      g.removeNode(v);
      origLabel.points.push({ x: node.x, y: node.y });
      if (node.dummy === "edge-label") {
        origLabel.x = node.x;
        origLabel.y = node.y;
        origLabel.width = node.width;
        origLabel.height = node.height;
      }
      v = w;
      node = g.node(v);
    }
  });
}

},{"./lodash":10,"./util":29}],13:[function(require,module,exports){
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = addSubgraphConstraints;
|
|
|
|
function addSubgraphConstraints(g, cg, vs) {
|
|
var prev = {},
|
|
rootPrev;
|
|
|
|
_.each(vs, function(v) {
|
|
var child = g.parent(v),
|
|
parent,
|
|
prevChild;
|
|
while (child) {
|
|
parent = g.parent(child);
|
|
if (parent) {
|
|
prevChild = prev[parent];
|
|
prev[parent] = child;
|
|
} else {
|
|
prevChild = rootPrev;
|
|
rootPrev = child;
|
|
}
|
|
if (prevChild && prevChild !== child) {
|
|
cg.setEdge(prevChild, child);
|
|
return;
|
|
}
|
|
child = parent;
|
|
}
|
|
});
|
|
|
|
/*
|
|
function dfs(v) {
|
|
var children = v ? g.children(v) : g.children();
|
|
if (children.length) {
|
|
var min = Number.POSITIVE_INFINITY,
|
|
subgraphs = [];
|
|
_.each(children, function(child) {
|
|
var childMin = dfs(child);
|
|
if (g.children(child).length) {
|
|
subgraphs.push({ v: child, order: childMin });
|
|
}
|
|
min = Math.min(min, childMin);
|
|
});
|
|
_.reduce(_.sortBy(subgraphs, "order"), function(prev, curr) {
|
|
cg.setEdge(prev.v, curr.v);
|
|
return curr;
|
|
});
|
|
return min;
|
|
}
|
|
return g.node(v).order;
|
|
}
|
|
dfs(undefined);
|
|
*/
|
|
}
|
|
|
|
},{"../lodash":10}],14:[function(require,module,exports){
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = barycenter;
|
|
|
|
function barycenter(g, movable) {
|
|
return _.map(movable, function(v) {
|
|
var inV = g.inEdges(v);
|
|
if (!inV.length) {
|
|
return { v: v };
|
|
} else {
|
|
var result = _.reduce(inV, function(acc, e) {
|
|
var edge = g.edge(e),
|
|
nodeU = g.node(e.v);
|
|
return {
|
|
sum: acc.sum + (edge.weight * nodeU.order),
|
|
weight: acc.weight + edge.weight
|
|
};
|
|
}, { sum: 0, weight: 0 });
|
|
|
|
return {
|
|
v: v,
|
|
barycenter: result.sum / result.weight,
|
|
weight: result.weight
|
|
};
|
|
}
|
|
});
|
|
}
|
|
|
|
|
|
},{"../lodash":10}],15:[function(require,module,exports){
|
|
var _ = require("../lodash"),
|
|
Graph = require("../graphlib").Graph;
|
|
|
|
module.exports = buildLayerGraph;
|
|
|
|
/*
|
|
* Constructs a graph that can be used to sort a layer of nodes. The graph will
|
|
* contain all base and subgraph nodes from the request layer in their original
|
|
* hierarchy and any edges that are incident on these nodes and are of the type
|
|
* requested by the "relationship" parameter.
|
|
*
|
|
* Nodes from the requested rank that do not have parents are assigned a root
|
|
* node in the output graph, which is set in the root graph attribute. This
|
|
* makes it easy to walk the hierarchy of movable nodes during ordering.
|
|
*
|
|
* Pre-conditions:
|
|
*
|
|
* 1. Input graph is a DAG
|
|
* 2. Base nodes in the input graph have a rank attribute
|
|
* 3. Subgraph nodes in the input graph has minRank and maxRank attributes
|
|
* 4. Edges have an assigned weight
|
|
*
|
|
* Post-conditions:
|
|
*
|
|
* 1. Output graph has all nodes in the movable rank with preserved
|
|
* hierarchy.
|
|
* 2. Root nodes in the movable layer are made children of the node
|
|
* indicated by the root attribute of the graph.
|
|
* 3. Non-movable nodes incident on movable nodes, selected by the
|
|
* relationship parameter, are included in the graph (without hierarchy).
|
|
* 4. Edges incident on movable nodes, selected by the relationship
|
|
* parameter, are added to the output graph.
|
|
* 5. The weights for copied edges are aggregated as need, since the output
|
|
* graph is not a multi-graph.
|
|
*/
|
|
function buildLayerGraph(g, rank, relationship) {
|
|
var root = createRootNode(g),
|
|
result = new Graph({ compound: true }).setGraph({ root: root })
|
|
.setDefaultNodeLabel(function(v) { return g.node(v); });
|
|
|
|
_.each(g.nodes(), function(v) {
|
|
var node = g.node(v),
|
|
parent = g.parent(v);
|
|
|
|
if (node.rank === rank || node.minRank <= rank && rank <= node.maxRank) {
|
|
result.setNode(v);
|
|
result.setParent(v, parent || root);
|
|
|
|
// This assumes we have only short edges!
|
|
_.each(g[relationship](v), function(e) {
|
|
var u = e.v === v ? e.w : e.v,
|
|
edge = result.edge(u, v),
|
|
weight = !_.isUndefined(edge) ? edge.weight : 0;
|
|
result.setEdge(u, v, { weight: g.edge(e).weight + weight });
|
|
});
|
|
|
|
if (_.has(node, "minRank")) {
|
|
result.setNode(v, {
|
|
borderLeft: node.borderLeft[rank],
|
|
borderRight: node.borderRight[rank]
|
|
});
|
|
}
|
|
}
|
|
});
|
|
|
|
return result;
|
|
}
|
|
|
|
function createRootNode(g) {
|
|
var v;
|
|
while (g.hasNode((v = _.uniqueId("_root"))));
|
|
return v;
|
|
}
|
|
|
|
},{"../graphlib":7,"../lodash":10}],16:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = crossCount;
|
|
|
|
/*
|
|
* A function that takes a layering (an array of layers, each with an array of
|
|
* ordererd nodes) and a graph and returns a weighted crossing count.
|
|
*
|
|
* Pre-conditions:
|
|
*
|
|
* 1. Input graph must be simple (not a multigraph), directed, and include
|
|
* only simple edges.
|
|
* 2. Edges in the input graph must have assigned weights.
|
|
*
|
|
* Post-conditions:
|
|
*
|
|
* 1. The graph and layering matrix are left unchanged.
|
|
*
|
|
* This algorithm is derived from Barth, et al., "Bilayer Cross Counting."
|
|
*/
|
|
function crossCount(g, layering) {
|
|
var cc = 0;
|
|
for (var i = 1; i < layering.length; ++i) {
|
|
cc += twoLayerCrossCount(g, layering[i-1], layering[i]);
|
|
}
|
|
return cc;
|
|
}
|
|
|
|
function twoLayerCrossCount(g, northLayer, southLayer) {
|
|
// Sort all of the edges between the north and south layers by their position
|
|
// in the north layer and then the south. Map these edges to the position of
|
|
// their head in the south layer.
|
|
var southPos = _.zipObject(southLayer,
|
|
_.map(southLayer, function (v, i) { return i; }));
|
|
var southEntries = _.flatten(_.map(northLayer, function(v) {
|
|
return _.chain(g.outEdges(v))
|
|
.map(function(e) {
|
|
return { pos: southPos[e.w], weight: g.edge(e).weight };
|
|
})
|
|
.sortBy("pos")
|
|
.value();
|
|
}), true);
|
|
|
|
// Build the accumulator tree
|
|
var firstIndex = 1;
|
|
while (firstIndex < southLayer.length) firstIndex <<= 1;
|
|
var treeSize = 2 * firstIndex - 1;
|
|
firstIndex -= 1;
|
|
var tree = _.map(new Array(treeSize), function() { return 0; });
|
|
|
|
// Calculate the weighted crossings
|
|
var cc = 0;
|
|
_.each(southEntries.forEach(function(entry) {
|
|
var index = entry.pos + firstIndex;
|
|
tree[index] += entry.weight;
|
|
var weightSum = 0;
|
|
while (index > 0) {
|
|
if (index % 2) {
|
|
weightSum += tree[index + 1];
|
|
}
|
|
index = (index - 1) >> 1;
|
|
tree[index] += entry.weight;
|
|
}
|
|
cc += entry.weight * weightSum;
|
|
}));
|
|
|
|
return cc;
|
|
}
|
|
|
|
},{"../lodash":10}],17:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash"),
|
|
initOrder = require("./init-order"),
|
|
crossCount = require("./cross-count"),
|
|
sortSubgraph = require("./sort-subgraph"),
|
|
buildLayerGraph = require("./build-layer-graph"),
|
|
addSubgraphConstraints = require("./add-subgraph-constraints"),
|
|
Graph = require("../graphlib").Graph,
|
|
util = require("../util");
|
|
|
|
module.exports = order;
|
|
|
|
/*
|
|
* Applies heuristics to minimize edge crossings in the graph and sets the best
|
|
* order solution as an order attribute on each node.
|
|
*
|
|
* Pre-conditions:
|
|
*
|
|
* 1. Graph must be DAG
|
|
* 2. Graph nodes must be objects with a "rank" attribute
|
|
* 3. Graph edges must have the "weight" attribute
|
|
*
|
|
* Post-conditions:
|
|
*
|
|
* 1. Graph nodes will have an "order" attribute based on the results of the
|
|
* algorithm.
|
|
*/
|
|
function order(g) {
|
|
var maxRank = util.maxRank(g),
|
|
downLayerGraphs = buildLayerGraphs(g, _.range(1, maxRank + 1), "inEdges"),
|
|
upLayerGraphs = buildLayerGraphs(g, _.range(maxRank - 1, -1, -1), "outEdges");
|
|
|
|
var layering = initOrder(g);
|
|
assignOrder(g, layering);
|
|
|
|
var bestCC = Number.POSITIVE_INFINITY,
|
|
best;
|
|
|
|
for (var i = 0, lastBest = 0; lastBest < 4; ++i, ++lastBest) {
|
|
sweepLayerGraphs(i % 2 ? downLayerGraphs : upLayerGraphs, i % 4 >= 2);
|
|
|
|
layering = util.buildLayerMatrix(g);
|
|
var cc = crossCount(g, layering);
|
|
if (cc < bestCC) {
|
|
lastBest = 0;
|
|
best = _.cloneDeep(layering);
|
|
bestCC = cc;
|
|
}
|
|
}
|
|
|
|
assignOrder(g, best);
|
|
}
|
|
|
|
function buildLayerGraphs(g, ranks, relationship) {
|
|
return _.map(ranks, function(rank) {
|
|
return buildLayerGraph(g, rank, relationship);
|
|
});
|
|
}
|
|
|
|
function sweepLayerGraphs(layerGraphs, biasRight) {
|
|
var cg = new Graph();
|
|
_.each(layerGraphs, function(lg) {
|
|
var root = lg.graph().root;
|
|
var sorted = sortSubgraph(lg, root, cg, biasRight);
|
|
_.each(sorted.vs, function(v, i) {
|
|
lg.node(v).order = i;
|
|
});
|
|
addSubgraphConstraints(lg, cg, sorted.vs);
|
|
});
|
|
}
|
|
|
|
function assignOrder(g, layering) {
|
|
_.each(layering, function(layer) {
|
|
_.each(layer, function(v, i) {
|
|
g.node(v).order = i;
|
|
});
|
|
});
|
|
}
|
|
|
|
},{"../graphlib":7,"../lodash":10,"../util":29,"./add-subgraph-constraints":13,"./build-layer-graph":15,"./cross-count":16,"./init-order":18,"./sort-subgraph":20}],18:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = initOrder;
|
|
|
|
/*
|
|
* Assigns an initial order value for each node by performing a DFS search
|
|
* starting from nodes in the first rank. Nodes are assigned an order in their
|
|
* rank as they are first visited.
|
|
*
|
|
* This approach comes from Gansner, et al., "A Technique for Drawing Directed
|
|
* Graphs."
|
|
*
|
|
* Returns a layering matrix with an array per layer and each layer sorted by
|
|
* the order of its nodes.
|
|
*/
|
|
function initOrder(g) {
|
|
var visited = {},
|
|
simpleNodes = _.filter(g.nodes(), function(v) {
|
|
return !g.children(v).length;
|
|
}),
|
|
maxRank = _.max(_.map(simpleNodes, function(v) { return g.node(v).rank; })),
|
|
layers = _.map(_.range(maxRank + 1), function() { return []; });
|
|
|
|
function dfs(v) {
|
|
if (_.has(visited, v)) return;
|
|
visited[v] = true;
|
|
var node = g.node(v);
|
|
layers[node.rank].push(v);
|
|
_.each(g.successors(v), dfs);
|
|
}
|
|
|
|
var orderedVs = _.sortBy(simpleNodes, function(v) { return g.node(v).rank; });
|
|
_.each(orderedVs, dfs);
|
|
|
|
return layers;
|
|
}
|
|
|
|
},{"../lodash":10}],19:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = resolveConflicts;
|
|
|
|
/*
|
|
* Given a list of entries of the form {v, barycenter, weight} and a
|
|
* constraint graph this function will resolve any conflicts between the
|
|
* constraint graph and the barycenters for the entries. If the barycenters for
|
|
* an entry would violate a constraint in the constraint graph then we coalesce
|
|
* the nodes in the conflict into a new node that respects the contraint and
|
|
* aggregates barycenter and weight information.
|
|
*
|
|
* This implementation is based on the description in Forster, "A Fast and
|
|
* Simple Hueristic for Constrained Two-Level Crossing Reduction," thought it
|
|
* differs in some specific details.
|
|
*
|
|
* Pre-conditions:
|
|
*
|
|
* 1. Each entry has the form {v, barycenter, weight}, or if the node has
|
|
* no barycenter, then {v}.
|
|
*
|
|
* Returns:
|
|
*
|
|
* A new list of entries of the form {vs, i, barycenter, weight}. The list
|
|
* `vs` may either be a singleton or it may be an aggregation of nodes
|
|
* ordered such that they do not violate constraints from the constraint
|
|
* graph. The property `i` is the lowest original index of any of the
|
|
* elements in `vs`.
|
|
*/
|
|
function resolveConflicts(entries, cg) {
|
|
var mappedEntries = {};
|
|
_.each(entries, function(entry, i) {
|
|
var tmp = mappedEntries[entry.v] = {
|
|
indegree: 0,
|
|
in: [],
|
|
out: [],
|
|
vs: [entry.v],
|
|
i: i
|
|
};
|
|
if (!_.isUndefined(entry.barycenter)) {
|
|
tmp.barycenter = entry.barycenter;
|
|
tmp.weight = entry.weight;
|
|
}
|
|
});
|
|
|
|
_.each(cg.edges(), function(e) {
|
|
var entryV = mappedEntries[e.v],
|
|
entryW = mappedEntries[e.w];
|
|
if (!_.isUndefined(entryV) && !_.isUndefined(entryW)) {
|
|
entryW.indegree++;
|
|
entryV.out.push(mappedEntries[e.w]);
|
|
}
|
|
});
|
|
|
|
var sourceSet = _.filter(mappedEntries, function(entry) {
|
|
return !entry.indegree;
|
|
});
|
|
|
|
return doResolveConflicts(sourceSet);
|
|
}
|
|
|
|
function doResolveConflicts(sourceSet) {
|
|
var entries = [];
|
|
|
|
function handleIn(vEntry) {
|
|
return function(uEntry) {
|
|
if (uEntry.merged) {
|
|
return;
|
|
}
|
|
if (_.isUndefined(uEntry.barycenter) ||
|
|
_.isUndefined(vEntry.barycenter) ||
|
|
uEntry.barycenter >= vEntry.barycenter) {
|
|
mergeEntries(vEntry, uEntry);
|
|
}
|
|
};
|
|
}
|
|
|
|
function handleOut(vEntry) {
|
|
return function(wEntry) {
|
|
wEntry.in.push(vEntry);
|
|
if (--wEntry.indegree === 0) {
|
|
sourceSet.push(wEntry);
|
|
}
|
|
};
|
|
}
|
|
|
|
while (sourceSet.length) {
|
|
var entry = sourceSet.pop();
|
|
entries.push(entry);
|
|
_.each(entry.in.reverse(), handleIn(entry));
|
|
_.each(entry.out, handleOut(entry));
|
|
}
|
|
|
|
return _.chain(entries)
|
|
.filter(function(entry) { return !entry.merged; })
|
|
.map(function(entry) {
|
|
return _.pick(entry, ["vs", "i", "barycenter", "weight"]);
|
|
})
|
|
.value();
|
|
}
|
|
|
|
function mergeEntries(target, source) {
|
|
var sum = 0,
|
|
weight = 0;
|
|
|
|
if (target.weight) {
|
|
sum += target.barycenter * target.weight;
|
|
weight += target.weight;
|
|
}
|
|
|
|
if (source.weight) {
|
|
sum += source.barycenter * source.weight;
|
|
weight += source.weight;
|
|
}
|
|
|
|
target.vs = source.vs.concat(target.vs);
|
|
target.barycenter = sum / weight;
|
|
target.weight = weight;
|
|
target.i = Math.min(source.i, target.i);
|
|
source.merged = true;
|
|
}
|
|
|
|
},{"../lodash":10}],20:[function(require,module,exports){
|
|
var _ = require("../lodash"),
|
|
barycenter = require("./barycenter"),
|
|
resolveConflicts = require("./resolve-conflicts"),
|
|
sort = require("./sort");
|
|
|
|
module.exports = sortSubgraph;
|
|
|
|
function sortSubgraph(g, v, cg, biasRight) {
|
|
var movable = g.children(v),
|
|
node = g.node(v),
|
|
bl = node ? node.borderLeft : undefined,
|
|
br = node ? node.borderRight: undefined,
|
|
subgraphs = {};
|
|
|
|
if (bl) {
|
|
movable = _.filter(movable, function(w) {
|
|
return w !== bl && w !== br;
|
|
});
|
|
}
|
|
|
|
var barycenters = barycenter(g, movable);
|
|
_.each(barycenters, function(entry) {
|
|
if (g.children(entry.v).length) {
|
|
var subgraphResult = sortSubgraph(g, entry.v, cg, biasRight);
|
|
subgraphs[entry.v] = subgraphResult;
|
|
if (_.has(subgraphResult, "barycenter")) {
|
|
mergeBarycenters(entry, subgraphResult);
|
|
}
|
|
}
|
|
});
|
|
|
|
var entries = resolveConflicts(barycenters, cg);
|
|
expandSubgraphs(entries, subgraphs);
|
|
|
|
var result = sort(entries, biasRight);
|
|
|
|
if (bl) {
|
|
result.vs = _.flatten([bl, result.vs, br], true);
|
|
if (g.predecessors(bl).length) {
|
|
var blPred = g.node(g.predecessors(bl)[0]),
|
|
brPred = g.node(g.predecessors(br)[0]);
|
|
if (!_.has(result, "barycenter")) {
|
|
result.barycenter = 0;
|
|
result.weight = 0;
|
|
}
|
|
result.barycenter = (result.barycenter * result.weight +
|
|
blPred.order + brPred.order) / (result.weight + 2);
|
|
result.weight += 2;
|
|
}
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
function expandSubgraphs(entries, subgraphs) {
|
|
_.each(entries, function(entry) {
|
|
entry.vs = _.flatten(entry.vs.map(function(v) {
|
|
if (subgraphs[v]) {
|
|
return subgraphs[v].vs;
|
|
}
|
|
return v;
|
|
}), true);
|
|
});
|
|
}
|
|
|
|
function mergeBarycenters(target, other) {
|
|
if (!_.isUndefined(target.barycenter)) {
|
|
target.barycenter = (target.barycenter * target.weight +
|
|
other.barycenter * other.weight) /
|
|
(target.weight + other.weight);
|
|
target.weight += other.weight;
|
|
} else {
|
|
target.barycenter = other.barycenter;
|
|
target.weight = other.weight;
|
|
}
|
|
}
|
|
|
|
},{"../lodash":10,"./barycenter":14,"./resolve-conflicts":19,"./sort":21}],21:[function(require,module,exports){
|
|
var _ = require("../lodash"),
|
|
util = require("../util");
|
|
|
|
module.exports = sort;
|
|
|
|
function sort(entries, biasRight) {
|
|
var parts = util.partition(entries, function(entry) {
|
|
return _.has(entry, "barycenter");
|
|
});
|
|
var sortable = parts.lhs,
|
|
unsortable = _.sortBy(parts.rhs, function(entry) { return -entry.i; }),
|
|
vs = [],
|
|
sum = 0,
|
|
weight = 0,
|
|
vsIndex = 0;
|
|
|
|
sortable.sort(compareWithBias(!!biasRight));
|
|
|
|
vsIndex = consumeUnsortable(vs, unsortable, vsIndex);
|
|
|
|
_.each(sortable, function (entry) {
|
|
vsIndex += entry.vs.length;
|
|
vs.push(entry.vs);
|
|
sum += entry.barycenter * entry.weight;
|
|
weight += entry.weight;
|
|
vsIndex = consumeUnsortable(vs, unsortable, vsIndex);
|
|
});
|
|
|
|
var result = { vs: _.flatten(vs, true) };
|
|
if (weight) {
|
|
result.barycenter = sum / weight;
|
|
result.weight = weight;
|
|
}
|
|
return result;
|
|
}
|
|
|
|
function consumeUnsortable(vs, unsortable, index) {
|
|
var last;
|
|
while (unsortable.length && (last = _.last(unsortable)).i <= index) {
|
|
unsortable.pop();
|
|
vs.push(last.vs);
|
|
index++;
|
|
}
|
|
return index;
|
|
}
|
|
|
|
function compareWithBias(bias) {
|
|
return function(entryV, entryW) {
|
|
if (entryV.barycenter < entryW.barycenter) {
|
|
return -1;
|
|
} else if (entryV.barycenter > entryW.barycenter) {
|
|
return 1;
|
|
}
|
|
|
|
return !bias ? entryV.i - entryW.i : entryW.i - entryV.i;
|
|
};
|
|
}
|
|
|
|
},{"../lodash":10,"../util":29}],22:[function(require,module,exports){
|
|
var _ = require("./lodash");
|
|
|
|
module.exports = parentDummyChains;
|
|
|
|
function parentDummyChains(g) {
|
|
var postorderNums = postorder(g);
|
|
|
|
_.each(g.graph().dummyChains, function(v) {
|
|
var node = g.node(v),
|
|
edgeObj = node.edgeObj,
|
|
pathData = findPath(g, postorderNums, edgeObj.v, edgeObj.w),
|
|
path = pathData.path,
|
|
lca = pathData.lca,
|
|
pathIdx = 0,
|
|
pathV = path[pathIdx],
|
|
ascending = true;
|
|
|
|
while (v !== edgeObj.w) {
|
|
node = g.node(v);
|
|
|
|
if (ascending) {
|
|
while ((pathV = path[pathIdx]) !== lca &&
|
|
g.node(pathV).maxRank < node.rank) {
|
|
pathIdx++;
|
|
}
|
|
|
|
if (pathV === lca) {
|
|
ascending = false;
|
|
}
|
|
}
|
|
|
|
if (!ascending) {
|
|
while (pathIdx < path.length - 1 &&
|
|
g.node(pathV = path[pathIdx + 1]).minRank <= node.rank) {
|
|
pathIdx++;
|
|
}
|
|
pathV = path[pathIdx];
|
|
}
|
|
|
|
g.setParent(v, pathV);
|
|
v = g.successors(v)[0];
|
|
}
|
|
});
|
|
}
|
|
|
|
// Find a path from v to w through the lowest common ancestor (LCA). Return the
|
|
// full path and the LCA.
|
|
function findPath(g, postorderNums, v, w) {
|
|
var vPath = [],
|
|
wPath = [],
|
|
low = Math.min(postorderNums[v].low, postorderNums[w].low),
|
|
lim = Math.max(postorderNums[v].lim, postorderNums[w].lim),
|
|
parent,
|
|
lca;
|
|
|
|
// Traverse up from v to find the LCA
|
|
parent = v;
|
|
do {
|
|
parent = g.parent(parent);
|
|
vPath.push(parent);
|
|
} while (parent &&
|
|
(postorderNums[parent].low > low || lim > postorderNums[parent].lim));
|
|
lca = parent;
|
|
|
|
// Traverse from w to LCA
|
|
parent = w;
|
|
while ((parent = g.parent(parent)) !== lca) {
|
|
wPath.push(parent);
|
|
}
|
|
|
|
return { path: vPath.concat(wPath.reverse()), lca: lca };
|
|
}
|
|
|
|
function postorder(g) {
|
|
var result = {},
|
|
lim = 0;
|
|
|
|
function dfs(v) {
|
|
var low = lim;
|
|
_.each(g.children(v), dfs);
|
|
result[v] = { low: low, lim: lim++ };
|
|
}
|
|
_.each(g.children(), dfs);
|
|
|
|
return result;
|
|
}
|
|
|
|
},{"./lodash":10}],23:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash"),
|
|
util = require("../util");
|
|
|
|
/*
|
|
* This module provides coordinate assignment based on Brandes and Köpf, "Fast
|
|
* and Simple Horizontal Coordinate Assignment."
|
|
*/
|
|
|
|
module.exports = {
|
|
positionX: positionX,
|
|
findType1Conflicts: findType1Conflicts,
|
|
findType2Conflicts: findType2Conflicts,
|
|
addConflict: addConflict,
|
|
hasConflict: hasConflict,
|
|
verticalAlignment: verticalAlignment,
|
|
horizontalCompaction: horizontalCompaction,
|
|
alignCoordinates: alignCoordinates,
|
|
findSmallestWidthAlignment: findSmallestWidthAlignment,
|
|
balance: balance
|
|
};
|
|
|
|
/*
|
|
* Marks all edges in the graph with a type-1 conflict with the "type1Conflict"
|
|
* property. A type-1 conflict is one where a non-inner segment crosses an
|
|
* inner segment. An inner segment is an edge with both incident nodes marked
|
|
* with the "dummy" property.
|
|
*
|
|
* This algorithm scans layer by layer, starting with the second, for type-1
|
|
* conflicts between the current layer and the previous layer. For each layer
|
|
* it scans the nodes from left to right until it reaches one that is incident
|
|
* on an inner segment. It then scans predecessors to determine if they have
|
|
* edges that cross that inner segment. At the end a final scan is done for all
|
|
* nodes on the current rank to see if they cross the last visited inner
|
|
* segment.
|
|
*
|
|
* This algorithm (safely) assumes that a dummy node will only be incident on a
|
|
* single node in the layers being scanned.
|
|
*/
|
|
function findType1Conflicts(g, layering) {
|
|
var conflicts = {};
|
|
|
|
function visitLayer(prevLayer, layer) {
|
|
var
|
|
// last visited node in the previous layer that is incident on an inner
|
|
// segment.
|
|
k0 = 0,
|
|
// Tracks the last node in this layer scanned for crossings with a type-1
|
|
// segment.
|
|
scanPos = 0,
|
|
prevLayerLength = prevLayer.length,
|
|
lastNode = _.last(layer);
|
|
|
|
_.each(layer, function(v, i) {
|
|
var w = findOtherInnerSegmentNode(g, v),
|
|
k1 = w ? g.node(w).order : prevLayerLength;
|
|
|
|
if (w || v === lastNode) {
|
|
_.each(layer.slice(scanPos, i +1), function(scanNode) {
|
|
_.each(g.predecessors(scanNode), function(u) {
|
|
var uLabel = g.node(u),
|
|
uPos = uLabel.order;
|
|
if ((uPos < k0 || k1 < uPos) &&
|
|
!(uLabel.dummy && g.node(scanNode).dummy)) {
|
|
addConflict(conflicts, u, scanNode);
|
|
}
|
|
});
|
|
});
|
|
scanPos = i + 1;
|
|
k0 = k1;
|
|
}
|
|
});
|
|
|
|
return layer;
|
|
}
|
|
|
|
_.reduce(layering, visitLayer);
|
|
return conflicts;
|
|
}
|
|
|
|
function findType2Conflicts(g, layering) {
|
|
var conflicts = {};
|
|
|
|
function scan(south, southPos, southEnd, prevNorthBorder, nextNorthBorder) {
|
|
var v;
|
|
_.each(_.range(southPos, southEnd), function(i) {
|
|
v = south[i];
|
|
if (g.node(v).dummy) {
|
|
_.each(g.predecessors(v), function(u) {
|
|
var uNode = g.node(u);
|
|
if (uNode.dummy &&
|
|
(uNode.order < prevNorthBorder || uNode.order > nextNorthBorder)) {
|
|
addConflict(conflicts, u, v);
|
|
}
|
|
});
|
|
}
|
|
});
|
|
}
|
|
|
|
|
|
function visitLayer(north, south) {
|
|
var prevNorthPos = -1,
|
|
nextNorthPos,
|
|
southPos = 0;
|
|
|
|
_.each(south, function(v, southLookahead) {
|
|
if (g.node(v).dummy === "border") {
|
|
var predecessors = g.predecessors(v);
|
|
if (predecessors.length) {
|
|
nextNorthPos = g.node(predecessors[0]).order;
|
|
scan(south, southPos, southLookahead, prevNorthPos, nextNorthPos);
|
|
southPos = southLookahead;
|
|
prevNorthPos = nextNorthPos;
|
|
}
|
|
}
|
|
scan(south, southPos, south.length, nextNorthPos, north.length);
|
|
});
|
|
|
|
return south;
|
|
}
|
|
|
|
_.reduce(layering, visitLayer);
|
|
return conflicts;
|
|
}
|
|
|
|
function findOtherInnerSegmentNode(g, v) {
|
|
if (g.node(v).dummy) {
|
|
return _.find(g.predecessors(v), function(u) {
|
|
return g.node(u).dummy;
|
|
});
|
|
}
|
|
}
|
|
|
|
function addConflict(conflicts, v, w) {
|
|
if (v > w) {
|
|
var tmp = v;
|
|
v = w;
|
|
w = tmp;
|
|
}
|
|
|
|
var conflictsV = conflicts[v];
|
|
if (!conflictsV) {
|
|
conflicts[v] = conflictsV = {};
|
|
}
|
|
conflictsV[w] = true;
|
|
}
|
|
|
|
function hasConflict(conflicts, v, w) {
|
|
if (v > w) {
|
|
var tmp = v;
|
|
v = w;
|
|
w = tmp;
|
|
}
|
|
return _.has(conflicts[v], w);
|
|
}
|
|
|
|
/*
 * Try to align nodes into vertical "blocks" where possible. This algorithm
 * attempts to align a node with one of its median neighbors. If the edge
 * connecting to a neighbor is marked as a type-1 conflict then we ignore that
 * possibility. If a previous node has already formed a block with a node after
 * the node we're trying to form a block with, we also ignore that possibility -
 * our blocks would be split in that scenario.
 */
function verticalAlignment(g, layering, conflicts, neighborFn) {
  var root = {},
      align = {},
      pos = {};

  // We cache the position here based on the layering because the graph and
  // layering may be out of sync. The layering matrix is manipulated to
  // generate different extreme alignments.
  _.each(layering, function(layer) {
    _.each(layer, function(v, order) {
      root[v] = v;
      align[v] = v;
      pos[v] = order;
    });
  });

  _.each(layering, function(layer) {
    var prevIdx = -1;
    _.each(layer, function(v) {
      var ws = neighborFn(v);
      if (ws.length) {
        ws = _.sortBy(ws, function(w) { return pos[w]; });
        var mp = (ws.length - 1) / 2;
        for (var i = Math.floor(mp), il = Math.ceil(mp); i <= il; ++i) {
          var w = ws[i];
          if (align[v] === v &&
              prevIdx < pos[w] &&
              !hasConflict(conflicts, v, w)) {
            align[w] = v;
            align[v] = root[v] = root[w];
            prevIdx = pos[w];
          }
        }
      }
    });
  });

  return { root: root, align: align };
}
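// Note on the median selection above: mp = (ws.length - 1) / 2, so a node with
// three neighbors considers only the middle one (floor(mp) === ceil(mp) === 1),
// while a node with four neighbors considers the two medians at indices 1 and 2.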
function horizontalCompaction(g, layering, root, align, reverseSep) {
  // We use local variables for these parameters instead of manipulating the
  // graph because it becomes more verbose to access them in a chained manner.
  var shift = {},
      shiftNeighbor = {},
      sink = {},
      xs = {},
      pred = {},
      graphLabel = g.graph(),
      sepFn = sep(graphLabel.nodesep, graphLabel.edgesep, reverseSep);

  _.each(layering, function(layer) {
    _.each(layer, function(v, order) {
      sink[v] = v;
      shift[v] = Number.POSITIVE_INFINITY;
      pred[v] = layer[order - 1];
    });
  });

  _.each(g.nodes(), function(v) {
    if (root[v] === v) {
      placeBlock(g, layering, sepFn, root, align, shift, shiftNeighbor, sink, pred, xs, v);
    }
  });

  _.each(layering, function(layer) {
    _.each(layer, function(v) {
      xs[v] = xs[root[v]];
      // This line differs from the source paper. See
      // http://www.inf.uni-konstanz.de/~brandes/publications/ for details.
      if (v === root[v] && shift[sink[root[v]]] < Number.POSITIVE_INFINITY) {
        xs[v] += shift[sink[root[v]]];

        // Cascade shifts as necessary
        var w = shiftNeighbor[sink[root[v]]];
        if (w && shift[w] !== Number.POSITIVE_INFINITY) {
          xs[v] += shift[w];
        }
      }
    });
  });

  return xs;
}
function placeBlock(g, layering, sepFn, root, align, shift, shiftNeighbor, sink, pred, xs, v) {
  if (_.has(xs, v)) return;
  xs[v] = 0;

  var w = v,
      u;
  do {
    if (pred[w]) {
      u = root[pred[w]];
      placeBlock(g, layering, sepFn, root, align, shift, shiftNeighbor, sink, pred, xs, u);
      if (sink[v] === v) {
        sink[v] = sink[u];
      }

      var delta = sepFn(g, w, pred[w]);
      if (sink[v] !== sink[u]) {
        shift[sink[u]] = Math.min(shift[sink[u]], xs[v] - xs[u] - delta);
        shiftNeighbor[sink[u]] = sink[v];
      } else {
        xs[v] = Math.max(xs[v], xs[u] + delta);
      }
    }
    w = align[w];
  } while (w !== v);
}
/*
 * Returns the alignment that has the smallest width of the given alignments.
 */
function findSmallestWidthAlignment(g, xss) {
  return _.min(xss, function(xs) {
    var min = _.min(xs, function(x, v) { return x - width(g, v) / 2; }),
        max = _.max(xs, function(x, v) { return x + width(g, v) / 2; });
    return max - min;
  });
}
/*
 * Align the coordinates of each of the layout alignments such that
 * left-biased alignments have their minimum coordinate at the same point as
 * the minimum coordinate of the smallest width alignment and right-biased
 * alignments have their maximum coordinate at the same point as the maximum
 * coordinate of the smallest width alignment.
 */
function alignCoordinates(xss, alignTo) {
  var alignToMin = _.min(alignTo),
      alignToMax = _.max(alignTo);

  _.each(["u", "d"], function(vert) {
    _.each(["l", "r"], function(horiz) {
      var alignment = vert + horiz,
          xs = xss[alignment],
          delta;
      if (xs === alignTo) return;

      delta = horiz === "l" ? alignToMin - _.min(xs) : alignToMax - _.max(xs);

      if (delta) {
        xss[alignment] = _.mapValues(xs, function(x) { return x + delta; });
      }
    });
  });
}
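/*
 * Illustration of balance below: unless an explicit alignment ("ul", "ur",
 * "dl" or "dr") is requested via the graph's "align" attribute, each node's
 * four candidate coordinates are sorted and the average of the two middle
 * values is used; e.g. sorted candidates [10, 12, 16, 30] give (12 + 16) / 2 = 14.
 */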
function balance(xss, align) {
  return _.mapValues(xss.ul, function(ignore, v) {
    if (align) {
      return xss[align.toLowerCase()][v];
    } else {
      var xs = _.sortBy(_.pluck(xss, v));
      return (xs[1] + xs[2]) / 2;
    }
  });
}
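/*
 * Overview of positionX below (Brandes-Koepf style): build the layer matrix,
 * mark type-1 and type-2 conflicts, compute four extreme alignments
 * (up/down x left/right, with the right-biased passes run on a mirrored
 * layering and negated afterwards), align them all to the smallest-width
 * alignment, and finally balance the four candidates per node.
 */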
function positionX(g) {
  var layering = util.buildLayerMatrix(g),
      conflicts = _.merge(findType1Conflicts(g, layering),
                          findType2Conflicts(g, layering));

  var xss = {},
      adjustedLayering;
  _.each(["u", "d"], function(vert) {
    adjustedLayering = vert === "u" ? layering : _.values(layering).reverse();
    _.each(["l", "r"], function(horiz) {
      if (horiz === "r") {
        adjustedLayering = _.map(adjustedLayering, function(inner) {
          return _.values(inner).reverse();
        });
      }

      var neighborFn = _.bind(vert === "u" ? g.predecessors : g.successors, g);
      var align = verticalAlignment(g, adjustedLayering, conflicts, neighborFn);
      var xs = horizontalCompaction(g, adjustedLayering,
                                    align.root, align.align,
                                    horiz === "r");
      if (horiz === "r") {
        xs = _.mapValues(xs, function(x) { return -x; });
      }
      xss[vert + horiz] = xs;
    });
  });

  var smallestWidth = findSmallestWidthAlignment(g, xss);
  alignCoordinates(xss, smallestWidth);
  return balance(xss, g.graph().align);
}
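/*
 * Illustration of the separation function below: for two ordinary (non-dummy)
 * nodes with widths 40 and 60 and nodesep 50, and no "labelpos" adjustments,
 * the required separation is 40/2 + 50/2 + 50/2 + 60/2 = 100.
 */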
function sep(nodeSep, edgeSep, reverseSep) {
  return function(g, v, w) {
    var vLabel = g.node(v),
        wLabel = g.node(w),
        sum = 0,
        delta;

    sum += vLabel.width / 2;
    if (_.has(vLabel, "labelpos")) {
      switch (vLabel.labelpos.toLowerCase()) {
        case "l": delta = -vLabel.width / 2; break;
        case "r": delta = vLabel.width / 2; break;
      }
    }
    if (delta) {
      sum += reverseSep ? delta : -delta;
    }
    delta = 0;

    sum += (vLabel.dummy ? edgeSep : nodeSep) / 2;
    sum += (wLabel.dummy ? edgeSep : nodeSep) / 2;

    sum += wLabel.width / 2;
    if (_.has(wLabel, "labelpos")) {
      switch (wLabel.labelpos.toLowerCase()) {
        case "l": delta = wLabel.width / 2; break;
        case "r": delta = -wLabel.width / 2; break;
      }
    }
    if (delta) {
      sum += reverseSep ? delta : -delta;
    }
    delta = 0;

    return sum;
  };
}
function width(g, v) {
  return g.node(v).width;
}

},{"../lodash":10,"../util":29}],24:[function(require,module,exports){
"use strict";
var _ = require("../lodash"),
    util = require("../util"),
    positionX = require("./bk").positionX;

module.exports = position;

function position(g) {
  g = util.asNonCompoundGraph(g);

  positionY(g);
  _.each(positionX(g), function(x, v) {
    g.node(v).x = x;
  });
}
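/*
 * Illustration of positionY below: every node in a rank is centered on the
 * tallest node in that rank. With ranksep 50 and layer heights 20 then 40,
 * the first layer gets y = 10 and the second gets y = 20 + 50 + 20 = 90.
 */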
function positionY(g) {
  var layering = util.buildLayerMatrix(g),
      rankSep = g.graph().ranksep,
      prevY = 0;
  _.each(layering, function(layer) {
    var maxHeight = _.max(_.map(layer, function(v) { return g.node(v).height; }));
    _.each(layer, function(v) {
      g.node(v).y = prevY + maxHeight / 2;
    });
    prevY += maxHeight + rankSep;
  });
}

},{"../lodash":10,"../util":29,"./bk":23}],25:[function(require,module,exports){
"use strict";
var _ = require("../lodash"),
    Graph = require("../graphlib").Graph,
    slack = require("./util").slack;

module.exports = feasibleTree;

/*
 * Constructs a spanning tree with tight edges and adjusts the input nodes'
 * ranks to achieve this. A tight edge is one that has a length that matches
 * its "minlen" attribute.
 *
 * The basic structure for this function is derived from Gansner, et al., "A
 * Technique for Drawing Directed Graphs."
 *
 * Pre-conditions:
 *
 *    1. Graph must be a DAG.
 *    2. Graph must be connected.
 *    3. Graph must have at least one node.
 *    4. Graph nodes must have been previously assigned a "rank" property that
 *       respects the "minlen" property of incident edges.
 *    5. Graph edges must have a "minlen" property.
 *
 * Post-conditions:
 *
 *    - Graph nodes will have their rank adjusted to ensure that all edges are
 *      tight.
 *
 * Returns a tree (undirected graph) that is constructed using only "tight"
 * edges.
 */
function feasibleTree(g) {
  var t = new Graph({ directed: false });

  // Choose arbitrary node from which to start our tree
  var start = g.nodes()[0],
      size = g.nodeCount();
  t.setNode(start, {});

  var edge, delta;
  while (tightTree(t, g) < size) {
    edge = findMinSlackEdge(t, g);
    delta = t.hasNode(edge.v) ? slack(g, edge) : -slack(g, edge);
    shiftRanks(t, g, delta);
  }

  return t;
}
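// Illustration of the loop above: if the tight tree currently contains only
// node "a" and the minimum-slack incident edge is a -> b with slack 2, then
// delta = +2 and every tree node's rank shifts by 2, which makes a -> b tight
// so that b joins the tree on the next tightTree pass.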
/*
 * Finds a maximal tree of tight edges and returns the number of nodes in the
 * tree.
 */
function tightTree(t, g) {
  function dfs(v) {
    _.each(g.nodeEdges(v), function(e) {
      var edgeV = e.v,
          w = (v === edgeV) ? e.w : edgeV;
      if (!t.hasNode(w) && !slack(g, e)) {
        t.setNode(w, {});
        t.setEdge(v, w, {});
        dfs(w);
      }
    });
  }

  _.each(t.nodes(), dfs);
  return t.nodeCount();
}
/*
 * Finds the edge with the smallest slack that is incident on the tree and
 * returns it.
 */
function findMinSlackEdge(t, g) {
  return _.min(g.edges(), function(e) {
    if (t.hasNode(e.v) !== t.hasNode(e.w)) {
      return slack(g, e);
    }
  });
}

function shiftRanks(t, g, delta) {
  _.each(t.nodes(), function(v) {
    g.node(v).rank += delta;
  });
}
},{"../graphlib":7,"../lodash":10,"./util":28}],26:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var rankUtil = require("./util"),
|
|
longestPath = rankUtil.longestPath,
|
|
feasibleTree = require("./feasible-tree"),
|
|
networkSimplex = require("./network-simplex");
|
|
|
|
module.exports = rank;
|
|
|
|
/*
 * Assigns a rank to each node in the input graph that respects the "minlen"
 * constraint specified on edges between nodes.
 *
 * This basic structure is derived from Gansner, et al., "A Technique for
 * Drawing Directed Graphs."
 *
 * Pre-conditions:
 *
 *    1. Graph must be a connected DAG
 *    2. Graph nodes must be objects
 *    3. Graph edges must have "weight" and "minlen" attributes
 *
 * Post-conditions:
 *
 *    1. Graph nodes will have a "rank" attribute based on the results of the
 *       algorithm. Ranks can start at any index (including negative); we'll
 *       fix them up later.
 */
function rank(g) {
  switch(g.graph().ranker) {
    case "network-simplex": networkSimplexRanker(g); break;
    case "tight-tree": tightTreeRanker(g); break;
    case "longest-path": longestPathRanker(g); break;
    default: networkSimplexRanker(g);
  }
}
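// The ranker is chosen through the graph label; with the usual graphlib API
// that is something like g.setGraph({ ranker: "tight-tree" }). Any
// unrecognized value falls through to the network simplex default above.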
// A fast and simple ranker, but results are far from optimal.
var longestPathRanker = longestPath;

function tightTreeRanker(g) {
  longestPath(g);
  feasibleTree(g);
}

function networkSimplexRanker(g) {
  networkSimplex(g);
}
},{"./feasible-tree":25,"./network-simplex":27,"./util":28}],27:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash"),
|
|
feasibleTree = require("./feasible-tree"),
|
|
slack = require("./util").slack,
|
|
initRank = require("./util").longestPath,
|
|
preorder = require("../graphlib").alg.preorder,
|
|
postorder = require("../graphlib").alg.postorder,
|
|
simplify = require("../util").simplify;
|
|
|
|
module.exports = networkSimplex;
|
|
|
|
// Expose some internals for testing purposes
|
|
networkSimplex.initLowLimValues = initLowLimValues;
|
|
networkSimplex.initCutValues = initCutValues;
|
|
networkSimplex.calcCutValue = calcCutValue;
|
|
networkSimplex.leaveEdge = leaveEdge;
|
|
networkSimplex.enterEdge = enterEdge;
|
|
networkSimplex.exchangeEdges = exchangeEdges;
|
|
|
|
/*
 * The network simplex algorithm assigns ranks to each node in the input graph
 * and iteratively improves the ranking to reduce the length of edges.
 *
 * Preconditions:
 *
 *    1. The input graph must be a DAG.
 *    2. All nodes in the graph must have an object value.
 *    3. All edges in the graph must have "minlen" and "weight" attributes.
 *
 * Postconditions:
 *
 *    1. All nodes in the graph will have an assigned "rank" attribute that has
 *       been optimized by the network simplex algorithm. Ranks start at 0.
 *
 * A rough sketch of the algorithm is as follows:
 *
 *    1. Assign initial ranks to each node. We use the longest path algorithm,
 *       which assigns ranks to the lowest position possible. In general this
 *       leads to very wide bottom ranks and unnecessarily long edges.
 *    2. Construct a feasible tight tree. A tight tree is one such that all
 *       edges in the tree have no slack (difference between length of edge
 *       and minlen for the edge). This by itself greatly improves the assigned
 *       rankings by shortening edges.
 *    3. Iteratively find edges that have negative cut values. Generally a
 *       negative cut value indicates that the edge could be removed and a new
 *       tree edge could be added to produce a more compact graph.
 *
 * Much of the algorithm here is derived from Gansner, et al., "A Technique
 * for Drawing Directed Graphs." The structure of the file roughly follows the
 * structure of the overall algorithm.
 */
function networkSimplex(g) {
  g = simplify(g);
  initRank(g);
  var t = feasibleTree(g);
  initLowLimValues(t);
  initCutValues(t, g);

  var e, f;
  while ((e = leaveEdge(t))) {
    f = enterEdge(t, g, e);
    exchangeEdges(t, g, e, f);
  }
}
/*
 * Initializes cut values for all edges in the tree.
 */
function initCutValues(t, g) {
  var vs = postorder(t, t.nodes());
  vs = vs.slice(0, vs.length - 1);
  _.each(vs, function(v) {
    assignCutValue(t, g, v);
  });
}

function assignCutValue(t, g, child) {
  var childLab = t.node(child),
      parent = childLab.parent;
  t.edge(child, parent).cutvalue = calcCutValue(t, g, child);
}
/*
 * Given the tight tree, its graph, and a child in the graph, calculate and
 * return the cut value for the edge between the child and its parent.
 */
function calcCutValue(t, g, child) {
  var childLab = t.node(child),
      parent = childLab.parent,
      // True if the child is on the tail end of the edge in the directed graph
      childIsTail = true,
      // The graph's view of the tree edge we're inspecting
      graphEdge = g.edge(child, parent),
      // The accumulated cut value for the edge between this node and its parent
      cutValue = 0;

  if (!graphEdge) {
    childIsTail = false;
    graphEdge = g.edge(parent, child);
  }

  cutValue = graphEdge.weight;

  _.each(g.nodeEdges(child), function(e) {
    var isOutEdge = e.v === child,
        other = isOutEdge ? e.w : e.v;

    if (other !== parent) {
      var pointsToHead = isOutEdge === childIsTail,
          otherWeight = g.edge(e).weight;

      cutValue += pointsToHead ? otherWeight : -otherWeight;
      if (isTreeEdge(t, child, other)) {
        var otherCutValue = t.edge(child, other).cutvalue;
        cutValue += pointsToHead ? -otherCutValue : otherCutValue;
      }
    }
  });

  return cutValue;
}
function initLowLimValues(tree, root) {
  if (arguments.length < 2) {
    root = tree.nodes()[0];
  }
  dfsAssignLowLim(tree, {}, 1, root);
}

function dfsAssignLowLim(tree, visited, nextLim, v, parent) {
  var low = nextLim,
      label = tree.node(v);

  visited[v] = true;
  _.each(tree.neighbors(v), function(w) {
    if (!_.has(visited, w)) {
      nextLim = dfsAssignLowLim(tree, visited, nextLim, w, v);
    }
  });

  label.low = low;
  label.lim = nextLim++;
  if (parent) {
    label.parent = parent;
  } else {
    // TODO should be able to remove this when we incrementally update low lim
    delete label.parent;
  }

  return nextLim;
}
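// Note: dfsAssignLowLim gives each tree node a postorder number ("lim") and
// the smallest number in its subtree ("low"). isDescendant further below
// relies on the fact that u lies in the subtree of w exactly when
// low(w) <= lim(u) <= lim(w).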
function leaveEdge(tree) {
  return _.find(tree.edges(), function(e) {
    return tree.edge(e).cutvalue < 0;
  });
}
function enterEdge(t, g, edge) {
  var v = edge.v,
      w = edge.w;

  // For the rest of this function we assume that v is the tail and w is the
  // head, so if we don't have this edge in the graph we should flip it to
  // match the correct orientation.
  if (!g.hasEdge(v, w)) {
    v = edge.w;
    w = edge.v;
  }

  var vLabel = t.node(v),
      wLabel = t.node(w),
      tailLabel = vLabel,
      flip = false;

  // If the root is in the tail of the edge then we need to flip the logic that
  // checks for the head and tail nodes in the candidates function below.
  if (vLabel.lim > wLabel.lim) {
    tailLabel = wLabel;
    flip = true;
  }

  var candidates = _.filter(g.edges(), function(edge) {
    return flip === isDescendant(t, t.node(edge.v), tailLabel) &&
           flip !== isDescendant(t, t.node(edge.w), tailLabel);
  });

  return _.min(candidates, function(edge) { return slack(g, edge); });
}
function exchangeEdges(t, g, e, f) {
  var v = e.v,
      w = e.w;
  t.removeEdge(v, w);
  t.setEdge(f.v, f.w, {});
  initLowLimValues(t);
  initCutValues(t, g);
  updateRanks(t, g);
}
function updateRanks(t, g) {
  var root = _.find(t.nodes(), function(v) { return !g.node(v).parent; }),
      vs = preorder(t, root);
  vs = vs.slice(1);
  _.each(vs, function(v) {
    var parent = t.node(v).parent,
        edge = g.edge(v, parent),
        flipped = false;

    if (!edge) {
      edge = g.edge(parent, v);
      flipped = true;
    }

    g.node(v).rank = g.node(parent).rank + (flipped ? edge.minlen : -edge.minlen);
  });
}
/*
 * Returns true if the edge is in the tree.
 */
function isTreeEdge(tree, u, v) {
  return tree.hasEdge(u, v);
}

/*
 * Returns true if the specified node is a descendant of the root node per the
 * assigned low and lim attributes in the tree.
 */
function isDescendant(tree, vLabel, rootLabel) {
  return rootLabel.low <= vLabel.lim && vLabel.lim <= rootLabel.lim;
}
},{"../graphlib":7,"../lodash":10,"../util":29,"./feasible-tree":25,"./util":28}],28:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("../lodash");
|
|
|
|
module.exports = {
|
|
longestPath: longestPath,
|
|
slack: slack
|
|
};
|
|
|
|
/*
 * Initializes ranks for the input graph using the longest path algorithm. This
 * algorithm scales well and is fast in practice, but it yields rather poor
 * solutions. Nodes are pushed to the lowest layer possible, leaving the bottom
 * ranks wide and leaving edges longer than necessary. However, due to its
 * speed, this algorithm is good for getting an initial ranking that can be fed
 * into other algorithms.
 *
 * This algorithm does not normalize layers because it will be used by other
 * algorithms in most cases. If using this algorithm directly, be sure to
 * run normalize at the end.
 *
 * Pre-conditions:
 *
 *    1. Input graph is a DAG.
 *    2. Input graph node labels can be assigned properties.
 *
 * Post-conditions:
 *
 *    1. Each node will be assigned an (unnormalized) "rank" property.
 */
function longestPath(g) {
  var visited = {};

  function dfs(v) {
    var label = g.node(v);
    if (_.has(visited, v)) {
      return label.rank;
    }
    visited[v] = true;

    var rank = _.min(_.map(g.outEdges(v), function(e) {
      return dfs(e.w) - g.edge(e).minlen;
    }));

    if (rank === Number.POSITIVE_INFINITY) {
      rank = 0;
    }

    return (label.rank = rank);
  }

  _.each(g.sources(), dfs);
}
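// Illustration: for a chain a -> b -> c with minlen 1 on both edges, the sink
// c gets rank 0, b gets -1 and a gets -2; normalizeRanks (in lib/util) can
// later shift these to 0, 1 and 2.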
/*
 * Returns the amount of slack for the given edge. The slack is defined as the
 * difference between the length of the edge and its minimum length.
 */
function slack(g, e) {
  return g.node(e.w).rank - g.node(e.v).rank - g.edge(e).minlen;
}
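// Example: with rank(e.v) = 0, rank(e.w) = 3 and minlen = 1 the slack is 2;
// a "tight" edge is one whose slack is 0.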
},{"../lodash":10}],29:[function(require,module,exports){
|
|
"use strict";
|
|
|
|
var _ = require("./lodash"),
|
|
Graph = require("./graphlib").Graph;
|
|
|
|
module.exports = {
|
|
addDummyNode: addDummyNode,
|
|
simplify: simplify,
|
|
asNonCompoundGraph: asNonCompoundGraph,
|
|
successorWeights: successorWeights,
|
|
predecessorWeights: predecessorWeights,
|
|
intersectRect: intersectRect,
|
|
buildLayerMatrix: buildLayerMatrix,
|
|
normalizeRanks: normalizeRanks,
|
|
removeEmptyRanks: removeEmptyRanks,
|
|
addBorderNode: addBorderNode,
|
|
maxRank: maxRank,
|
|
partition: partition,
|
|
time: time,
|
|
notime: notime
|
|
};
|
|
|
|
/*
 * Adds a dummy node to the graph and returns its id, v.
 */
function addDummyNode(g, type, attrs, name) {
  var v;
  do {
    v = _.uniqueId(name);
  } while (g.hasNode(v));

  attrs.dummy = type;
  g.setNode(v, attrs);
  return v;
}
/*
 * Returns a new graph with only simple edges. Handles aggregation of data
 * associated with multi-edges.
 */
function simplify(g) {
  var simplified = new Graph().setGraph(g.graph());
  _.each(g.nodes(), function(v) { simplified.setNode(v, g.node(v)); });
  _.each(g.edges(), function(e) {
    var simpleLabel = simplified.edge(e.v, e.w) || { weight: 0, minlen: 1 },
        label = g.edge(e);
    simplified.setEdge(e.v, e.w, {
      weight: simpleLabel.weight + label.weight,
      minlen: Math.max(simpleLabel.minlen, label.minlen)
    });
  });
  return simplified;
}
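// Example: two parallel edges v -> w labeled { weight: 1, minlen: 1 } and
// { weight: 2, minlen: 3 } collapse into a single edge { weight: 3, minlen: 3 }.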
function asNonCompoundGraph(g) {
  var simplified = new Graph({ multigraph: g.isMultigraph() }).setGraph(g.graph());
  _.each(g.nodes(), function(v) {
    if (!g.children(v).length) {
      simplified.setNode(v, g.node(v));
    }
  });
  _.each(g.edges(), function(e) {
    simplified.setEdge(e, g.edge(e));
  });
  return simplified;
}
function successorWeights(g) {
  var weightMap = _.map(g.nodes(), function(v) {
    var sucs = {};
    _.each(g.outEdges(v), function(e) {
      sucs[e.w] = (sucs[e.w] || 0) + g.edge(e).weight;
    });
    return sucs;
  });
  return _.zipObject(g.nodes(), weightMap);
}

function predecessorWeights(g) {
  var weightMap = _.map(g.nodes(), function(v) {
    var preds = {};
    _.each(g.inEdges(v), function(e) {
      preds[e.v] = (preds[e.v] || 0) + g.edge(e).weight;
    });
    return preds;
  });
  return _.zipObject(g.nodes(), weightMap);
}
/*
 * Finds where a line starting at point ({x, y}) would intersect a rectangle
 * ({x, y, width, height}) if it were pointing at the rectangle's center.
 */
function intersectRect(rect, point) {
  var x = rect.x;
  var y = rect.y;

  // Rectangle intersection algorithm from:
  // http://math.stackexchange.com/questions/108113/find-edge-between-two-boxes
  var dx = point.x - x;
  var dy = point.y - y;
  var w = rect.width / 2;
  var h = rect.height / 2;

  if (!dx && !dy) {
    throw new Error("Not possible to find intersection inside of the rectangle");
  }

  var sx, sy;
  if (Math.abs(dy) * w > Math.abs(dx) * h) {
    // Intersection is top or bottom of rect.
    if (dy < 0) {
      h = -h;
    }
    sx = h * dx / dy;
    sy = h;
  } else {
    // Intersection is left or right of rect.
    if (dx < 0) {
      w = -w;
    }
    sx = w;
    sy = w * dy / dx;
  }

  return { x: x + sx, y: y + sy };
}
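// Example: intersectRect({ x: 0, y: 0, width: 100, height: 40 },
// { x: 200, y: 0 }) returns { x: 50, y: 0 }, the middle of the right edge.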
/*
 * Given a DAG with each node assigned "rank" and "order" properties, this
 * function will produce a matrix with the ids of each node.
 */
function buildLayerMatrix(g) {
  var layering = _.map(_.range(maxRank(g) + 1), function() { return []; });
  _.each(g.nodes(), function(v) {
    var node = g.node(v),
        rank = node.rank;
    if (!_.isUndefined(rank)) {
      layering[rank][node.order] = v;
    }
  });
  return layering;
}
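// Example: nodes a (rank 0, order 0), b (rank 0, order 1) and c (rank 1,
// order 0) produce the matrix [["a", "b"], ["c"]].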
/*
 * Adjusts the ranks for all nodes in the graph such that all nodes v have
 * rank(v) >= 0 and at least one node w has rank(w) = 0.
 */
function normalizeRanks(g) {
  var min = _.min(_.map(g.nodes(), function(v) { return g.node(v).rank; }));
  _.each(g.nodes(), function(v) {
    var node = g.node(v);
    if (_.has(node, "rank")) {
      node.rank -= min;
    }
  });
}
function removeEmptyRanks(g) {
  // Ranks may not start at 0, so we need to offset them
  var offset = _.min(_.map(g.nodes(), function(v) { return g.node(v).rank; }));

  var layers = [];
  _.each(g.nodes(), function(v) {
    var rank = g.node(v).rank - offset;
    if (!_.has(layers, rank)) {
      layers[rank] = [];
    }
    layers[rank].push(v);
  });

  var delta = 0,
      nodeRankFactor = g.graph().nodeRankFactor;
  _.each(layers, function(vs, i) {
    if (_.isUndefined(vs) && i % nodeRankFactor !== 0) {
      --delta;
    } else if (delta) {
      _.each(vs, function(v) { g.node(v).rank += delta; });
    }
  });
}
function addBorderNode(g, prefix, rank, order) {
  var node = {
    width: 0,
    height: 0
  };
  if (arguments.length >= 4) {
    node.rank = rank;
    node.order = order;
  }
  return addDummyNode(g, "border", node, prefix);
}
function maxRank(g) {
  return _.max(_.map(g.nodes(), function(v) {
    var rank = g.node(v).rank;
    if (!_.isUndefined(rank)) {
      return rank;
    }
  }));
}
/*
 * Partition a collection into two groups: `lhs` and `rhs`. If the supplied
 * function returns true for an entry it goes into `lhs`. Otherwise it goes
 * into `rhs`.
 */
function partition(collection, fn) {
  var result = { lhs: [], rhs: [] };
  _.each(collection, function(value) {
    if (fn(value)) {
      result.lhs.push(value);
    } else {
      result.rhs.push(value);
    }
  });
  return result;
}
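// Example: partition([1, 2, 3, 4], function(v) { return v % 2; }) returns
// { lhs: [1, 3], rhs: [2, 4] }.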
/*
 * Invokes `fn`, logging the time it takes to execute under the given name,
 * and returns the result of `fn`.
 */
function time(name, fn) {
  var start = _.now();
  try {
    return fn();
  } finally {
    console.log(name + " time: " + (_.now() - start) + "ms");
  }
}

function notime(name, fn) {
  return fn();
}
},{"./graphlib":7,"./lodash":10}],30:[function(require,module,exports){
|
|
module.exports = "0.6.4";
|
|
|
|
},{}]},{},[1])(1)
|
|
}); |