Bug 1032129 - Render AudioParam connections in web audio editor. r=vp

Jordan Santell 2014-08-18 14:36:00 -04:00
parent aff21983d3
commit 8b097fa5a0
7 changed files with 225 additions and 42 deletions

View File

@@ -9,6 +9,7 @@ support-files =
doc_destroy-nodes.html
doc_connect-toggle.html
doc_connect-param.html
doc_connect-multi-param.html
440hz_sine.ogg
head.js
@@ -33,6 +34,7 @@ support-files =
[browser_wa_graph-render-01.js]
[browser_wa_graph-render-02.js]
[browser_wa_graph-render-03.js]
[browser_wa_graph-render-04.js]
[browser_wa_graph-markers.js]
[browser_wa_graph-selected.js]
[browser_wa_graph-zoom.js]

View File

@@ -0,0 +1,40 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Tests audio param connection rendering.
*/
function spawnTest() {
  let [target, debuggee, panel] = yield initWebAudioEditor(CONNECT_MULTI_PARAM_URL);
  let { panelWin } = panel;
  let { gFront, $, $$, EVENTS } = panelWin;

  let started = once(gFront, "start-context");

  reload(target);

  let [actors] = yield Promise.all([
    getN(gFront, "create-node", 5),
    waitForGraphRendered(panelWin, 5, 2, 3)
  ]);

  let nodeIDs = actors.map(actor => actor.actorID);
  let [, carrier, gain, mod1, mod2] = nodeIDs;

  let edges = [
    [mod1, gain, "gain", "mod1 -> gain[gain]"],
    [mod2, carrier, "frequency", "mod2 -> carrier[frequency]"],
    [mod2, carrier, "detune", "mod2 -> carrier[detune]"]
  ];

  edges.forEach(([source, target, param, msg]) => {
    let edge = findGraphEdge(panelWin, source, target, param);
    ok(edge, "found rendered edge for " + msg);
    ok(edge.classList.contains("param-connection"),
      msg + " edge is classified as a param-connection");
  });

  yield teardown(panel);
  finish();
}
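// For reference, the counts passed to waitForGraphRendered above come from
// doc_connect-multi-param.html: 5 nodes (the context destination, carrier, gain,
// modulator and modulator2), 2 AudioNode edges (carrier -> gain, gain -> destination),
// and 3 AudioParam edges (modulator -> gain.gain, modulator2 -> carrier.frequency,
// modulator2 -> carrier.detune).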

View File

@@ -0,0 +1,32 @@
<!-- Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ -->
<!doctype html>
<html>
  <head>
    <meta charset="utf-8"/>
    <title>Web Audio Editor test page</title>
  </head>

  <body>
    <script type="text/javascript;version=1.8">
      "use strict";

      let ctx = new AudioContext();
      let carrier = ctx.createOscillator();
      let gain = ctx.createGain();
      let modulator = ctx.createOscillator();
      let modulator2 = ctx.createOscillator();

      carrier.connect(gain);
      gain.connect(ctx.destination);

      modulator.connect(gain.gain);
      modulator2.connect(carrier.frequency);
      modulator2.connect(carrier.detune);

      modulator.start(0);
      modulator2.start(0);
      carrier.start(0);
    </script>
  </body>
</html>

View File

@@ -29,6 +29,7 @@ const BUFFER_AND_ARRAY_URL = EXAMPLE_URL + "doc_buffer-and-array.html";
const DESTROY_NODES_URL = EXAMPLE_URL + "doc_destroy-nodes.html";
const CONNECT_TOGGLE_URL = EXAMPLE_URL + "doc_connect-toggle.html";
const CONNECT_PARAM_URL = EXAMPLE_URL + "doc_connect-param.html";
const CONNECT_MULTI_PARAM_URL = EXAMPLE_URL + "doc_connect-multi-param.html";
// All tests are asynchronous.
waitForExplicitFinish();
@@ -204,11 +205,15 @@ function getNSpread (front, eventName, count) { return getN(front, eventName, co
* resolves when the graph has rendered with the expected counts of
* nodes, edges and, when specified, param edges.
*/
function waitForGraphRendered (front, nodeCount, edgeCount) {
function waitForGraphRendered (front, nodeCount, edgeCount, paramEdgeCount) {
let deferred = Promise.defer();
let eventName = front.EVENTS.UI_GRAPH_RENDERED;
front.on(eventName, function onGraphRendered (_, nodes, edges) {
if (nodes === nodeCount && edges === edgeCount) {
front.on(eventName, function onGraphRendered (_, nodes, edges, pEdges) {
info(nodes);
info(edges);
info(pEdges);
let paramEdgesDone = paramEdgeCount ? paramEdgeCount === pEdges : true;
if (nodes === nodeCount && edges === edgeCount && paramEdgesDone) {
front.off(eventName, onGraphRendered);
deferred.resolve();
}
@@ -290,8 +295,11 @@ function modifyVariableView (win, view, index, prop, value) {
return deferred.promise;
}
function findGraphEdge (win, source, target) {
function findGraphEdge (win, source, target, param) {
let selector = ".edgePaths .edgePath[data-source='" + source + "'][data-target='" + target + "']";
if (param) {
selector += "[data-param='" + param + "']";
}
return win.document.querySelector(selector);
}
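A short sketch of how a test can use the new arguments, following the pattern in browser_wa_graph-render-04.js above; the node id variables (`mod2`, `carrier`) are illustrative:

yield waitForGraphRendered(panelWin, 5, 2, 3); // 5 nodes, 2 node edges, 3 param edges
let edge = findGraphEdge(panelWin, mod2, carrier, "frequency");
// selector used: ".edgePaths .edgePath[data-source='<mod2>'][data-target='<carrier>'][data-param='frequency']"
ok(edge.classList.contains("param-connection"), "frequency edge is rendered as a param connection");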

View File

@@ -64,7 +64,8 @@ const EVENTS = {
// When the Audio Context graph finishes rendering.
// Is called with three arguments, first representing number of nodes
// rendered, second being the number of edges rendered.
// rendered, second being the number of AudioNode edge connections rendered (not
// counting param edges), followed by the count of param edges rendered.
UI_GRAPH_RENDERED: "WebAudioEditor:UIGraphRendered"
};
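// A minimal sketch of listening for the updated event (the argument order mirrors
// waitForGraphRendered in head.js; the handler body is illustrative):
//   window.on(EVENTS.UI_GRAPH_RENDERED, (_, nodes, edges, paramEdges) => {
//     // e.g. 5 nodes, 2 AudioNode edges and 3 param edges for doc_connect-multi-param.html
//   });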
@@ -77,8 +78,8 @@ let gToolbox, gTarget, gFront;
* Track an array of audio nodes
*/
let AudioNodes = [];
let AudioNodeConnections = new WeakMap();
let AudioNodeConnections = new WeakMap(); // <AudioNodeView, Set<AudioNodeView>>
let AudioParamConnections = new WeakMap(); // <AudioNodeView, Object>
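// Illustrative shape, inferred from connectParam below: for the test page,
// AudioParamConnections.get(modulator2View) would hold something like
// { "<carrier view id>": ["frequency", "detune"] }.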
// Light representation wrapping an AudioNode actor with additional properties
function AudioNodeView (actor) {
@@ -109,9 +110,27 @@ AudioNodeView.prototype.connect = function (destination) {
return false;
};
// Helper method to create param connections in the AudioParamConnections
// WeakMap for rendering. Returns a boolean indicating
// if the connection was successfully created. Will return `false`
// when the connection was previously made.
AudioNodeView.prototype.connectParam = function (destination, param) {
let connections = AudioParamConnections.get(this) || {};
AudioParamConnections.set(this, connections);
let params = connections[destination.id] = connections[destination.id] || [];
if (!~params.indexOf(param)) {
params.push(param);
return true;
}
return false;
};
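// For example (view names illustrative), only the first call records the connection,
// so CONNECT_PARAM is emitted once per param:
//   modulator2View.connectParam(carrierView, "frequency"); // true -> emit
//   modulator2View.connectParam(carrierView, "frequency"); // false, already recorded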
// Helper method to remove audio connections from the current AudioNodeView
AudioNodeView.prototype.disconnect = function () {
AudioNodeConnections.set(this, new Set());
AudioParamConnections.set(this, {});
};
// Returns a promise that resolves to an array of objects containing
@@ -159,6 +178,7 @@ let WebAudioEditorController = {
gFront.on("start-context", this._onStartContext);
gFront.on("create-node", this._onCreateNode);
gFront.on("connect-node", this._onConnectNode);
gFront.on("connect-param", this._onConnectParam);
gFront.on("disconnect-node", this._onDisconnectNode);
gFront.on("change-param", this._onChangeParam);
gFront.on("destroy-node", this._onDestroyNode);
@@ -173,6 +193,7 @@ let WebAudioEditorController = {
window.on(EVENTS.CONNECT_NODE, this._onUpdatedContext);
window.on(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
window.on(EVENTS.DESTROY_NODE, this._onUpdatedContext);
window.on(EVENTS.CONNECT_PARAM, this._onUpdatedContext);
},
/**
@@ -185,6 +206,7 @@ let WebAudioEditorController = {
gFront.off("start-context", this._onStartContext);
gFront.off("create-node", this._onCreateNode);
gFront.off("connect-node", this._onConnectNode);
gFront.off("connect-param", this._onConnectParam);
gFront.off("disconnect-node", this._onDisconnectNode);
gFront.off("change-param", this._onChangeParam);
gFront.off("destroy-node", this._onDestroyNode);
@@ -192,6 +214,7 @@ let WebAudioEditorController = {
window.off(EVENTS.CONNECT_NODE, this._onUpdatedContext);
window.off(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
window.off(EVENTS.DESTROY_NODE, this._onUpdatedContext);
window.off(EVENTS.CONNECT_PARAM, this._onUpdatedContext);
gDevTools.off("pref-changed", this._onThemeChange);
},
@@ -291,38 +314,22 @@ let WebAudioEditorController = {
* Called when a node is connected to another node.
*/
_onConnectNode: Task.async(function* ({ source: sourceActor, dest: destActor }) {
let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);
// Connect nodes, and only emit if it's a new connection.
if (source.connect(dest)) {
window.emit(EVENTS.CONNECT_NODE, source.id, dest.id);
}
}),
/**
* Called when a node is connected to another node's AudioParam.
*/
_onConnectParam: Task.async(function* ({ source: sourceActor, dest: destActor, param }) {
let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);
if (source.connectParam(dest, param)) {
window.emit(EVENTS.CONNECT_PARAM, source.id, dest.id, param);
}
}),
@@ -379,3 +386,31 @@ function getViewNodeByActor (actor) {
function getViewNodeById (id) {
return getViewNodeByActor({ actorID: id });
}
// Since node create and connect are probably executed back to back,
// and the controller's `_onCreateNode` needs to look up type,
// the edge creation could be called before the graph node is actually
// created. This way, we can check and listen for the event before
// adding an edge.
function waitForNodeCreation (sourceActor, destActor) {
let deferred = defer();
let eventName = EVENTS.CREATE_NODE;
let source = getViewNodeByActor(sourceActor);
let dest = getViewNodeByActor(destActor);
if (!source || !dest)
window.on(eventName, function createNodeListener (_, id) {
let createdNode = getViewNodeById(id);
if (equalActors(sourceActor, createdNode.actor))
source = createdNode;
if (equalActors(destActor, createdNode.actor))
dest = createdNode;
if (source && dest) {
window.off(eventName, createNodeListener);
deferred.resolve([source, dest]);
}
});
else
deferred.resolve([source, dest]);
return deferred.promise;
}

View File

@@ -144,14 +144,18 @@ let WebAudioGraphView = {
/**
* `draw` renders the ViewNodes currently available in `AudioNodes` with `AudioNodeConnections`,
* and is throttled to be called at most every `GRAPH_DEBOUNCE_TIMER` milliseconds. Is called
* whenever the audio context routing changes, after being debounced.
* and `AudioParamConnections` and is throttled to be called at most every
* `GRAPH_DEBOUNCE_TIMER` milliseconds. Is called whenever the audio context routing changes,
* after being debounced.
*/
draw: function () {
// Clear out previous SVG information
this.clearGraph();
let graph = new dagreD3.Digraph();
// An array of [sourceNode, destNode, param] triples.
// `param` is optional, indicating a connection to an AudioParam rather than
// to another AudioNode.
let edges = [];
AudioNodes.forEach(node => {
@@ -166,12 +170,30 @@ let WebAudioGraphView = {
// after all the nodes are added, otherwise edge creation would be attempted
// for nodes that have not yet been added
AudioNodeConnections.get(node, new Set()).forEach(dest => edges.push([node, dest]));
let paramConnections = AudioParamConnections.get(node, {});
Object.keys(paramConnections).forEach(destId => {
let dest = getViewNodeById(destId);
let connections = paramConnections[destId] || [];
connections.forEach(param => edges.push([node, dest, param]));
});
});
edges.forEach(([node, dest]) => graph.addEdge(null, node.id, dest.id, {
source: node.id,
target: dest.id
}));
edges.forEach(([node, dest, param]) => {
let options = {
source: node.id,
target: dest.id
};
// Only add `label` if `param` is specified, since that indicates an AudioParam connection.
// `label` is what dagre-d3 uses to render the edge label; `param` stores the param name
// explicitly, independent of that rendering detail.
if (param) {
options.label = param;
options.param = param;
}
graph.addEdge(null, node.id, dest.id, options);
});
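// Illustrative: the modulator2 -> carrier.frequency connection from the test page is
// pushed as [modulator2View, carrierView, "frequency"] and added to the graph with
// { source: modulator2View.id, target: carrierView.id, label: "frequency", param: "frequency" }.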
let renderer = new dagreD3.Renderer();
@@ -191,18 +213,33 @@
});
// Post-render manipulation of edges
// TODO do all of this more efficiently, rather than
// using the direct D3 helper utilities to loop over each
// edge several times
let oldDrawEdgePaths = renderer.drawEdgePaths();
renderer.drawEdgePaths(function(graph, root) {
let svgEdges = oldDrawEdgePaths(graph, root);
svgEdges.attr("data-source", (n) => {
let edge = graph.edge(n);
return edge.source;
});
svgEdges.attr("data-target", (n) => {
let edge = graph.edge(n);
return edge.target;
});
svgEdges.attr("data-param", (n) => {
let edge = graph.edge(n);
return edge.param ? edge.param : null;
});
// We have to manually specify the default classes on the edges
// so as not to overwrite them
let defaultClasses = "edgePath enter";
svgEdges.attr("class", (n) => {
let edge = graph.edge(n);
return defaultClasses + (edge.param ? (" param-connection " + edge.param) : "");
});
return svgEdges;
});
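// A param-connection path produced by the override above would then look roughly like
// (illustrative markup):
//   <g class="edgePath enter param-connection frequency"
//      data-source="..." data-target="..." data-param="frequency"> ... </g>
// which is what findGraphEdge() in head.js and the param-connection CSS rules select on.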
// Override Dagre-d3's post render function by passing in our own.
@@ -240,7 +277,8 @@ let WebAudioGraphView = {
}
// Fire an event upon completed rendering
window.emit(EVENTS.UI_GRAPH_RENDERED, AudioNodes.length, edges.length);
let paramEdgeCount = edges.filter(p => !!p[2]).length;
window.emit(EVENTS.UI_GRAPH_RENDERED, AudioNodes.length, edges.length - paramEdgeCount, paramEdgeCount);
});
let layout = dagreD3.layout().rankDir("LR");

View File

@@ -53,6 +53,34 @@ svg {
stroke: #aaaaaa; /* Splitters */
}
/* AudioParam connection edges */
g.edgePath.param-connection {
stroke-dasharray: 5,5;
}
.theme-dark .edgePath.param-connection path {
stroke: #b6babf; /* Grey foreground text */
}
.theme-light .edgePath.param-connection path {
stroke: #aaaaaa; /* Splitters */
}
/* Labels on AudioParam connections should have a background that matches
 * the main background, so there is whitespace around the label on top of the
 * dotted lines. */
.theme-dark g.edgeLabel rect {
fill: #14171a;
}
.theme-light g.edgeLabel rect {
fill: #fcfcfc; /* Background - Editor */
}
.theme-dark g.edgeLabel tspan {
fill: #b6babf; /* Grey foreground text */
}
.theme-light g.edgeLabel tspan {
fill: #585959; /* Grey foreground text */
}
/* Audio Nodes */
.nodes rect {
stroke-width: 1px;