Merge fx-team to m-c. a=merge

Ryan VanderMeulen 2014-09-24 15:58:32 -04:00
commit 81d60684ec
108 changed files with 2707 additions and 2479 deletions

View File

@ -1222,9 +1222,9 @@ var gBrowserInit = {
// Delay this a minute because there's no rush
setTimeout(() => {
this.gmpInstallManager = new GMPInstallManager();
// We don't really care about the results, if somenoe is interested they
// We don't really care about the results, if someone is interested they
// can check the log.
this.gmpInstallManager.simpleCheckAndInstall();
this.gmpInstallManager.simpleCheckAndInstall().then(null, () => {});
}, 1000 * 60);
SessionStore.promiseInitialized.then(() => {
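Editor's note: the one-line change above appends a no-op rejection handler so a failed GMP check does not surface as an unhandled promise rejection; as the existing comment says, anyone interested can still check the log. A minimal sketch of the pattern (gmpInstallManager is the object from the hunk above):

    // Two equivalent ways to deliberately ignore a rejection:
    gmpInstallManager.simpleCheckAndInstall().then(null, () => {});
    gmpInstallManager.simpleCheckAndInstall().catch(() => {});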

View File

@ -316,7 +316,6 @@ skip-if = e10s # Bug ?????? - test directly manipulates content (tries to grab a
[browser_fullscreen-window-open.js]
skip-if = buildapp == 'mulet' || e10s || os == "linux" # Bug 933103 - mochitest's EventUtils.synthesizeMouse functions not e10s friendly. Linux: Intermittent failures - bug 941575.
[browser_fxa_oauth.js]
skip-if = e10s
[browser_gestureSupport.js]
skip-if = e10s # Bug 863514 - no gesture support.
[browser_getshortcutoruri.js]

View File

@ -1,7 +1,7 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
function test() {
waitForExplicitFinish();
@ -206,5 +206,53 @@ let tests = [
notification.remove();
goNext();
}
},
// panel updates should fire the showing and shown callbacks again.
{ id: "Test#11",
run: function() {
this.notifyObj = new BasicNotification(this.id);
this.notification = showNotification(this.notifyObj);
},
onShown: function (popup) {
checkPopup(popup, this.notifyObj);
this.notifyObj.showingCallbackTriggered = false;
this.notifyObj.shownCallbackTriggered = false;
// Force an update of the panel. This is typically called
// automatically when receiving 'activate' or 'TabSelect' events,
// but from a setTimeout, which is inconvenient for the test.
PopupNotifications._update();
checkPopup(popup, this.notifyObj);
this.notification.remove();
},
onHidden: function() { }
},
// A first dismissed notification shouldn't stop _update from showing a second notification
{ id: "Test#12",
run: function () {
this.notifyObj1 = new BasicNotification(this.id);
this.notifyObj1.id += "_1";
this.notifyObj1.anchorID = "default-notification-icon";
this.notifyObj1.options.dismissed = true;
this.notification1 = showNotification(this.notifyObj1);
this.notifyObj2 = new BasicNotification(this.id);
this.notifyObj2.id += "_2";
this.notifyObj2.anchorID = "geo-notification-icon";
this.notifyObj2.options.dismissed = true;
this.notification2 = showNotification(this.notifyObj2);
this.notification2.dismissed = false;
PopupNotifications._update();
},
onShown: function (popup) {
checkPopup(popup, this.notifyObj2);
this.notification1.remove();
this.notification2.remove();
},
onHidden: function(popup) { }
}
];
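Editor's note: the two tests added above (Test#11 and Test#12) follow the same harness shape as the rest of this file: an id, a run() step that shows one or more notifications, and onShown/onHidden callbacks that assert against the popup. A rough sketch of that contract, using only names visible in the hunk (BasicNotification, showNotification, checkPopup and goNext are helpers defined elsewhere in the test file; how the harness advances between tests is not shown here):

    { id: "Test#NN",
      run: function () {
        // create and show a notification; the harness then waits for popup events
        this.notifyObj = new BasicNotification(this.id);
        this.notification = showNotification(this.notifyObj);
      },
      onShown: function (popup) {
        // assert on the popup contents, then clean up for the next test
        checkPopup(popup, this.notifyObj);
        this.notification.remove();
      },
      onHidden: function (popup) { /* per-test cleanup hook */ }
    }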

View File

@ -1379,6 +1379,6 @@ this.MozLoopService = {
*/
hawkRequest: function(sessionType, path, method, payloadObj) {
return MozLoopServiceInternal.hawkRequest(sessionType, path, method, payloadObj).catch(
error => {this._hawkRequestError(error);});
error => {MozLoopServiceInternal._hawkRequestError(error);});
},
};
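Editor's note: the fix above points the rejection handler at MozLoopServiceInternal, where _hawkRequestError is presumably defined. Inside the arrow function, `this` is the enclosing public MozLoopService object (the hunk header shows `this.MozLoopService = {`), which has no _hawkRequestError, so the old handler would itself throw when a hawk request failed. A sketch of the distinction, with a hypothetical method body:

    var MozLoopServiceInternal = {
      // assumed to live here, per the fix above; the body is illustrative only
      _hawkRequestError: function (error) {
        console.error("Loop hawk request error", error);
      },
      hawkRequest: function (sessionType, path, method, payloadObj) { /* ... */ }
    };

    this.MozLoopService = {
      hawkRequest: function (sessionType, path, method, payloadObj) {
        return MozLoopServiceInternal.hawkRequest(sessionType, path, method, payloadObj)
          // arrow keeps the lexical `this` (MozLoopService), so call the
          // internal object explicitly rather than this._hawkRequestError
          .catch(error => { MozLoopServiceInternal._hawkRequestError(error); });
      }
    };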

View File

@ -27,13 +27,11 @@
<script type="text/javascript" src="loop/shared/js/utils.js"></script>
<script type="text/javascript" src="loop/shared/js/models.js"></script>
<script type="text/javascript" src="loop/shared/js/router.js"></script>
<script type="text/javascript" src="loop/shared/js/mixins.js"></script>
<script type="text/javascript" src="loop/shared/js/views.js"></script>
<script type="text/javascript" src="loop/shared/js/feedbackApiClient.js"></script>
<script type="text/javascript" src="loop/shared/js/websocket.js"></script>
<script type="text/javascript" src="loop/js/client.js"></script>
<script type="text/javascript" src="loop/js/desktopRouter.js"></script>
<script type="text/javascript" src="loop/js/conversation.js"></script>
</body>
</html>

View File

@ -8,16 +8,11 @@
/* global loop:true, React */
var loop = loop || {};
loop.conversation = (function(OT, mozL10n) {
loop.conversation = (function(mozL10n) {
"use strict";
var sharedViews = loop.shared.views;
/**
* App router.
* @type {loop.desktopRouter.DesktopConversationRouter}
*/
var router;
var sharedViews = loop.shared.views,
sharedModels = loop.shared.models;
var IncomingCallView = React.createClass({displayName: 'IncomingCallView',
@ -200,92 +195,183 @@ loop.conversation = (function(OT, mozL10n) {
});
/**
* Conversation router.
* This view manages the incoming conversation views - from
* call initiation through to the actual conversation and call end.
*
* Required options:
* - {loop.shared.models.ConversationModel} conversation Conversation model.
* - {loop.shared.models.NotificationCollection} notifications
*
* @type {loop.shared.router.BaseConversationRouter}
* At the moment, it does more than that, these parts need refactoring out.
*/
var ConversationRouter = loop.desktopRouter.DesktopConversationRouter.extend({
routes: {
"incoming/:callId": "incoming",
"call/accept": "accept",
"call/decline": "decline",
"call/ongoing": "conversation",
"call/declineAndBlock": "declineAndBlock",
"call/shutdown": "shutdown",
"call/feedback": "feedback"
var IncomingConversationView = React.createClass({displayName: 'IncomingConversationView',
propTypes: {
client: React.PropTypes.instanceOf(loop.Client).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired
},
getInitialState: function() {
return {
callStatus: "start"
}
},
componentDidMount: function() {
this.props.conversation.on("accept", this.accept, this);
this.props.conversation.on("decline", this.decline, this);
this.props.conversation.on("declineAndBlock", this.declineAndBlock, this);
this.props.conversation.on("call:accepted", this.accepted, this);
this.props.conversation.on("change:publishedStream", this._checkConnected, this);
this.props.conversation.on("change:subscribedStream", this._checkConnected, this);
this.props.conversation.on("session:ended", this.endCall, this);
this.props.conversation.on("session:peer-hungup", this._onPeerHungup, this);
this.props.conversation.on("session:network-disconnected", this._onNetworkDisconnected, this);
this.props.conversation.on("session:connection-error", this._notifyError, this);
this.setupIncomingCall();
},
componentDidUnmount: function() {
this.props.conversation.off(null, null, this);
},
render: function() {
switch (this.state.callStatus) {
case "start": {
document.title = mozL10n.get("incoming_call_title2");
// XXX Don't render anything initially, though this should probably
// be some sort of pending view, whilst we connect the websocket.
return null;
}
case "incoming": {
document.title = mozL10n.get("incoming_call_title2");
return (
IncomingCallView({
model: this.props.conversation,
video: this.props.conversation.hasVideoStream("incoming")}
)
);
}
case "connected": {
// XXX This should be the caller id (bug 1020449)
document.title = mozL10n.get("incoming_call_title2");
var callType = this.props.conversation.get("selectedCallType");
return (
sharedViews.ConversationView({
initiate: true,
sdk: this.props.sdk,
model: this.props.conversation,
video: {enabled: callType !== "audio"}}
)
);
}
case "end": {
document.title = mozL10n.get("conversation_has_ended");
var feebackAPIBaseUrl = navigator.mozLoop.getLoopCharPref(
"feedback.baseUrl");
var appVersionInfo = navigator.mozLoop.appVersionInfo;
var feedbackClient = new loop.FeedbackAPIClient(feebackAPIBaseUrl, {
product: navigator.mozLoop.getLoopCharPref("feedback.product"),
platform: appVersionInfo.OS,
channel: appVersionInfo.channel,
version: appVersionInfo.version
});
return (
sharedViews.FeedbackView({
feedbackApiClient: feedbackClient,
onAfterFeedbackReceived: this.closeWindow.bind(this)}
)
);
}
case "close": {
window.close();
return (React.DOM.div(null));
}
}
},
/**
* @override {loop.shared.router.BaseConversationRouter.startCall}
* Notify the user that the connection was not possible
* @param {{code: number, message: string}} error
*/
startCall: function() {
this.navigate("call/ongoing", {trigger: true});
_notifyError: function(error) {
console.error(error);
this.props.notifications.errorL10n("connection_error_see_console_notification");
this.setState({callStatus: "end"});
},
/**
* @override {loop.shared.router.BaseConversationRouter.endCall}
* Peer hung up. Notifies the user and ends the call.
*
* Event properties:
* - {String} connectionId: OT session id
*/
endCall: function() {
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
this.navigate("call/feedback", {trigger: true});
_onPeerHungup: function() {
this.props.notifications.warnL10n("peer_ended_conversation2");
this.setState({callStatus: "end"});
},
shutdown: function() {
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
/**
* Network disconnected. Notifies the user and ends the call.
*/
_onNetworkDisconnected: function() {
this.props.notifications.warnL10n("network_disconnected");
this.setState({callStatus: "end"});
},
/**
* Incoming call route.
*
* @param {String} callId Identifier assigned by the LoopService
* to this incoming call.
*/
incoming: function(callId) {
setupIncomingCall: function() {
navigator.mozLoop.startAlerting();
this._conversation.once("accept", function() {
this.navigate("call/accept", {trigger: true});
}.bind(this));
this._conversation.once("decline", function() {
this.navigate("call/decline", {trigger: true});
}.bind(this));
this._conversation.once("declineAndBlock", function() {
this.navigate("call/declineAndBlock", {trigger: true});
}.bind(this));
this._conversation.once("call:incoming", this.startCall, this);
this._conversation.once("change:publishedStream", this._checkConnected, this);
this._conversation.once("change:subscribedStream", this._checkConnected, this);
var callData = navigator.mozLoop.getCallData(callId);
var callData = navigator.mozLoop.getCallData(this.props.conversation.get("callId"));
if (!callData) {
console.error("Failed to get the call data");
// XXX Not the ideal response, but bug 1047410 will be replacing
// this by better "call failed" UI.
this._notifications.errorL10n("cannot_start_call_session_not_ready");
this.props.notifications.errorL10n("cannot_start_call_session_not_ready");
return;
}
this._conversation.setIncomingSessionData(callData);
this._setupWebSocketAndCallView();
this.props.conversation.setIncomingSessionData(callData);
this._setupWebSocket();
},
/**
* Starts the actual conversation
*/
accepted: function() {
this.setState({callStatus: "connected"});
},
/**
* Moves the call to the end state
*/
endCall: function() {
navigator.mozLoop.releaseCallData(this.props.conversation.get("callId"));
this.setState({callStatus: "end"});
},
/**
* Used to set up the web socket connection and navigate to the
* call view if appropriate.
*/
_setupWebSocketAndCallView: function() {
_setupWebSocket: function() {
this._websocket = new loop.CallConnectionWebSocket({
url: this._conversation.get("progressURL"),
websocketToken: this._conversation.get("websocketToken"),
callId: this._conversation.get("callId"),
url: this.props.conversation.get("progressURL"),
websocketToken: this.props.conversation.get("websocketToken"),
callId: this.props.conversation.get("callId"),
});
this._websocket.promiseConnect().then(function() {
this.loadReactComponent(loop.conversation.IncomingCallView({
model: this._conversation,
video: this._conversation.hasVideoStream("incoming")
}));
this.setState({callStatus: "incoming"});
}.bind(this), function() {
this._handleSessionError();
return;
@ -301,7 +387,7 @@ loop.conversation = (function(OT, mozL10n) {
_checkConnected: function() {
// Check we've had both local and remote streams connected before
// sending the media up message.
if (this._conversation.streamsConnected()) {
if (this.props.conversation.streamsConnected()) {
this._websocket.mediaUp();
}
},
@ -337,6 +423,12 @@ loop.conversation = (function(OT, mozL10n) {
_abortIncomingCall: function() {
navigator.mozLoop.stopAlerting();
this._websocket.close();
// Having a timeout here lets the logging for the websocket complete and be
// displayed on the console if both are on.
setTimeout(this.closeWindow, 0);
},
closeWindow: function() {
window.close();
},
@ -346,7 +438,7 @@ loop.conversation = (function(OT, mozL10n) {
accept: function() {
navigator.mozLoop.stopAlerting();
this._websocket.accept();
this._conversation.incoming();
this.props.conversation.accepted();
},
/**
@ -354,13 +446,11 @@ loop.conversation = (function(OT, mozL10n) {
*/
_declineCall: function() {
this._websocket.decline();
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
// XXX Don't close the window straight away, but let any sends happen
// first. Ideally we'd wait to close the window until after we have a
// response from the server, to know that everything has completed
// successfully. However, that's quite difficult to ensure at the
// moment so we'll add it later.
setTimeout(window.close, 0);
navigator.mozLoop.releaseCallData(this.props.conversation.get("callId"));
this._websocket.close();
// Having a timeout here lets the logging for the websocket complete and be
// displayed on the console if both are on.
setTimeout(this.closeWindow, 0);
},
/**
@ -379,8 +469,8 @@ loop.conversation = (function(OT, mozL10n) {
*/
declineAndBlock: function() {
navigator.mozLoop.stopAlerting();
var token = this._conversation.get("callToken");
this._client.deleteCallUrl(token, function(error) {
var token = this.props.conversation.get("callToken");
this.props.client.deleteCallUrl(token, function(error) {
// XXX The conversation window will be closed when this cb is triggered
// figure out if there is a better way to report the error to the user
// (bug 1048909).
@ -389,62 +479,14 @@ loop.conversation = (function(OT, mozL10n) {
this._declineCall();
},
/**
* conversation is the route when the conversation is active. The start
* route should be navigated to first.
*/
conversation: function() {
if (!this._conversation.isSessionReady()) {
console.error("Error: navigated to conversation route without " +
"the start route to initialise the call first");
this._handleSessionError();
return;
}
var callType = this._conversation.get("selectedCallType");
var videoStream = callType === "audio" ? false : true;
/*jshint newcap:false*/
this.loadReactComponent(sharedViews.ConversationView({
initiate: true,
sdk: OT,
model: this._conversation,
video: {enabled: videoStream}
}));
},
/**
* Handles a error starting the session
*/
_handleSessionError: function() {
// XXX Not the ideal response, but bug 1047410 will be replacing
// this by better "call failed" UI.
this._notifications.errorL10n("cannot_start_call_session_not_ready");
this.props.notifications.errorL10n("cannot_start_call_session_not_ready");
},
/**
* Call has ended, display a feedback form.
*/
feedback: function() {
document.title = mozL10n.get("conversation_has_ended");
var feebackAPIBaseUrl = navigator.mozLoop.getLoopCharPref(
"feedback.baseUrl");
var appVersionInfo = navigator.mozLoop.appVersionInfo;
var feedbackClient = new loop.FeedbackAPIClient(feebackAPIBaseUrl, {
product: navigator.mozLoop.getLoopCharPref("feedback.product"),
platform: appVersionInfo.OS,
channel: appVersionInfo.channel,
version: appVersionInfo.version
});
this.loadReactComponent(sharedViews.FeedbackView({
feedbackApiClient: feedbackClient,
onAfterFeedbackReceived: window.close.bind(window)
}));
}
});
/**
@ -457,44 +499,50 @@ loop.conversation = (function(OT, mozL10n) {
// Plug in an alternate client ID mechanism, as localStorage and cookies
// don't work in the conversation window
if (OT && OT.hasOwnProperty("overrideGuidStorage")) {
OT.overrideGuidStorage({
get: function(callback) {
callback(null, navigator.mozLoop.getLoopCharPref("ot.guid"));
},
set: function(guid, callback) {
navigator.mozLoop.setLoopCharPref("ot.guid", guid);
callback(null);
}
});
}
document.title = mozL10n.get("incoming_call_title2");
window.OT.overrideGuidStorage({
get: function(callback) {
callback(null, navigator.mozLoop.getLoopCharPref("ot.guid"));
},
set: function(guid, callback) {
navigator.mozLoop.setLoopCharPref("ot.guid", guid);
callback(null);
}
});
document.body.classList.add(loop.shared.utils.getTargetPlatform());
var client = new loop.Client();
router = new ConversationRouter({
client: client,
conversation: new loop.shared.models.ConversationModel(
{}, // Model attributes
{sdk: OT}), // Model dependencies
notifications: new loop.shared.models.NotificationCollection()
});
var conversation = new sharedModels.ConversationModel(
{}, // Model attributes
{sdk: window.OT} // Model dependencies
);
var notifications = new sharedModels.NotificationCollection();
window.addEventListener("unload", function(event) {
// Handle direct close of dialog box via [x] control.
navigator.mozLoop.releaseCallData(router._conversation.get("callId"));
navigator.mozLoop.releaseCallData(conversation.get("callId"));
});
Backbone.history.start();
// Obtain the callId and pass it to the conversation
var helper = new loop.shared.utils.Helper();
var locationHash = helper.locationHash();
if (locationHash) {
conversation.set("callId", locationHash.match(/\#incoming\/(.*)/)[1]);
}
React.renderComponent(IncomingConversationView({
client: client,
conversation: conversation,
notifications: notifications,
sdk: window.OT}
), document.querySelector('#main'));
}
return {
ConversationRouter: ConversationRouter,
IncomingConversationView: IncomingConversationView,
IncomingCallView: IncomingCallView,
init: init
};
})(window.OT, document.mozL10n);
})(document.mozL10n);
document.addEventListener('DOMContentLoaded', loop.conversation.init);
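Editor's note: taken together, the new IncomingConversationView replaces the Backbone router's routes with a single piece of React state, callStatus, whose values drive the render() switch above. A rough summary of the transitions as implied by the hunks (event names are those bound in componentDidMount):

    // callStatus transitions implied by the diff above:
    // "start"     initial state while _setupWebSocket() connects the progress websocket
    // "incoming"  promiseConnect() resolved: setState({callStatus: "incoming"}) shows IncomingCallView
    // "connected" accepted() runs on "call:accepted": ConversationView is rendered
    // "end"       endCall, peer hung up, network disconnected, or a connection error: FeedbackView
    // "close"     render() calls window.close() (the code that sets this state is in a hunk not shown here)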

View File

@ -8,16 +8,11 @@
/* global loop:true, React */
var loop = loop || {};
loop.conversation = (function(OT, mozL10n) {
loop.conversation = (function(mozL10n) {
"use strict";
var sharedViews = loop.shared.views;
/**
* App router.
* @type {loop.desktopRouter.DesktopConversationRouter}
*/
var router;
var sharedViews = loop.shared.views,
sharedModels = loop.shared.models;
var IncomingCallView = React.createClass({
@ -200,92 +195,183 @@ loop.conversation = (function(OT, mozL10n) {
});
/**
* Conversation router.
* This view manages the incoming conversation views - from
* call initiation through to the actual conversation and call end.
*
* Required options:
* - {loop.shared.models.ConversationModel} conversation Conversation model.
* - {loop.shared.models.NotificationCollection} notifications
*
* @type {loop.shared.router.BaseConversationRouter}
* At the moment, it does more than that, these parts need refactoring out.
*/
var ConversationRouter = loop.desktopRouter.DesktopConversationRouter.extend({
routes: {
"incoming/:callId": "incoming",
"call/accept": "accept",
"call/decline": "decline",
"call/ongoing": "conversation",
"call/declineAndBlock": "declineAndBlock",
"call/shutdown": "shutdown",
"call/feedback": "feedback"
var IncomingConversationView = React.createClass({
propTypes: {
client: React.PropTypes.instanceOf(loop.Client).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired
},
getInitialState: function() {
return {
callStatus: "start"
}
},
componentDidMount: function() {
this.props.conversation.on("accept", this.accept, this);
this.props.conversation.on("decline", this.decline, this);
this.props.conversation.on("declineAndBlock", this.declineAndBlock, this);
this.props.conversation.on("call:accepted", this.accepted, this);
this.props.conversation.on("change:publishedStream", this._checkConnected, this);
this.props.conversation.on("change:subscribedStream", this._checkConnected, this);
this.props.conversation.on("session:ended", this.endCall, this);
this.props.conversation.on("session:peer-hungup", this._onPeerHungup, this);
this.props.conversation.on("session:network-disconnected", this._onNetworkDisconnected, this);
this.props.conversation.on("session:connection-error", this._notifyError, this);
this.setupIncomingCall();
},
componentDidUnmount: function() {
this.props.conversation.off(null, null, this);
},
render: function() {
switch (this.state.callStatus) {
case "start": {
document.title = mozL10n.get("incoming_call_title2");
// XXX Don't render anything initially, though this should probably
// be some sort of pending view, whilst we connect the websocket.
return null;
}
case "incoming": {
document.title = mozL10n.get("incoming_call_title2");
return (
<IncomingCallView
model={this.props.conversation}
video={this.props.conversation.hasVideoStream("incoming")}
/>
);
}
case "connected": {
// XXX This should be the caller id (bug 1020449)
document.title = mozL10n.get("incoming_call_title2");
var callType = this.props.conversation.get("selectedCallType");
return (
<sharedViews.ConversationView
initiate={true}
sdk={this.props.sdk}
model={this.props.conversation}
video={{enabled: callType !== "audio"}}
/>
);
}
case "end": {
document.title = mozL10n.get("conversation_has_ended");
var feebackAPIBaseUrl = navigator.mozLoop.getLoopCharPref(
"feedback.baseUrl");
var appVersionInfo = navigator.mozLoop.appVersionInfo;
var feedbackClient = new loop.FeedbackAPIClient(feebackAPIBaseUrl, {
product: navigator.mozLoop.getLoopCharPref("feedback.product"),
platform: appVersionInfo.OS,
channel: appVersionInfo.channel,
version: appVersionInfo.version
});
return (
<sharedViews.FeedbackView
feedbackApiClient={feedbackClient}
onAfterFeedbackReceived={this.closeWindow.bind(this)}
/>
);
}
case "close": {
window.close();
return (<div/>);
}
}
},
/**
* @override {loop.shared.router.BaseConversationRouter.startCall}
* Notify the user that the connection was not possible
* @param {{code: number, message: string}} error
*/
startCall: function() {
this.navigate("call/ongoing", {trigger: true});
_notifyError: function(error) {
console.error(error);
this.props.notifications.errorL10n("connection_error_see_console_notification");
this.setState({callStatus: "end"});
},
/**
* @override {loop.shared.router.BaseConversationRouter.endCall}
* Peer hung up. Notifies the user and ends the call.
*
* Event properties:
* - {String} connectionId: OT session id
*/
endCall: function() {
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
this.navigate("call/feedback", {trigger: true});
_onPeerHungup: function() {
this.props.notifications.warnL10n("peer_ended_conversation2");
this.setState({callStatus: "end"});
},
shutdown: function() {
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
/**
* Network disconnected. Notifies the user and ends the call.
*/
_onNetworkDisconnected: function() {
this.props.notifications.warnL10n("network_disconnected");
this.setState({callStatus: "end"});
},
/**
* Incoming call route.
*
* @param {String} callId Identifier assigned by the LoopService
* to this incoming call.
*/
incoming: function(callId) {
setupIncomingCall: function() {
navigator.mozLoop.startAlerting();
this._conversation.once("accept", function() {
this.navigate("call/accept", {trigger: true});
}.bind(this));
this._conversation.once("decline", function() {
this.navigate("call/decline", {trigger: true});
}.bind(this));
this._conversation.once("declineAndBlock", function() {
this.navigate("call/declineAndBlock", {trigger: true});
}.bind(this));
this._conversation.once("call:incoming", this.startCall, this);
this._conversation.once("change:publishedStream", this._checkConnected, this);
this._conversation.once("change:subscribedStream", this._checkConnected, this);
var callData = navigator.mozLoop.getCallData(callId);
var callData = navigator.mozLoop.getCallData(this.props.conversation.get("callId"));
if (!callData) {
console.error("Failed to get the call data");
// XXX Not the ideal response, but bug 1047410 will be replacing
// this by better "call failed" UI.
this._notifications.errorL10n("cannot_start_call_session_not_ready");
this.props.notifications.errorL10n("cannot_start_call_session_not_ready");
return;
}
this._conversation.setIncomingSessionData(callData);
this._setupWebSocketAndCallView();
this.props.conversation.setIncomingSessionData(callData);
this._setupWebSocket();
},
/**
* Starts the actual conversation
*/
accepted: function() {
this.setState({callStatus: "connected"});
},
/**
* Moves the call to the end state
*/
endCall: function() {
navigator.mozLoop.releaseCallData(this.props.conversation.get("callId"));
this.setState({callStatus: "end"});
},
/**
* Used to set up the web socket connection and navigate to the
* call view if appropriate.
*/
_setupWebSocketAndCallView: function() {
_setupWebSocket: function() {
this._websocket = new loop.CallConnectionWebSocket({
url: this._conversation.get("progressURL"),
websocketToken: this._conversation.get("websocketToken"),
callId: this._conversation.get("callId"),
url: this.props.conversation.get("progressURL"),
websocketToken: this.props.conversation.get("websocketToken"),
callId: this.props.conversation.get("callId"),
});
this._websocket.promiseConnect().then(function() {
this.loadReactComponent(loop.conversation.IncomingCallView({
model: this._conversation,
video: this._conversation.hasVideoStream("incoming")
}));
this.setState({callStatus: "incoming"});
}.bind(this), function() {
this._handleSessionError();
return;
@ -301,7 +387,7 @@ loop.conversation = (function(OT, mozL10n) {
_checkConnected: function() {
// Check we've had both local and remote streams connected before
// sending the media up message.
if (this._conversation.streamsConnected()) {
if (this.props.conversation.streamsConnected()) {
this._websocket.mediaUp();
}
},
@ -337,6 +423,12 @@ loop.conversation = (function(OT, mozL10n) {
_abortIncomingCall: function() {
navigator.mozLoop.stopAlerting();
this._websocket.close();
// Having a timeout here lets the logging for the websocket complete and be
// displayed on the console if both are on.
setTimeout(this.closeWindow, 0);
},
closeWindow: function() {
window.close();
},
@ -346,7 +438,7 @@ loop.conversation = (function(OT, mozL10n) {
accept: function() {
navigator.mozLoop.stopAlerting();
this._websocket.accept();
this._conversation.incoming();
this.props.conversation.accepted();
},
/**
@ -354,13 +446,11 @@ loop.conversation = (function(OT, mozL10n) {
*/
_declineCall: function() {
this._websocket.decline();
navigator.mozLoop.releaseCallData(this._conversation.get("callId"));
// XXX Don't close the window straight away, but let any sends happen
// first. Ideally we'd wait to close the window until after we have a
// response from the server, to know that everything has completed
// successfully. However, that's quite difficult to ensure at the
// moment so we'll add it later.
setTimeout(window.close, 0);
navigator.mozLoop.releaseCallData(this.props.conversation.get("callId"));
this._websocket.close();
// Having a timeout here lets the logging for the websocket complete and be
// displayed on the console if both are on.
setTimeout(this.closeWindow, 0);
},
/**
@ -379,8 +469,8 @@ loop.conversation = (function(OT, mozL10n) {
*/
declineAndBlock: function() {
navigator.mozLoop.stopAlerting();
var token = this._conversation.get("callToken");
this._client.deleteCallUrl(token, function(error) {
var token = this.props.conversation.get("callToken");
this.props.client.deleteCallUrl(token, function(error) {
// XXX The conversation window will be closed when this cb is triggered
// figure out if there is a better way to report the error to the user
// (bug 1048909).
@ -389,62 +479,14 @@ loop.conversation = (function(OT, mozL10n) {
this._declineCall();
},
/**
* conversation is the route when the conversation is active. The start
* route should be navigated to first.
*/
conversation: function() {
if (!this._conversation.isSessionReady()) {
console.error("Error: navigated to conversation route without " +
"the start route to initialise the call first");
this._handleSessionError();
return;
}
var callType = this._conversation.get("selectedCallType");
var videoStream = callType === "audio" ? false : true;
/*jshint newcap:false*/
this.loadReactComponent(sharedViews.ConversationView({
initiate: true,
sdk: OT,
model: this._conversation,
video: {enabled: videoStream}
}));
},
/**
* Handles a error starting the session
*/
_handleSessionError: function() {
// XXX Not the ideal response, but bug 1047410 will be replacing
// this by better "call failed" UI.
this._notifications.errorL10n("cannot_start_call_session_not_ready");
this.props.notifications.errorL10n("cannot_start_call_session_not_ready");
},
/**
* Call has ended, display a feedback form.
*/
feedback: function() {
document.title = mozL10n.get("conversation_has_ended");
var feebackAPIBaseUrl = navigator.mozLoop.getLoopCharPref(
"feedback.baseUrl");
var appVersionInfo = navigator.mozLoop.appVersionInfo;
var feedbackClient = new loop.FeedbackAPIClient(feebackAPIBaseUrl, {
product: navigator.mozLoop.getLoopCharPref("feedback.product"),
platform: appVersionInfo.OS,
channel: appVersionInfo.channel,
version: appVersionInfo.version
});
this.loadReactComponent(sharedViews.FeedbackView({
feedbackApiClient: feedbackClient,
onAfterFeedbackReceived: window.close.bind(window)
}));
}
});
/**
@ -457,44 +499,50 @@ loop.conversation = (function(OT, mozL10n) {
// Plug in an alternate client ID mechanism, as localStorage and cookies
// don't work in the conversation window
if (OT && OT.hasOwnProperty("overrideGuidStorage")) {
OT.overrideGuidStorage({
get: function(callback) {
callback(null, navigator.mozLoop.getLoopCharPref("ot.guid"));
},
set: function(guid, callback) {
navigator.mozLoop.setLoopCharPref("ot.guid", guid);
callback(null);
}
});
}
document.title = mozL10n.get("incoming_call_title2");
window.OT.overrideGuidStorage({
get: function(callback) {
callback(null, navigator.mozLoop.getLoopCharPref("ot.guid"));
},
set: function(guid, callback) {
navigator.mozLoop.setLoopCharPref("ot.guid", guid);
callback(null);
}
});
document.body.classList.add(loop.shared.utils.getTargetPlatform());
var client = new loop.Client();
router = new ConversationRouter({
client: client,
conversation: new loop.shared.models.ConversationModel(
{}, // Model attributes
{sdk: OT}), // Model dependencies
notifications: new loop.shared.models.NotificationCollection()
});
var conversation = new sharedModels.ConversationModel(
{}, // Model attributes
{sdk: window.OT} // Model dependencies
);
var notifications = new sharedModels.NotificationCollection();
window.addEventListener("unload", function(event) {
// Handle direct close of dialog box via [x] control.
navigator.mozLoop.releaseCallData(router._conversation.get("callId"));
navigator.mozLoop.releaseCallData(conversation.get("callId"));
});
Backbone.history.start();
// Obtain the callId and pass it to the conversation
var helper = new loop.shared.utils.Helper();
var locationHash = helper.locationHash();
if (locationHash) {
conversation.set("callId", locationHash.match(/\#incoming\/(.*)/)[1]);
}
React.renderComponent(<IncomingConversationView
client={client}
conversation={conversation}
notifications={notifications}
sdk={window.OT}
/>, document.querySelector('#main'));
}
return {
ConversationRouter: ConversationRouter,
IncomingConversationView: IncomingConversationView,
IncomingCallView: IncomingCallView,
init: init
};
})(window.OT, document.mozL10n);
})(document.mozL10n);
document.addEventListener('DOMContentLoaded', loop.conversation.init);

View File

@ -1,35 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* jshint esnext:true */
/* global loop:true */
var loop = loop || {};
loop.desktopRouter = (function() {
"use strict";
/**
* On the desktop app, the use of about: uris prevents us from changing the
* url of the location. As a result, we change the navigate function to simply
* activate the new routes, and not try changing the url.
*
* XXX It is conceivable we might be able to remove this in future, if we
* can either swap to resource uris or remove the limitation on the about uris.
*/
var extendedRouter = {
navigate: function(to) {
this[this.routes[to]]();
}
};
var DesktopRouter = loop.shared.router.BaseRouter.extend(extendedRouter);
var DesktopConversationRouter =
loop.shared.router.BaseConversationRouter.extend(extendedRouter);
return {
DesktopRouter: DesktopRouter,
DesktopConversationRouter: DesktopConversationRouter
};
})();
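Editor's note: with desktopRouter.js deleted, the desktop conversation window no longer routes at all. The only piece of the old URL scheme that survives is the incoming call id in the location hash, which init() in conversation.js now parses directly, as the hunks above show:

    var helper = new loop.shared.utils.Helper();
    var locationHash = helper.locationHash();   // e.g. "#incoming/42"
    if (locationHash) {
      conversation.set("callId", locationHash.match(/\#incoming\/(.*)/)[1]);
    }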

View File

@ -17,12 +17,6 @@ loop.panel = (function(_, mozL10n) {
var ContactsList = loop.contacts.ContactsList;
var __ = mozL10n.get; // aliasing translation function as __ for concision
/**
* Panel router.
* @type {loop.desktopRouter.DesktopRouter}
*/
var router;
var TabView = React.createClass({displayName: 'TabView',
getInitialState: function() {
return {

View File

@ -17,12 +17,6 @@ loop.panel = (function(_, mozL10n) {
var ContactsList = loop.contacts.ContactsList;
var __ = mozL10n.get; // aliasing translation function as __ for concision
/**
* Panel router.
* @type {loop.desktopRouter.DesktopRouter}
*/
var router;
var TabView = React.createClass({
getInitialState: function() {
return {

View File

@ -77,10 +77,10 @@ loop.shared.models = (function(l10n) {
},
/**
* Starts an incoming conversation.
* Indicates an incoming conversation has been accepted.
*/
incoming: function() {
this.trigger("call:incoming");
accepted: function() {
this.trigger("call:accepted");
},
/**
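Editor's note: the rename above (incoming()/"call:incoming" becoming accepted()/"call:accepted") is what lets the new React view listen on the model directly instead of going through the router; the matching listener change is visible in the conversation.js hunks:

    // before (router-based):
    this._conversation.once("call:incoming", this.startCall, this);
    // after (React view):
    this.props.conversation.on("call:accepted", this.accepted, this);
    // and the websocket accept handler now calls:
    this.props.conversation.accepted();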

View File

@ -1,153 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* global loop:true */
var loop = loop || {};
loop.shared = loop.shared || {};
loop.shared.router = (function() {
"use strict";
/**
* Base Router. Allows defining a main active view and ease toggling it when
* the active route changes.
*
* @link http://mikeygee.com/blog/backbone.html
*/
var BaseRouter = Backbone.Router.extend({
/**
* Notifications collection.
* @type {loop.shared.models.NotificationCollection}
*/
_notifications: undefined,
/**
* Constructor.
*
* Required options:
* - {loop.shared.models.NotificationCollection} notifications
*
* @param {Object} options Options object.
*/
constructor: function(options) {
options = options || {};
if (!options.notifications) {
throw new Error("missing required notifications");
}
this._notifications = options.notifications;
Backbone.Router.apply(this, arguments);
},
/**
* Renders a React component as current active view.
*
* @param {React} reactComponent React component.
*/
loadReactComponent: function(reactComponent) {
this.clearActiveView();
React.renderComponent(reactComponent,
document.querySelector("#main"));
},
/**
* Clears current active view.
*/
clearActiveView: function() {
React.unmountComponentAtNode(document.querySelector("#main"));
}
});
/**
* Base conversation router, implementing common behaviors when handling
* a conversation.
*/
var BaseConversationRouter = BaseRouter.extend({
/**
* Current conversation.
* @type {loop.shared.models.ConversationModel}
*/
_conversation: undefined,
/**
* Constructor. Defining it as `constructor` allows implementing an
* `initialize` method in child classes without needing calling this parent
* one. See http://backbonejs.org/#Model-constructor (same for Router)
*
* Required options:
* - {loop.shared.model.ConversationModel} model Conversation model.
*
* @param {Object} options Options object.
*/
constructor: function(options) {
options = options || {};
if (!options.conversation) {
throw new Error("missing required conversation");
}
if (!options.client) {
throw new Error("missing required client");
}
this._conversation = options.conversation;
this._client = options.client;
this.listenTo(this._conversation, "session:ended", this._onSessionEnded);
this.listenTo(this._conversation, "session:peer-hungup",
this._onPeerHungup);
this.listenTo(this._conversation, "session:network-disconnected",
this._onNetworkDisconnected);
this.listenTo(this._conversation, "session:connection-error",
this._notifyError);
BaseRouter.apply(this, arguments);
},
/**
* Notify the user that the connection was not possible
* @param {{code: number, message: string}} error
*/
_notifyError: function(error) {
console.log(error);
this._notifications.errorL10n("connection_error_see_console_notification");
this.endCall();
},
/**
* Ends the call. This method should be overriden.
*/
endCall: function() {},
/**
* Session has ended. Notifies the user and ends the call.
*/
_onSessionEnded: function() {
this.endCall();
},
/**
* Peer hung up. Notifies the user and ends the call.
*
* Event properties:
* - {String} connectionId: OT session id
*
* @param {Object} event
*/
_onPeerHungup: function() {
this._notifications.warnL10n("peer_ended_conversation2");
this.endCall();
},
/**
* Network disconnected. Notifies the user and ends the call.
*/
_onNetworkDisconnected: function() {
this._notifications.warnL10n("network_disconnected");
this.endCall();
}
});
return {
BaseRouter: BaseRouter,
BaseConversationRouter: BaseConversationRouter
};
})();

View File

@ -46,7 +46,29 @@ loop.shared.utils = (function() {
return !!localStorage.getItem(prefName);
}
/**
* Helper for general things
*/
function Helper() {
this._iOSRegex = /^(iPad|iPhone|iPod)/;
}
Helper.prototype = {
isFirefox: function(platform) {
return platform.indexOf("Firefox") !== -1;
},
isIOS: function(platform) {
return this._iOSRegex.test(platform);
},
locationHash: function() {
return window.location.hash;
}
};
return {
Helper: Helper,
getTargetPlatform: getTargetPlatform,
getBoolPreference: getBoolPreference
};
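Editor's note: this Helper is the former WebappHelper moved from webapp.js (see the webapp.js hunks below) so the same platform checks can be shared between the standalone web app and the desktop conversation window. A short usage sketch based only on the methods shown above:

    var helper = new loop.shared.utils.Helper();
    helper.isFirefox(navigator.userAgent);   // true if the string contains "Firefox"
    helper.isIOS(navigator.platform);        // true for iPad/iPhone/iPod platforms
    helper.locationHash();                   // window.location.hash, e.g. "#incoming/42"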

View File

@ -12,7 +12,6 @@ browser.jar:
# Desktop script
content/browser/loop/js/client.js (content/js/client.js)
content/browser/loop/js/desktopRouter.js (content/js/desktopRouter.js)
content/browser/loop/js/conversation.js (content/js/conversation.js)
content/browser/loop/js/otconfig.js (content/js/otconfig.js)
content/browser/loop/js/panel.js (content/js/panel.js)
@ -55,7 +54,6 @@ browser.jar:
# Shared scripts
content/browser/loop/shared/js/feedbackApiClient.js (content/shared/js/feedbackApiClient.js)
content/browser/loop/shared/js/models.js (content/shared/js/models.js)
content/browser/loop/shared/js/router.js (content/shared/js/router.js)
content/browser/loop/shared/js/mixins.js (content/shared/js/mixins.js)
content/browser/loop/shared/js/views.js (content/shared/js/views.js)
content/browser/loop/shared/js/utils.js (content/shared/js/utils.js)

View File

@ -15,7 +15,8 @@ loop.webapp = (function($, _, OT, mozL10n) {
loop.config.serverUrl = loop.config.serverUrl || "http://localhost:5000";
var sharedModels = loop.shared.models,
sharedViews = loop.shared.views;
sharedViews = loop.shared.views,
sharedUtils = loop.shared.utils;
/**
* Homepage view.
@ -314,7 +315,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
var privacy_notice_name = mozL10n.get("privacy_notice_link_text");
var tosHTML = mozL10n.get("legal_text_and_links", {
"terms_of_use_url": "<a target=_blank href='/legal/terms'>" +
"terms_of_use_url": "<a target=_blank href='/legal/terms/'>" +
tos_link_name + "</a>",
"privacy_notice_url": "<a target=_blank href='" +
"https://www.mozilla.org/privacy/'>" + privacy_notice_name + "</a>"
@ -435,7 +436,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
client: React.PropTypes.instanceOf(loop.StandaloneClient).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
helper: React.PropTypes.instanceOf(WebappHelper).isRequired,
helper: React.PropTypes.instanceOf(sharedUtils.Helper).isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired,
@ -690,7 +691,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
client: React.PropTypes.instanceOf(loop.StandaloneClient).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
helper: React.PropTypes.instanceOf(WebappHelper).isRequired,
helper: React.PropTypes.instanceOf(sharedUtils.Helper).isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired,
@ -726,32 +727,11 @@ loop.webapp = (function($, _, OT, mozL10n) {
}
});
/**
* Local helpers.
*/
function WebappHelper() {
this._iOSRegex = /^(iPad|iPhone|iPod)/;
}
WebappHelper.prototype = {
isFirefox: function(platform) {
return platform.indexOf("Firefox") !== -1;
},
isIOS: function(platform) {
return this._iOSRegex.test(platform);
},
locationHash: function() {
return window.location.hash;
}
};
/**
* App initialization.
*/
function init() {
var helper = new WebappHelper();
var helper = new sharedUtils.Helper();
var client = new loop.StandaloneClient({
baseServerUrl: loop.config.serverUrl
});
@ -797,7 +777,6 @@ loop.webapp = (function($, _, OT, mozL10n) {
UnsupportedDeviceView: UnsupportedDeviceView,
init: init,
PromoteFirefoxView: PromoteFirefoxView,
WebappHelper: WebappHelper,
WebappRootView: WebappRootView
};
})(jQuery, _, window.OT, navigator.mozL10n);

View File

@ -15,7 +15,8 @@ loop.webapp = (function($, _, OT, mozL10n) {
loop.config.serverUrl = loop.config.serverUrl || "http://localhost:5000";
var sharedModels = loop.shared.models,
sharedViews = loop.shared.views;
sharedViews = loop.shared.views,
sharedUtils = loop.shared.utils;
/**
* Homepage view.
@ -314,7 +315,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
var privacy_notice_name = mozL10n.get("privacy_notice_link_text");
var tosHTML = mozL10n.get("legal_text_and_links", {
"terms_of_use_url": "<a target=_blank href='/legal/terms'>" +
"terms_of_use_url": "<a target=_blank href='/legal/terms/'>" +
tos_link_name + "</a>",
"privacy_notice_url": "<a target=_blank href='" +
"https://www.mozilla.org/privacy/'>" + privacy_notice_name + "</a>"
@ -435,7 +436,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
client: React.PropTypes.instanceOf(loop.StandaloneClient).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
helper: React.PropTypes.instanceOf(WebappHelper).isRequired,
helper: React.PropTypes.instanceOf(sharedUtils.Helper).isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired,
@ -690,7 +691,7 @@ loop.webapp = (function($, _, OT, mozL10n) {
client: React.PropTypes.instanceOf(loop.StandaloneClient).isRequired,
conversation: React.PropTypes.instanceOf(sharedModels.ConversationModel)
.isRequired,
helper: React.PropTypes.instanceOf(WebappHelper).isRequired,
helper: React.PropTypes.instanceOf(sharedUtils.Helper).isRequired,
notifications: React.PropTypes.instanceOf(sharedModels.NotificationCollection)
.isRequired,
sdk: React.PropTypes.object.isRequired,
@ -726,32 +727,11 @@ loop.webapp = (function($, _, OT, mozL10n) {
}
});
/**
* Local helpers.
*/
function WebappHelper() {
this._iOSRegex = /^(iPad|iPhone|iPod)/;
}
WebappHelper.prototype = {
isFirefox: function(platform) {
return platform.indexOf("Firefox") !== -1;
},
isIOS: function(platform) {
return this._iOSRegex.test(platform);
},
locationHash: function() {
return window.location.hash;
}
};
/**
* App initialization.
*/
function init() {
var helper = new WebappHelper();
var helper = new sharedUtils.Helper();
var client = new loop.StandaloneClient({
baseServerUrl: loop.config.serverUrl
});
@ -797,7 +777,6 @@ loop.webapp = (function($, _, OT, mozL10n) {
UnsupportedDeviceView: UnsupportedDeviceView,
init: init,
PromoteFirefoxView: PromoteFirefoxView,
WebappHelper: WebappHelper,
WebappRootView: WebappRootView
};
})(jQuery, _, window.OT, navigator.mozL10n);

View File

@ -9,10 +9,24 @@ var expect = chai.expect;
describe("loop.conversation", function() {
"use strict";
var ConversationRouter = loop.conversation.ConversationRouter,
var sharedModels = loop.shared.models,
sharedView = loop.shared.views,
sandbox,
notifications;
// XXX refactor to Just Work with "sandbox.stubComponent" or else
// just pass in the sandbox and put somewhere generally usable
function stubComponent(obj, component, mockTagName){
var reactClass = React.createClass({
render: function() {
var mockTagName = mockTagName || "div";
return React.DOM[mockTagName](null, this.props.children);
}
});
return sandbox.stub(obj, component, reactClass);
}
beforeEach(function() {
sandbox = sinon.sandbox.create();
sandbox.useFakeTimers();
@ -26,14 +40,14 @@ describe("loop.conversation", function() {
get locale() {
return "en-US";
},
setLoopCharPref: sandbox.stub(),
getLoopCharPref: sandbox.stub(),
getLoopBoolPref: sandbox.stub(),
getCallData: sandbox.stub(),
releaseCallData: function() {},
startAlerting: function() {},
stopAlerting: function() {},
ensureRegistered: function() {},
setLoopCharPref: sinon.stub(),
getLoopCharPref: sinon.stub(),
getLoopBoolPref: sinon.stub(),
getCallData: sinon.stub(),
releaseCallData: sinon.stub(),
startAlerting: sinon.stub(),
stopAlerting: sinon.stub(),
ensureRegistered: sinon.stub(),
get appVersionInfo() {
return {
version: "42",
@ -57,21 +71,19 @@ describe("loop.conversation", function() {
var oldTitle;
beforeEach(function() {
oldTitle = document.title;
sandbox.stub(React, "renderComponent");
sandbox.stub(document.mozL10n, "initialize");
sandbox.stub(document.mozL10n, "get").returns("Fake title");
sandbox.stub(loop.conversation.ConversationRouter.prototype,
"initialize");
sandbox.stub(loop.shared.models.ConversationModel.prototype,
"initialize");
sandbox.stub(Backbone.history, "start");
window.OT = {
overrideGuidStorage: sinon.stub()
};
});
afterEach(function() {
document.title = oldTitle;
delete window.OT;
});
it("should initalize L10n", function() {
@ -82,300 +94,256 @@ describe("loop.conversation", function() {
navigator.mozLoop);
});
it("should set the document title", function() {
it("should create the IncomingConversationView", function() {
loop.conversation.init();
expect(document.title).to.be.equal("Fake title");
sinon.assert.calledOnce(React.renderComponent);
sinon.assert.calledWith(React.renderComponent,
sinon.match(function(value) {
return TestUtils.isDescriptorOfType(value,
loop.conversation.IncomingConversationView);
}));
});
it("should create the router", function() {
loop.conversation.init();
sinon.assert.calledOnce(
loop.conversation.ConversationRouter.prototype.initialize);
});
it("should start Backbone history", function() {
loop.conversation.init();
sinon.assert.calledOnce(Backbone.history.start);
});
});
describe("ConversationRouter", function() {
var conversation, client;
describe("IncomingConversationView", function() {
var conversation, client, icView, oldTitle;
function mountTestComponent() {
return TestUtils.renderIntoDocument(
loop.conversation.IncomingConversationView({
client: client,
conversation: conversation,
notifications: notifications,
sdk: {}
}));
}
beforeEach(function() {
oldTitle = document.title;
client = new loop.Client();
conversation = new loop.shared.models.ConversationModel({}, {
sdk: {}
});
sandbox.spy(conversation, "setIncomingSessionData");
conversation.set({callId: 42});
sandbox.stub(conversation, "setOutgoingSessionData");
});
describe("Routes", function() {
var router;
afterEach(function() {
icView = undefined;
document.title = oldTitle;
});
describe("start", function() {
it("should set the title to incoming_call_title2", function() {
sandbox.stub(document.mozL10n, "get", function(x) {
return x;
});
icView = mountTestComponent();
expect(document.title).eql("incoming_call_title2");
});
});
describe("componentDidMount", function() {
var fakeSessionData;
beforeEach(function() {
router = new ConversationRouter({
client: client,
conversation: conversation,
notifications: notifications
});
sandbox.stub(conversation, "incoming");
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey",
callType: "callType",
callId: "Hello",
progressURL: "http://progress.example.com",
websocketToken: "7b"
};
navigator.mozLoop.getCallData.returns(fakeSessionData);
stubComponent(loop.conversation, "IncomingCallView");
stubComponent(sharedView, "ConversationView");
});
describe("#incoming", function() {
it("should start alerting", function() {
icView = mountTestComponent();
// XXX refactor to Just Work with "sandbox.stubComponent" or else
// just pass in the sandbox and put somewhere generally usable
sinon.assert.calledOnce(navigator.mozLoop.startAlerting);
});
function stubComponent(obj, component, mockTagName){
var reactClass = React.createClass({
render: function() {
var mockTagName = mockTagName || "div";
return React.DOM[mockTagName](null, this.props.children);
}
});
return sandbox.stub(obj, component, reactClass);
}
it("should call getCallData on navigator.mozLoop", function() {
icView = mountTestComponent();
beforeEach(function() {
sandbox.stub(router, "loadReactComponent");
stubComponent(loop.conversation, "IncomingCallView");
});
sinon.assert.calledOnce(navigator.mozLoop.getCallData);
sinon.assert.calledWith(navigator.mozLoop.getCallData, 42);
});
it("should start alerting", function() {
sandbox.stub(navigator.mozLoop, "startAlerting");
router.incoming("fakeVersion");
sinon.assert.calledOnce(navigator.mozLoop.startAlerting);
});
it("should call getCallData on navigator.mozLoop",
function() {
router.incoming(42);
sinon.assert.calledOnce(navigator.mozLoop.getCallData);
sinon.assert.calledWith(navigator.mozLoop.getCallData, 42);
});
describe("getCallData successful", function() {
var fakeSessionData, resolvePromise, rejectPromise;
describe("getCallData successful", function() {
var promise, resolveWebSocketConnect,
rejectWebSocketConnect;
describe("Session Data setup", function() {
beforeEach(function() {
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey",
callType: "callType",
callId: "Hello",
progressURL: "http://progress.example.com",
websocketToken: 123
};
sandbox.stub(router, "_setupWebSocketAndCallView");
navigator.mozLoop.getCallData.returns(fakeSessionData);
sandbox.stub(loop, "CallConnectionWebSocket").returns({
promiseConnect: function () {
promise = new Promise(function(resolve, reject) {
resolveWebSocketConnect = resolve;
rejectWebSocketConnect = reject;
});
return promise;
},
on: sinon.stub()
});
});
it("should store the session data", function() {
router.incoming("fakeVersion");
sandbox.stub(conversation, "setIncomingSessionData");
icView = mountTestComponent();
sinon.assert.calledOnce(conversation.setIncomingSessionData);
sinon.assert.calledWithExactly(conversation.setIncomingSessionData,
fakeSessionData);
});
it("should call #_setupWebSocketAndCallView", function() {
it("should setup the websocket connection", function() {
icView = mountTestComponent();
router.incoming("fakeVersion");
sinon.assert.calledOnce(router._setupWebSocketAndCallView);
sinon.assert.calledWithExactly(router._setupWebSocketAndCallView);
sinon.assert.calledOnce(loop.CallConnectionWebSocket);
sinon.assert.calledWithExactly(loop.CallConnectionWebSocket, {
callId: "Hello",
url: "http://progress.example.com",
websocketToken: "7b"
});
});
});
describe("#_setupWebSocketAndCallView", function() {
describe("WebSocket Handling", function() {
beforeEach(function() {
conversation.setIncomingSessionData({
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey",
callType: "callType",
callId: "Hello",
progressURL: "http://progress.example.com",
websocketToken: 123
promise = new Promise(function(resolve, reject) {
resolveWebSocketConnect = resolve;
rejectWebSocketConnect = reject;
});
sandbox.stub(loop.CallConnectionWebSocket.prototype, "promiseConnect").returns(promise);
});
it("should set the state to incoming on success", function(done) {
icView = mountTestComponent();
resolveWebSocketConnect();
promise.then(function () {
expect(icView.state.callStatus).eql("incoming");
done();
});
});
describe("Websocket connection successful", function() {
var promise;
it("should display an error if the websocket failed to connect", function(done) {
sandbox.stub(notifications, "errorL10n");
icView = mountTestComponent();
rejectWebSocketConnect();
promise.then(function() {
}, function () {
sinon.assert.calledOnce(notifications.errorL10n);
sinon.assert.calledWithExactly(notifications.errorL10n,
"cannot_start_call_session_not_ready");
done();
});
});
});
describe("WebSocket Events", function() {
describe("Call cancelled or timed out before acceptance", function() {
beforeEach(function() {
sandbox.stub(loop, "CallConnectionWebSocket").returns({
promiseConnect: function() {
promise = new Promise(function(resolve, reject) {
resolve();
icView = mountTestComponent();
promise = new Promise(function(resolve, reject) {
resolve();
});
sandbox.stub(loop.CallConnectionWebSocket.prototype, "promiseConnect").returns(promise);
sandbox.stub(loop.CallConnectionWebSocket.prototype, "close");
sandbox.stub(window, "close");
});
describe("progress - terminated - cancel", function() {
it("should stop alerting", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
return promise;
},
on: sinon.spy()
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
done();
});
});
it("should close the websocket", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
sinon.assert.calledOnce(icView._websocket.close);
done();
});
});
it("should close the window", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
sandbox.clock.tick(1);
sinon.assert.calledOnce(window.close);
done();
});
});
});
it("should create a CallConnectionWebSocket", function(done) {
router._setupWebSocketAndCallView();
describe("progress - terminated - timeout (previousState = alerting)", function() {
it("should stop alerting", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
promise.then(function () {
sinon.assert.calledOnce(loop.CallConnectionWebSocket);
sinon.assert.calledWithExactly(loop.CallConnectionWebSocket, {
callId: "Hello",
url: "http://progress.example.com",
// The websocket token is converted to a hex string.
websocketToken: "7b"
});
done();
});
});
it("should create the view with video=false", function(done) {
sandbox.stub(conversation, "get").withArgs("callType").returns("audio");
router._setupWebSocketAndCallView();
promise.then(function () {
sinon.assert.called(conversation.get);
sinon.assert.calledOnce(loop.conversation.IncomingCallView);
sinon.assert.calledWithExactly(loop.conversation.IncomingCallView,
{model: conversation,
video: false});
done();
});
});
});
describe("Websocket connection failed", function() {
var promise;
beforeEach(function() {
sandbox.stub(loop, "CallConnectionWebSocket").returns({
promiseConnect: function() {
promise = new Promise(function(resolve, reject) {
reject();
});
return promise;
},
on: sinon.spy()
});
});
it("should display an error", function(done) {
sandbox.stub(notifications, "errorL10n");
router._setupWebSocketAndCallView();
promise.then(function() {
}, function () {
sinon.assert.calledOnce(router._notifications.errorL10n);
sinon.assert.calledWithExactly(router._notifications.errorL10n,
"cannot_start_call_session_not_ready");
done();
});
});
});
describe("Events", function() {
describe("Call cancelled or timed out before acceptance", function() {
var promise;
beforeEach(function() {
sandbox.stub(loop.CallConnectionWebSocket.prototype, "promiseConnect", function() {
promise = new Promise(function(resolve, reject) {
resolve();
});
return promise;
});
sandbox.stub(loop.CallConnectionWebSocket.prototype, "close");
sandbox.stub(navigator.mozLoop, "stopAlerting");
sandbox.stub(window, "close");
router._setupWebSocketAndCallView();
});
describe("progress - terminated - cancel", function() {
it("should stop alerting", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
done();
});
});
it("should close the websocket", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
sinon.assert.calledOnce(router._websocket.close);
done();
});
});
it("should close the window", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "cancel"
});
sinon.assert.calledOnce(window.close);
done();
});
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
done();
});
});
describe("progress - terminated - timeout (previousState = alerting)", function() {
it("should stop alerting", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
it("should close the websocket", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
done();
});
sinon.assert.calledOnce(icView._websocket.close);
done();
});
});
it("should close the websocket", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
it("should close the window", function(done) {
promise.then(function() {
icView._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
sinon.assert.calledOnce(router._websocket.close);
done();
});
});
sandbox.clock.tick(1);
it("should close the window", function(done) {
promise.then(function() {
router._websocket.trigger("progress", {
state: "terminated",
reason: "timeout"
}, "alerting");
sinon.assert.calledOnce(window.close);
done();
});
sinon.assert.calledOnce(window.close);
done();
});
});
});
@ -385,6 +353,7 @@ describe("loop.conversation", function() {
describe("#accept", function() {
beforeEach(function() {
icView = mountTestComponent();
conversation.setIncomingSessionData({
sessionId: "sessionId",
sessionToken: "sessionToken",
@ -394,72 +363,38 @@ describe("loop.conversation", function() {
progressURL: "http://progress.example.com",
websocketToken: 123
});
router._setupWebSocketAndCallView();
sandbox.stub(router._websocket, "accept");
sandbox.stub(navigator.mozLoop, "stopAlerting");
sandbox.stub(icView._websocket, "accept");
sandbox.stub(icView.props.conversation, "accepted");
});
it("should initiate the conversation", function() {
router.accept();
icView.accept();
sinon.assert.calledOnce(conversation.incoming);
sinon.assert.calledOnce(icView.props.conversation.accepted);
});
it("should notify the websocket of the user acceptance", function() {
router.accept();
icView.accept();
sinon.assert.calledOnce(router._websocket.accept);
sinon.assert.calledOnce(icView._websocket.accept);
});
it("should stop alerting", function() {
router.accept();
icView.accept();
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
});
});
describe("#conversation", function() {
beforeEach(function() {
sandbox.stub(router, "loadReactComponent");
});
it("should load the ConversationView if session is set", function() {
conversation.set("sessionId", "fakeSessionId");
router.conversation();
sinon.assert.calledOnce(router.loadReactComponent);
sinon.assert.calledWith(router.loadReactComponent,
sinon.match(function(value) {
return TestUtils.isDescriptorOfType(value,
loop.shared.views.ConversationView);
}));
});
it("should not load the ConversationView if session is not set",
function() {
router.conversation();
sinon.assert.notCalled(router.loadReactComponent);
});
it("should notify the user when session is not set",
function() {
sandbox.stub(notifications, "errorL10n");
router.conversation();
sinon.assert.calledOnce(router._notifications.errorL10n);
sinon.assert.calledWithExactly(router._notifications.errorL10n,
"cannot_start_call_session_not_ready");
});
});
describe("#decline", function() {
beforeEach(function() {
icView = mountTestComponent();
sandbox.stub(window, "close");
router._websocket = {
decline: sandbox.spy()
icView._websocket = {
decline: sinon.stub(),
close: sinon.stub()
};
conversation.setIncomingSessionData({
callId: 8699,
@ -468,76 +403,40 @@ describe("loop.conversation", function() {
});
it("should close the window", function() {
router.decline();
icView.decline();
sandbox.clock.tick(1);
sinon.assert.calledOnce(window.close);
});
it("should stop alerting", function() {
sandbox.stub(navigator.mozLoop, "stopAlerting");
router.decline();
icView.decline();
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
});
it("should release callData", function() {
sandbox.stub(navigator.mozLoop, "releaseCallData");
router.decline();
icView.decline();
sinon.assert.calledOnce(navigator.mozLoop.releaseCallData);
sinon.assert.calledWithExactly(navigator.mozLoop.releaseCallData, 8699);
});
});
describe("#feedback", function() {
var oldTitle;
beforeEach(function() {
oldTitle = document.title;
sandbox.stub(document.mozL10n, "get").returns("Call ended");
});
beforeEach(function() {
sandbox.stub(loop, "FeedbackAPIClient");
sandbox.stub(router, "loadReactComponent");
});
afterEach(function() {
document.title = oldTitle;
});
// XXX When the call is ended gracefully, we should check that we
// close connections nicely (see bug 1046744)
it("should display a feedback form view", function() {
router.feedback();
sinon.assert.calledOnce(router.loadReactComponent);
sinon.assert.calledWith(router.loadReactComponent,
sinon.match(function(value) {
return TestUtils.isDescriptorOfType(value,
loop.shared.views.FeedbackView);
}));
});
it("should update the conversation window title", function() {
router.feedback();
expect(document.title).eql("Call ended");
});
});
describe("#blocked", function() {
beforeEach(function() {
router._websocket = {
decline: sandbox.spy()
icView = mountTestComponent();
icView._websocket = {
decline: sinon.spy(),
close: sinon.stub()
};
sandbox.stub(window, "close");
});
it("should call mozLoop.stopAlerting", function() {
sandbox.stub(navigator.mozLoop, "stopAlerting");
router.declineAndBlock();
icView.declineAndBlock();
sinon.assert.calledOnce(navigator.mozLoop.stopAlerting);
});
@ -547,7 +446,7 @@ describe("loop.conversation", function() {
.returns("fakeToken");
var deleteCallUrl = sandbox.stub(loop.Client.prototype,
"deleteCallUrl");
router.declineAndBlock();
icView.declineAndBlock();
sinon.assert.calledOnce(deleteCallUrl);
sinon.assert.calledWithExactly(deleteCallUrl, "fakeToken",
@ -556,7 +455,7 @@ describe("loop.conversation", function() {
it("should get callToken from conversation model", function() {
sandbox.stub(conversation, "get");
router.declineAndBlock();
icView.declineAndBlock();
sinon.assert.calledTwice(conversation.get);
sinon.assert.calledWithExactly(conversation.get, "callToken");
@ -572,14 +471,14 @@ describe("loop.conversation", function() {
sandbox.stub(loop.Client.prototype, "deleteCallUrl", function(_, cb) {
cb(fakeError);
});
router.declineAndBlock();
icView.declineAndBlock();
sinon.assert.calledOnce(log);
sinon.assert.calledWithExactly(log, fakeError);
});
it("should close the window", function() {
router.declineAndBlock();
icView.declineAndBlock();
sandbox.clock.tick(1);
@ -589,63 +488,66 @@ describe("loop.conversation", function() {
});
describe("Events", function() {
var router, fakeSessionData;
var fakeSessionData;
beforeEach(function() {
icView = mountTestComponent();
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey"
};
sandbox.stub(loop.conversation.ConversationRouter.prototype,
"navigate");
conversation.set("loopToken", "fakeToken");
router = new loop.conversation.ConversationRouter({
client: client,
conversation: conversation,
notifications: notifications
});
navigator.mozLoop.getLoopCharPref.returns("http://fake");
stubComponent(sharedView, "ConversationView");
});
it("should navigate to call/ongoing once the call is ready",
function() {
router.incoming(42);
describe("call:accepted", function() {
it("should display the ConversationView",
function() {
conversation.accepted();
conversation.incoming();
sinon.assert.calledOnce(router.navigate);
sinon.assert.calledWith(router.navigate, "call/ongoing");
});
it("should navigate to call/feedback when the call session ends",
function() {
conversation.trigger("session:ended");
sinon.assert.calledOnce(router.navigate);
sinon.assert.calledWith(router.navigate, "call/feedback");
});
it("should navigate to call/feedback when peer hangs up", function() {
conversation.trigger("session:peer-hungup");
sinon.assert.calledOnce(router.navigate);
sinon.assert.calledWith(router.navigate, "call/feedback");
TestUtils.findRenderedComponentWithType(icView,
sharedView.ConversationView);
});
});
it("should navigate to call/feedback when network disconnects",
function() {
conversation.trigger("session:network-disconnected");
describe("session:ended", function() {
it("should display the feedback view when the call session ends",
function() {
conversation.trigger("session:ended");
sinon.assert.calledOnce(router.navigate);
sinon.assert.calledWith(router.navigate, "call/feedback");
});
TestUtils.findRenderedComponentWithType(icView,
sharedView.FeedbackView);
});
});
describe("session:peer-hungup", function() {
it("should display the feedback view when the peer hangs up",
function() {
conversation.trigger("session:peer-hungup");
TestUtils.findRenderedComponentWithType(icView,
sharedView.FeedbackView);
});
});
describe("session:peer-hungup", function() {
it("should navigate to call/feedback when network disconnects",
function() {
conversation.trigger("session:network-disconnected");
TestUtils.findRenderedComponentWithType(icView,
sharedView.FeedbackView);
});
});
describe("Published and Subscribed Streams", function() {
beforeEach(function() {
router._websocket = {
icView._websocket = {
mediaUp: sinon.spy()
};
router.incoming("fakeVersion");
});
describe("publishStream", function() {
@ -653,7 +555,7 @@ describe("loop.conversation", function() {
function() {
conversation.set("publishedStream", true);
sinon.assert.notCalled(router._websocket.mediaUp);
sinon.assert.notCalled(icView._websocket.mediaUp);
});
it("should notify the websocket that media is up if both streams" +
@ -661,7 +563,7 @@ describe("loop.conversation", function() {
conversation.set("subscribedStream", true);
conversation.set("publishedStream", true);
sinon.assert.calledOnce(router._websocket.mediaUp);
sinon.assert.calledOnce(icView._websocket.mediaUp);
});
});
@ -670,7 +572,7 @@ describe("loop.conversation", function() {
function() {
conversation.set("subscribedStream", true);
sinon.assert.notCalled(router._websocket.mediaUp);
sinon.assert.notCalled(icView._websocket.mediaUp);
});
it("should notify the websocket that media is up if both streams" +
@ -678,7 +580,7 @@ describe("loop.conversation", function() {
conversation.set("publishedStream", true);
conversation.set("subscribedStream", true);
sinon.assert.calledOnce(router._websocket.mediaUp);
sinon.assert.calledOnce(icView._websocket.mediaUp);
});
});
});

View File

@ -35,12 +35,10 @@
<script src="../../content/shared/js/utils.js"></script>
<script src="../../content/shared/js/feedbackApiClient.js"></script>
<script src="../../content/shared/js/models.js"></script>
<script src="../../content/shared/js/router.js"></script>
<script src="../../content/shared/js/mixins.js"></script>
<script src="../../content/shared/js/views.js"></script>
<script src="../../content/shared/js/websocket.js"></script>
<script src="../../content/js/client.js"></script>
<script src="../../content/js/desktopRouter.js"></script>
<script src="../../content/js/conversation.js"></script>
<script type="text/javascript;version=1.8" src="../../content/js/contacts.js"></script>
<script src="../../content/js/panel.js"></script>

View File

@ -13,13 +13,6 @@ describe("loop.panel", function() {
var sandbox, notifications, fakeXHR, requests = [];
function createTestRouter(fakeDocument) {
return new loop.panel.PanelRouter({
notifications: notifications,
document: fakeDocument
});
}
beforeEach(function() {
sandbox = sinon.sandbox.create();
fakeXHR = sandbox.useFakeXMLHttpRequest();

View File

@ -37,7 +37,6 @@
<script src="../../content/shared/js/models.js"></script>
<script src="../../content/shared/js/mixins.js"></script>
<script src="../../content/shared/js/views.js"></script>
<script src="../../content/shared/js/router.js"></script>
<script src="../../content/shared/js/websocket.js"></script>
<script src="../../content/shared/js/feedbackApiClient.js"></script>
@ -46,7 +45,6 @@
<script src="mixins_test.js"></script>
<script src="utils_test.js"></script>
<script src="views_test.js"></script>
<script src="router_test.js"></script>
<script src="websocket_test.js"></script>
<script src="feedbackApiClient_test.js"></script>
<script>

View File

@ -65,13 +65,13 @@ describe("loop.shared.models", function() {
conversation.set("loopToken", "fakeToken");
});
describe("#incoming", function() {
it("should trigger a `call:incoming` event", function(done) {
conversation.once("call:incoming", function() {
describe("#accepted", function() {
it("should trigger a `call:accepted` event", function(done) {
conversation.once("call:accepted", function() {
done();
});
conversation.incoming();
conversation.accepted();
});
});

View File

@ -1,150 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/* global loop, sinon */
var expect = chai.expect;
describe("loop.shared.router", function() {
"use strict";
var sandbox, notifications;
beforeEach(function() {
sandbox = sinon.sandbox.create();
notifications = new loop.shared.models.NotificationCollection();
sandbox.stub(notifications, "errorL10n");
sandbox.stub(notifications, "warnL10n");
});
afterEach(function() {
sandbox.restore();
});
describe("BaseRouter", function() {
beforeEach(function() {
$("#fixtures").html('<div id="main"></div>');
});
afterEach(function() {
$("#fixtures").empty();
});
describe("#constructor", function() {
it("should require a notifications collection", function() {
expect(function() {
new loop.shared.router.BaseRouter();
}).to.Throw(Error, /missing required notifications/);
});
describe("inherited", function() {
var ExtendedRouter = loop.shared.router.BaseRouter.extend({});
it("should require a notifications collection", function() {
expect(function() {
new ExtendedRouter();
}).to.Throw(Error, /missing required notifications/);
});
});
});
});
describe("BaseConversationRouter", function() {
var conversation, TestRouter;
beforeEach(function() {
TestRouter = loop.shared.router.BaseConversationRouter.extend({
endCall: sandbox.spy()
});
conversation = new loop.shared.models.ConversationModel({
loopToken: "fakeToken"
}, {
sdk: {}
});
});
describe("#constructor", function() {
it("should require a ConversationModel instance", function() {
expect(function() {
new TestRouter({ client: {} });
}).to.Throw(Error, /missing required conversation/);
});
it("should require a Client instance", function() {
expect(function() {
new TestRouter({ conversation: {} });
}).to.Throw(Error, /missing required client/);
});
});
describe("Events", function() {
var router, fakeSessionData;
beforeEach(function() {
fakeSessionData = {
sessionId: "sessionId",
sessionToken: "sessionToken",
apiKey: "apiKey"
};
router = new TestRouter({
conversation: conversation,
notifications: notifications,
client: {}
});
});
describe("session:connection-error", function() {
it("should warn the user when .connect() call fails", function() {
conversation.trigger("session:connection-error");
sinon.assert.calledOnce(notifications.errorL10n);
sinon.assert.calledWithExactly(notifications.errorL10n, sinon.match.string);
});
it("should invoke endCall()", function() {
conversation.trigger("session:connection-error");
sinon.assert.calledOnce(router.endCall);
sinon.assert.calledWithExactly(router.endCall);
});
});
it("should call endCall() when conversation ended", function() {
conversation.trigger("session:ended");
sinon.assert.calledOnce(router.endCall);
});
it("should warn the user when peer hangs up", function() {
conversation.trigger("session:peer-hungup");
sinon.assert.calledOnce(notifications.warnL10n);
sinon.assert.calledWithExactly(notifications.warnL10n,
"peer_ended_conversation2");
});
it("should call endCall() when peer hangs up", function() {
conversation.trigger("session:peer-hungup");
sinon.assert.calledOnce(router.endCall);
});
it("should warn the user when network disconnects", function() {
conversation.trigger("session:network-disconnected");
sinon.assert.calledOnce(notifications.warnL10n);
sinon.assert.calledWithExactly(notifications.warnL10n,
"network_disconnected");
});
it("should call endCall() when network disconnects", function() {
conversation.trigger("session:network-disconnected");
sinon.assert.calledOnce(router.endCall);
});
});
});
});

View File

@ -21,6 +21,40 @@ describe("loop.shared.utils", function() {
sandbox.restore();
});
describe("Helper", function() {
var helper;
beforeEach(function() {
helper = new sharedUtils.Helper();
});
describe("#isIOS", function() {
it("should detect iOS", function() {
expect(helper.isIOS("iPad")).eql(true);
expect(helper.isIOS("iPod")).eql(true);
expect(helper.isIOS("iPhone")).eql(true);
expect(helper.isIOS("iPhone Simulator")).eql(true);
});
it("shouldn't detect iOS with other platforms", function() {
expect(helper.isIOS("MacIntel")).eql(false);
});
});
describe("#isFirefox", function() {
it("should detect Firefox", function() {
expect(helper.isFirefox("Firefox")).eql(true);
expect(helper.isFirefox("Gecko/Firefox")).eql(true);
expect(helper.isFirefox("Firefox/Gecko")).eql(true);
expect(helper.isFirefox("Gecko/Firefox/Chuck Norris")).eql(true);
});
it("shouldn't detect Firefox with other platforms", function() {
expect(helper.isFirefox("Opera")).eql(false);
});
});
});
describe("#getBoolPreference", function() {
afterEach(function() {
navigator.mozLoop = undefined;

View File

@ -12,6 +12,7 @@ describe("loop.webapp", function() {
var sharedModels = loop.shared.models,
sharedViews = loop.shared.views,
sharedUtils = loop.shared.utils,
sandbox,
notifications,
feedbackApiClient;
@ -33,7 +34,7 @@ describe("loop.webapp", function() {
beforeEach(function() {
sandbox.stub(React, "renderComponent");
sandbox.stub(loop.webapp.WebappHelper.prototype,
sandbox.stub(sharedUtils.Helper.prototype,
"locationHash").returns("#call/fake-Token");
loop.config.feedbackApiUrl = "http://fake.invalid";
conversationSetStub =
@ -78,7 +79,7 @@ describe("loop.webapp", function() {
});
conversation.set("loopToken", "fakeToken");
ocView = mountTestComponent({
helper: new loop.webapp.WebappHelper(),
helper: new sharedUtils.Helper(),
client: client,
conversation: conversation,
notifications: notifications,
@ -473,13 +474,13 @@ describe("loop.webapp", function() {
});
describe("WebappRootView", function() {
var webappHelper, sdk, conversationModel, client, props;
var helper, sdk, conversationModel, client, props;
function mountTestComponent() {
return TestUtils.renderIntoDocument(
loop.webapp.WebappRootView({
client: client,
helper: webappHelper,
helper: helper,
notifications: notifications,
sdk: sdk,
conversation: conversationModel,
@ -488,7 +489,7 @@ describe("loop.webapp", function() {
}
beforeEach(function() {
webappHelper = new loop.webapp.WebappHelper();
helper = new sharedUtils.Helper();
sdk = {
checkSystemRequirements: function() { return true; }
};
@ -505,7 +506,7 @@ describe("loop.webapp", function() {
it("should mount the unsupportedDevice view if the device is running iOS",
function() {
sandbox.stub(webappHelper, "isIOS").returns(true);
sandbox.stub(helper, "isIOS").returns(true);
var webappRootView = mountTestComponent();
@ -825,38 +826,4 @@ describe("loop.webapp", function() {
});
});
});
describe("WebappHelper", function() {
var helper;
beforeEach(function() {
helper = new loop.webapp.WebappHelper();
});
describe("#isIOS", function() {
it("should detect iOS", function() {
expect(helper.isIOS("iPad")).eql(true);
expect(helper.isIOS("iPod")).eql(true);
expect(helper.isIOS("iPhone")).eql(true);
expect(helper.isIOS("iPhone Simulator")).eql(true);
});
it("shouldn't detect iOS with other platforms", function() {
expect(helper.isIOS("MacIntel")).eql(false);
});
});
describe("#isFirefox", function() {
it("should detect Firefox", function() {
expect(helper.isFirefox("Firefox")).eql(true);
expect(helper.isFirefox("Gecko/Firefox")).eql(true);
expect(helper.isFirefox("Firefox/Gecko")).eql(true);
expect(helper.isFirefox("Gecko/Firefox/Chuck Norris")).eql(true);
});
it("shouldn't detect Firefox with other platforms", function() {
expect(helper.isFirefox("Opera")).eql(false);
});
});
});
});

View File

@ -34,11 +34,9 @@
<script src="../content/shared/js/feedbackApiClient.js"></script>
<script src="../content/shared/js/utils.js"></script>
<script src="../content/shared/js/models.js"></script>
<script src="../content/shared/js/router.js"></script>
<script src="../content/shared/js/mixins.js"></script>
<script src="../content/shared/js/views.js"></script>
<script src="../content/js/client.js"></script>
<script src="../content/js/desktopRouter.js"></script>
<script src="../standalone/content/js/webapp.js"></script>
<script type="text/javascript;version=1.8" src="../content/js/contacts.js"></script>
<script>

View File

@ -55,7 +55,7 @@ function openAndLoadSubDialog(aURL, aFeatures = null, aParams = null, aClosingCa
// Check that stylesheets were injected
let expectedStyleSheetURLs = content.gSubDialog._injectedStyleSheets.slice(0);
for (let styleSheet of content.document.styleSheets) {
for (let styleSheet of content.gSubDialog._frame.contentDocument.styleSheets) {
let i = expectedStyleSheetURLs.indexOf(styleSheet.href);
if (i >= 0) {
info("found " + styleSheet.href);

View File

@ -167,7 +167,7 @@ let test = asyncTest(function* () {
yield consoleOpened;
let webconsoleUI = toolbox.getPanel("webconsole").hud.ui;
let messagesAdded = webconsoleUI.once("messages-added");
let messagesAdded = webconsoleUI.once("new-messages");
yield messagesAdded;
info("Checking if 'inspect($0)' was evaluated");

View File

@ -73,11 +73,15 @@ browser.jar:
content/browser/devtools/shadereditor.js (shadereditor/shadereditor.js)
content/browser/devtools/canvasdebugger.xul (canvasdebugger/canvasdebugger.xul)
content/browser/devtools/canvasdebugger.js (canvasdebugger/canvasdebugger.js)
content/browser/devtools/webaudioeditor.xul (webaudioeditor/webaudioeditor.xul)
content/browser/devtools/d3.js (shared/d3.js)
content/browser/devtools/webaudioeditor.xul (webaudioeditor/webaudioeditor.xul)
content/browser/devtools/dagre-d3.js (webaudioeditor/lib/dagre-d3.js)
content/browser/devtools/webaudioeditor-controller.js (webaudioeditor/webaudioeditor-controller.js)
content/browser/devtools/webaudioeditor-view.js (webaudioeditor/webaudioeditor-view.js)
content/browser/devtools/webaudioeditor/includes.js (webaudioeditor/includes.js)
content/browser/devtools/webaudioeditor/models.js (webaudioeditor/models.js)
content/browser/devtools/webaudioeditor/controller.js (webaudioeditor/controller.js)
content/browser/devtools/webaudioeditor/views/utils.js (webaudioeditor/views/utils.js)
content/browser/devtools/webaudioeditor/views/context.js (webaudioeditor/views/context.js)
content/browser/devtools/webaudioeditor/views/inspector.js (webaudioeditor/views/inspector.js)
content/browser/devtools/profiler.xul (profiler/profiler.xul)
content/browser/devtools/profiler.js (profiler/profiler.js)
content/browser/devtools/ui-recordings.js (profiler/ui-recordings.js)

View File

@ -358,7 +358,7 @@ Tools.webAudioEditor = {
tooltip: l10n("ToolboxWebAudioEditor1.tooltip", webAudioEditorStrings),
isTargetSupported: function(target) {
return !target.isAddon;
return !target.isAddon && !target.chrome;
},
build: function(iframeWindow, toolbox) {

View File

@ -47,8 +47,9 @@ function testFocus(sw, hud) {
function onMessage(event, messages) {
let msg = [...messages][0];
let node = msg.node;
var loc = msg.querySelector(".message-location");
var loc = node.querySelector(".message-location");
ok(loc, "location element exists");
is(loc.textContent.trim(), sw.Scratchpad.uniqueName + ":1",
"location value is correct");
@ -74,7 +75,7 @@ function testFocus(sw, hud) {
// Sending messages to web console is an asynchronous operation. That's
// why we have to setup an observer here.
hud.ui.once("messages-added", onMessage);
hud.ui.once("new-messages", onMessage);
sp.setText("console.log('foo');");
sp.run().then(function ([selection, error, result]) {

View File

@ -0,0 +1,223 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
/**
* A collection of `AudioNodeModel`s used throughout the editor
* to keep track of audio nodes within the audio context.
*/
let gAudioNodes = new AudioNodesCollection();
/**
* Initializes the web audio editor views
*/
function startupWebAudioEditor() {
return all([
WebAudioEditorController.initialize(),
ContextView.initialize(),
InspectorView.initialize()
]);
}
/**
* Destroys the web audio editor controller and views.
*/
function shutdownWebAudioEditor() {
return all([
WebAudioEditorController.destroy(),
ContextView.destroy(),
InspectorView.destroy(),
]);
}
/**
* Functions handling target-related lifetime events.
*/
let WebAudioEditorController = {
/**
* Listen for events emitted by the current tab target.
*/
initialize: function() {
telemetry.toolOpened("webaudioeditor");
this._onTabNavigated = this._onTabNavigated.bind(this);
this._onThemeChange = this._onThemeChange.bind(this);
gTarget.on("will-navigate", this._onTabNavigated);
gTarget.on("navigate", this._onTabNavigated);
gFront.on("start-context", this._onStartContext);
gFront.on("create-node", this._onCreateNode);
gFront.on("connect-node", this._onConnectNode);
gFront.on("connect-param", this._onConnectParam);
gFront.on("disconnect-node", this._onDisconnectNode);
gFront.on("change-param", this._onChangeParam);
gFront.on("destroy-node", this._onDestroyNode);
// Hook into theme change so we can change
// the graph's marker styling, since we can't do this
// with CSS
gDevTools.on("pref-changed", this._onThemeChange);
},
/**
* Remove events emitted by the current tab target.
*/
destroy: function() {
telemetry.toolClosed("webaudioeditor");
gTarget.off("will-navigate", this._onTabNavigated);
gTarget.off("navigate", this._onTabNavigated);
gFront.off("start-context", this._onStartContext);
gFront.off("create-node", this._onCreateNode);
gFront.off("connect-node", this._onConnectNode);
gFront.off("connect-param", this._onConnectParam);
gFront.off("disconnect-node", this._onDisconnectNode);
gFront.off("change-param", this._onChangeParam);
gFront.off("destroy-node", this._onDestroyNode);
gDevTools.off("pref-changed", this._onThemeChange);
},
/**
* Called when the page is reloaded to show the reload notice and the
* waiting-for-an-audio-context notice.
*/
reset: function () {
$("#content").hidden = true;
ContextView.resetUI();
InspectorView.resetUI();
},
// Since node create and connect are probably executed back to back,
// and the controller's `_onCreateNode` needs to look up type,
// the edge creation could be called before the graph node is actually
// created. This way, we can check and listen for the event before
// adding an edge.
_waitForNodeCreation: function (sourceActor, destActor) {
let deferred = defer();
let source = gAudioNodes.get(sourceActor.actorID);
let dest = gAudioNodes.get(destActor.actorID);
if (!source || !dest) {
gAudioNodes.on("add", function createNodeListener (createdNode) {
if (sourceActor.actorID === createdNode.id)
source = createdNode;
if (destActor.actorID === createdNode.id)
dest = createdNode;
if (source && dest) {
gAudioNodes.off("add", createNodeListener);
deferred.resolve([source, dest]);
}
});
}
else {
deferred.resolve([source, dest]);
}
return deferred.promise;
},
/**
* Fired when the devtools theme changes (light, dark, etc.)
* so that the graph can update marker styling, as that
* cannot currently be done with CSS.
*/
_onThemeChange: function (event, data) {
window.emit(EVENTS.THEME_CHANGE, data.newValue);
},
/**
* Called for each location change in the debugged tab.
*/
_onTabNavigated: Task.async(function* (event, {isFrameSwitching}) {
switch (event) {
case "will-navigate": {
// Make sure the backend is prepared to handle audio contexts.
if (!isFrameSwitching) {
yield gFront.setup({ reload: false });
}
// Clear out current UI.
this.reset();
// When switching to an iframe, make sure the reload notice is displayed,
// as the document has already been loaded without being hooked.
if (isFrameSwitching) {
$("#reload-notice").hidden = false;
$("#waiting-notice").hidden = true;
} else {
// Otherwise, we are loading a new top level document,
// so we don't need to reload anymore and should receive
// new node events.
$("#reload-notice").hidden = true;
$("#waiting-notice").hidden = false;
}
// Clear out stored audio nodes
gAudioNodes.reset();
window.emit(EVENTS.UI_RESET);
break;
}
case "navigate": {
// TODO Case of bfcache, needs investigating
// bug 994250
break;
}
}
}),
/**
* Called after the first audio node is created in an audio context,
* signaling that the audio context is being used.
*/
_onStartContext: function() {
$("#reload-notice").hidden = true;
$("#waiting-notice").hidden = true;
$("#content").hidden = false;
window.emit(EVENTS.START_CONTEXT);
},
/**
* Called when a new node is created. Creates an `AudioNodeView` instance
* for tracking throughout the editor.
*/
_onCreateNode: Task.async(function* (nodeActor) {
yield gAudioNodes.add(nodeActor);
}),
/**
* Called on `destroy-node` when an AudioNode is GC'd. Removes
* from the AudioNodes collection and fires an event indicating the removal.
*/
_onDestroyNode: function (nodeActor) {
gAudioNodes.remove(gAudioNodes.get(nodeActor.actorID));
},
/**
* Called when a node is connected to another node.
*/
_onConnectNode: Task.async(function* ({ source: sourceActor, dest: destActor }) {
let [source, dest] = yield WebAudioEditorController._waitForNodeCreation(sourceActor, destActor);
source.connect(dest);
}),
/**
* Called when a node is connected to another node's AudioParam.
*/
_onConnectParam: Task.async(function* ({ source: sourceActor, dest: destActor, param }) {
let [source, dest] = yield WebAudioEditorController._waitForNodeCreation(sourceActor, destActor);
source.connect(dest, param);
}),
/**
* Called when a node is disconnected.
*/
_onDisconnectNode: function(nodeActor) {
let node = gAudioNodes.get(nodeActor.actorID);
node.disconnect();
},
/**
* Called when a node param is changed.
*/
_onChangeParam: function({ actor, param, value }) {
window.emit(EVENTS.CHANGE_PARAM, gAudioNodes.get(actor.actorID), param, value);
}
};

View File

@ -0,0 +1,98 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource:///modules/devtools/ViewHelpers.jsm");
Cu.import("resource:///modules/devtools/gDevTools.jsm");
const require = Cu.import("resource://gre/modules/devtools/Loader.jsm", {}).devtools.require;
let { console } = Cu.import("resource://gre/modules/devtools/Console.jsm", {});
let { EventTarget } = require("sdk/event/target");
const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
const { Class } = require("sdk/core/heritage");
const EventEmitter = require("devtools/toolkit/event-emitter");
const STRINGS_URI = "chrome://browser/locale/devtools/webaudioeditor.properties"
const L10N = new ViewHelpers.L10N(STRINGS_URI);
const Telemetry = require("devtools/shared/telemetry");
const telemetry = new Telemetry();
// Override DOM promises with Promise.jsm helpers
const { defer, all } = Cu.import("resource://gre/modules/Promise.jsm", {}).Promise;
/* Events fired on `window` to indicate state or actions */
const EVENTS = {
// Fired when the first AudioNode has been created, signifying
// that the AudioContext is being used and should be tracked via the editor.
START_CONTEXT: "WebAudioEditor:StartContext",
// When the devtools theme changes.
THEME_CHANGE: "WebAudioEditor:ThemeChange",
// When the UI is reset from tab navigation.
UI_RESET: "WebAudioEditor:UIReset",
// When a param has been changed via the UI and successfully
// pushed via the actor to the raw audio node.
UI_SET_PARAM: "WebAudioEditor:UISetParam",
// When a node is to be set in the InspectorView.
UI_SELECT_NODE: "WebAudioEditor:UISelectNode",
// When the inspector is finished setting a new node.
UI_INSPECTOR_NODE_SET: "WebAudioEditor:UIInspectorNodeSet",
// When the inspector is finished rendering in or out of view.
UI_INSPECTOR_TOGGLED: "WebAudioEditor:UIInspectorToggled",
// When an audio node is finished loading in the Properties tab.
UI_PROPERTIES_TAB_RENDERED: "WebAudioEditor:UIPropertiesTabRendered",
// When the Audio Context graph finishes rendering.
// Called with three arguments: the number of nodes rendered, the number
// of edge connections rendered (not counting param edges), and the count
// of param edges rendered.
UI_GRAPH_RENDERED: "WebAudioEditor:UIGraphRendered"
};
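// A minimal usage sketch (illustrative only, not part of this patch): the panel
// window is decorated as an EventEmitter below, so views and tests can listen
// for and emit these constants directly. Handlers receive the event name first.
//
//   window.on(EVENTS.THEME_CHANGE, (eventName, newTheme) => {
//     // e.g. restyle the graph markers for the new devtools theme
//   });
//   window.emit(EVENTS.UI_RESET);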
/**
* The current target and the Web Audio Editor front, set by this tool's host.
*/
let gToolbox, gTarget, gFront;
/**
* Convenient way of emitting events from the panel window.
*/
EventEmitter.decorate(this);
/**
* DOM query helper.
*/
function $(selector, target = document) { return target.querySelector(selector); }
function $$(selector, target = document) { return target.querySelectorAll(selector); }
/**
* Takes an iterable collection, and a hash. Return the first
* object in the collection that matches the values in the hash.
* From Backbone.Collection#findWhere
* http://backbonejs.org/#Collection-findWhere
*/
function findWhere (collection, attrs) {
let keys = Object.keys(attrs);
for (let model of collection) {
if (keys.every(key => model[key] === attrs[key])) {
return model;
}
}
return void 0;
}
function mixin (source, ...args) {
args.forEach(obj => Object.keys(obj).forEach(prop => source[prop] = obj[prop]));
return source;
}
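// Illustrative usage (a sketch, not part of this patch; the literals are made
// up): `findWhere` backs AudioNodesCollection#get in models.js, and `mixin`
// copies own enumerable properties onto the target object.
//
//   findWhere(collection.models, { id: "someActorID" });  // -> model or undefined
//   mixin({ a: 1 }, { b: 2 }, { c: 3 });                  // -> { a: 1, b: 2, c: 3 }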

View File

@ -0,0 +1,274 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
// Import as different name `coreEmit`, so we don't conflict
// with the global `window` listener itself.
const { emit: coreEmit } = require("sdk/event/core");
/**
* Representational wrapper around AudioNodeActors. Adding and destroying
* AudioNodes should be performed through the AudioNodes collection.
*
* Events:
* - `connect`: node, destinationNode, parameter
* - `disconnect`: node
*/
const AudioNodeModel = Class({
extends: EventTarget,
// Will be added via AudioNodes `add`
collection: null,
initialize: function (actor) {
this.actor = actor;
this.id = actor.actorID;
this.connections = [];
},
/**
* After instantiating the AudioNodeModel, calling `setup` caches values
* from the actor onto the model. In this case, only the type of audio node.
*
* @return promise
*/
setup: Task.async(function* () {
yield this.getType();
}),
/**
* A proxy for the underlying AudioNodeActor to fetch its type
* and subsequently assign the type to the instance.
*
* @return Promise->String
*/
getType: Task.async(function* () {
this.type = yield this.actor.getType();
return this.type;
}),
/**
* Stores connection data inside this instance of this audio node connecting
* to another node (destination). If connecting to another node's AudioParam,
* the second argument (param) must be populated with a string.
*
* Connecting nodes is idempotent. Upon new connection, emits "connect" event.
*
* @param AudioNodeModel destination
* @param String param
*/
connect: function (destination, param) {
let edge = findWhere(this.connections, { destination: destination.id, param: param });
if (!edge) {
this.connections.push({ source: this.id, destination: destination.id, param: param });
coreEmit(this, "connect", this, destination, param);
}
},
/**
* Clears out all internal connection data. Emits "disconnect" event.
*/
disconnect: function () {
this.connections.length = 0;
coreEmit(this, "disconnect", this);
},
/**
* Returns a promise that resolves to an array of objects containing
* both a `param` name property and a `value` property.
*
* @return Promise->Object
*/
getParams: function () {
return this.actor.getParams();
},
/**
* Takes a `dagreD3.Digraph` object and adds this node to
* the graph to be rendered.
*
* @param dagreD3.Digraph
*/
addToGraph: function (graph) {
graph.addNode(this.id, {
type: this.type,
label: this.type.replace(/Node$/, ""),
id: this.id
});
},
/**
* Takes a `dagreD3.Digraph` object and adds edges to
* the graph to be rendered. Separate from `addToGraph`,
* as while we depend on D3/Dagre's constraints, we cannot
* add edges for nodes that have not yet been added to the graph.
*
* @param dagreD3.Digraph
*/
addEdgesToGraph: function (graph) {
for (let edge of this.connections) {
let options = {
source: this.id,
target: edge.destination
};
// Only add `label` if `param` is specified, since this is then an
// AudioParam connection. `label` is what dagre-d3 uses to render the
// edge text, while `param` names the parameter explicitly, independent
// of rendering details.
if (edge.param) {
options.label = options.param = edge.param;
}
graph.addEdge(null, this.id, edge.destination, options);
}
}
});
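// A minimal usage sketch (illustrative only; `oscModel` and `gainModel` are
// assumed to be models already tracked by a collection): connections are stored
// locally on the source model, repeat connections are idempotent, and the SDK
// event target emits "connect"/"disconnect".
//
//   oscModel.connect(gainModel);          // records an edge, emits "connect"
//   oscModel.connect(gainModel, "gain");  // AudioParam edge, emits "connect" again
//   oscModel.connect(gainModel);          // duplicate: no new edge, no event
//   oscModel.disconnect();                // clears all edges, emits "disconnect"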
/**
* Constructor for a Collection of `AudioNodeModel` models.
*
* Events:
* - `add`: node
* - `remove`: node
* - `connect`: node, destinationNode, parameter
* - `disconnect`: node
*/
const AudioNodesCollection = Class({
extends: EventTarget,
model: AudioNodeModel,
initialize: function () {
this.models = new Set();
this._onModelEvent = this._onModelEvent.bind(this);
},
/**
* Iterates over all models within the collection, calling `fn` with the
* model as the first argument.
*
* @param Function fn
*/
forEach: function (fn) {
this.models.forEach(fn);
},
/**
* Creates a new AudioNodeModel, passing through arguments into the AudioNodeModel
* constructor, and adds the model to the internal collection store of this
* instance.
*
* Also calls `setup` on the model itself, and sets up event piping, so that
* events emitted on each model propagate to the collection itself.
*
* Emits "add" event on instance when completed.
*
* @param Object obj
* @return Promise->AudioNodeModel
*/
add: Task.async(function* (obj) {
let node = new this.model(obj);
node.collection = this;
yield node.setup();
this.models.add(node);
node.on("*", this._onModelEvent);
coreEmit(this, "add", node);
return node;
}),
/**
* Removes an AudioNodeModel from the internal collection by deleting it
* from the backing Set, and emits "remove" on this instance.
*
* @param AudioNodeModel node
*/
remove: function (node) {
this.models.delete(node);
coreEmit(this, "remove", node);
},
/**
* Empties out the internal collection of all AudioNodeModels.
*/
reset: function () {
this.models.clear();
},
/**
* Takes an `id` from an AudioNodeModel and returns the corresponding
* AudioNodeModel within the collection that matches that id. Returns `null`
* if not found.
*
* @param Number id
* @return AudioNodeModel|null
*/
get: function (id) {
return findWhere(this.models, { id: id });
},
/**
* Returns the count for how many models are a part of this collection.
*
* @return Number
*/
get length() {
return this.models.size;
},
/**
* Returns detailed information about the collection, used during tests
* to query state. Returns an object with the node count, the number of
* edges within the data graph, and how many of those edges are for
* AudioParams.
*
* @return Object
*/
getInfo: function () {
let info = {
nodes: this.length,
edges: 0,
paramEdges: 0
};
this.models.forEach(node => {
let paramEdgeCount = node.connections.filter(edge => edge.param).length;
info.edges += node.connections.length - paramEdgeCount;
info.paramEdges += paramEdgeCount;
});
return info;
},
/**
* Adds all nodes within the collection to the passed in graph,
* as well as their corresponding edges.
*
* @param dagreD3.Digraph
*/
populateGraph: function (graph) {
this.models.forEach(node => node.addToGraph(graph));
this.models.forEach(node => node.addEdgesToGraph(graph));
},
/**
* Called when a stored model emits any event. Used to manage event
* propagation, and to react to model events, such as removing a model
* from the collection when it is destroyed.
*/
_onModelEvent: function (eventName, node, ...args) {
if (eventName === "remove") {
// If a `remove` event from the model, remove it
// from the collection, and let the method handle the emitting on
// the collection
this.remove(node);
} else {
// Pipe the event to the collection
coreEmit(this, eventName, [node].concat(args));
}
}
});
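// A rough end-to-end sketch (illustrative only; `nodeActor` and `graph` stand in
// for a real AudioNodeActor and a dagreD3.Digraph):
//
//   let nodes = new AudioNodesCollection();
//   nodes.on("add", node => console.log("tracking", node.type));
//   Task.spawn(function* () {
//     let model = yield nodes.add(nodeActor);  // fetches the type via the actor
//     nodes.get(model.id);                     // look the model up by actor ID
//     nodes.populateGraph(graph);              // adds all nodes, then their edges
//     console.log(nodes.getInfo());            // { nodes, edges, paramEdges }
//   });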

View File

@ -35,6 +35,7 @@ WebAudioEditorPanel.prototype = {
.then(() => {
this.panelWin.gToolbox = this._toolbox;
this.panelWin.gTarget = this.target;
this.panelWin.gFront = new WebAudioFront(this.target.client, this.target.form);
return this.panelWin.startupWebAudioEditor();
})

View File

@ -8,6 +8,7 @@ support-files =
doc_media-node-creation.html
doc_destroy-nodes.html
doc_connect-toggle.html
doc_connect-toggle-param.html
doc_connect-param.html
doc_connect-multi-param.html
doc_iframe-context.html
@ -20,9 +21,10 @@ support-files =
[browser_audionode-actor-get-set-param.js]
[browser_audionode-actor-get-type.js]
[browser_audionode-actor-is-source.js]
[browser_webaudio-actor-connect-param.js]
[browser_webaudio-actor-destroy-node.js]
[browser_audionode-actor-bypass.js]
[browser_webaudio-actor-simple.js]
[browser_webaudio-actor-destroy-node.js]
[browser_webaudio-actor-connect-param.js]
[browser_wa_destroy-node-01.js]
@ -39,6 +41,7 @@ support-files =
[browser_wa_graph-render-02.js]
[browser_wa_graph-render-03.js]
[browser_wa_graph-render-04.js]
[browser_wa_graph-render-05.js]
[browser_wa_graph-selected.js]
[browser_wa_graph-zoom.js]

View File

@ -0,0 +1,29 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Test AudioNode#bypass(), AudioNode#isBypassed()
*/
function spawnTest () {
let [target, debuggee, front] = yield initBackend(SIMPLE_CONTEXT_URL);
let [_, [destNode, oscNode, gainNode]] = yield Promise.all([
front.setup({ reload: true }),
get3(front, "create-node")
]);
is((yield gainNode.isBypassed()), false, "Nodes start off unbypassed.");
info("Calling node#bypass(true)");
yield gainNode.bypass(true);
is((yield gainNode.isBypassed()), true, "Node is now bypassed.");
info("Calling node#bypass(false)");
yield gainNode.bypass(false);
is((yield gainNode.isBypassed()), false, "Node back to being unbypassed.");
yield removeTab(target.tab);
finish();
}

View File

@ -12,24 +12,24 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(DESTROY_NODES_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS } = panelWin;
let { gFront, $, $$, gAudioNodes } = panelWin;
let started = once(gFront, "start-context");
reload(target);
let destroyed = getN(panelWin, EVENTS.DESTROY_NODE, 10);
let destroyed = getN(gAudioNodes, "remove", 10);
forceCC();
let [created] = yield Promise.all([
getNSpread(panelWin, EVENTS.CREATE_NODE, 13),
getNSpread(gAudioNodes, "add", 13),
waitForGraphRendered(panelWin, 13, 2)
]);
// Since CREATE_NODE emits several arguments (eventName and actorID), let's
// flatten it to just the actorIDs
let actorIDs = created.map(ev => ev[1]);
// Flatten arrays of event arguments and take the first (AudioNodeModel)
// and get its ID.
let actorIDs = created.map(ev => ev[0].id);
// Click a soon-to-be dead buffer node
yield clickGraphNode(panelWin, actorIDs[5]);
@ -40,7 +40,7 @@ function spawnTest() {
yield Promise.all([destroyed, waitForGraphRendered(panelWin, 3, 2)]);
// Test internal storage
is(panelWin.AudioNodes.length, 3, "All nodes should be GC'd except one gain, osc and dest node.");
is(panelWin.gAudioNodes.length, 3, "All nodes should be GC'd except one gain, osc and dest node.");
// Test graph rendering
ok(findGraphNode(panelWin, actorIDs[0]), "dest should be in graph");

View File

@ -6,13 +6,10 @@
* the correct node in the InspectorView
*/
let EVENTS = null;
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(COMPLEX_CONTEXT_URL);
let panelWin = panel.panelWin;
let { gFront, $, $$, WebAudioInspectorView } = panelWin;
EVENTS = panelWin.EVENTS;
let { gFront, $, $$, InspectorView } = panelWin;
let started = once(gFront, "start-context");
@ -25,28 +22,28 @@ function spawnTest() {
let nodeIds = actors.map(actor => actor.actorID);
ok(!WebAudioInspectorView.isVisible(), "InspectorView hidden on start.");
ok(!InspectorView.isVisible(), "InspectorView hidden on start.");
yield clickGraphNode(panelWin, nodeIds[1], true);
ok(WebAudioInspectorView.isVisible(), "InspectorView visible after selecting a node.");
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[1], "InspectorView has correct node set.");
ok(InspectorView.isVisible(), "InspectorView visible after selecting a node.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[1], "InspectorView has correct node set.");
yield clickGraphNode(panelWin, nodeIds[2]);
ok(WebAudioInspectorView.isVisible(), "InspectorView still visible after selecting another node.");
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[2], "InspectorView has correct node set on second node.");
ok(InspectorView.isVisible(), "InspectorView still visible after selecting another node.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[2], "InspectorView has correct node set on second node.");
yield clickGraphNode(panelWin, nodeIds[2]);
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[2], "Clicking the same node again works (idempotent).");
is(InspectorView.getCurrentAudioNode().id, nodeIds[2], "Clicking the same node again works (idempotent).");
yield clickGraphNode(panelWin, $("rect", findGraphNode(panelWin, nodeIds[3])));
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[3], "Clicking on a <rect> works as expected.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[3], "Clicking on a <rect> works as expected.");
yield clickGraphNode(panelWin, $("tspan", findGraphNode(panelWin, nodeIds[4])));
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[4], "Clicking on a <tspan> works as expected.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[4], "Clicking on a <tspan> works as expected.");
ok(WebAudioInspectorView.isVisible(),
ok(InspectorView.isVisible(),
"InspectorView still visible after several nodes have been clicked.");
yield teardown(panel);

View File

@ -8,7 +8,7 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, MARKER_STYLING } = panelWin;
let { gFront, $, $$, MARKER_STYLING } = panelWin;
let currentTheme = Services.prefs.getCharPref("devtools.theme");

View File

@ -10,13 +10,13 @@ let connectCount = 0;
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS } = panelWin;
let { gFront, $, $$, EVENTS, gAudioNodes } = panelWin;
let started = once(gFront, "start-context");
reload(target);
panelWin.on(EVENTS.CONNECT_NODE, onConnectNode);
gAudioNodes.on("connect", onConnectNode);
let [actors] = yield Promise.all([
get3(gFront, "create-node"),
@ -35,7 +35,7 @@ function spawnTest() {
is(connectCount, 2, "Only two node connect events should be fired.");
panelWin.off(EVENTS.CONNECT_NODE, onConnectNode);
gAudioNodes.off("connect", onConnectNode);
yield teardown(panel);
finish();

View File

@ -8,7 +8,7 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(COMPLEX_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS } = panelWin;
let { gFront, $, $$ } = panelWin;
let started = once(gFront, "start-context");

View File

@ -0,0 +1,27 @@
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
/**
* Tests to ensure that param connections trigger graph redraws
*/
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(CONNECT_TOGGLE_PARAM_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS } = panelWin;
reload(target);
let [actors] = yield Promise.all([
getN(gFront, "create-node", 3),
waitForGraphRendered(panelWin, 3, 1, 0)
]);
ok(true, "Graph rendered without param connection");
yield waitForGraphRendered(panelWin, 3, 1, 1);
ok(true, "Graph re-rendered upon param connection");
yield teardown(panel);
finish();
}

View File

@ -8,7 +8,7 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioGraphView } = panelWin;
let { gFront, $, $$, EVENTS, ContextView } = panelWin;
let started = once(gFront, "start-context");
@ -17,27 +17,27 @@ function spawnTest() {
waitForGraphRendered(panelWin, 3, 2)
]);
is(WebAudioGraphView.getCurrentScale(), 1, "Default graph scale is 1.");
is(WebAudioGraphView.getCurrentTranslation()[0], 20, "Default x-translation is 20.");
is(WebAudioGraphView.getCurrentTranslation()[1], 20, "Default y-translation is 20.");
is(ContextView.getCurrentScale(), 1, "Default graph scale is 1.");
is(ContextView.getCurrentTranslation()[0], 20, "Default x-translation is 20.");
is(ContextView.getCurrentTranslation()[1], 20, "Default y-translation is 20.");
// Change both attribute and D3's internal store
panelWin.d3.select("#graph-target").attr("transform", "translate([100, 400]) scale(10)");
WebAudioGraphView._zoomBinding.scale(10);
WebAudioGraphView._zoomBinding.translate([100, 400]);
ContextView._zoomBinding.scale(10);
ContextView._zoomBinding.translate([100, 400]);
is(WebAudioGraphView.getCurrentScale(), 10, "After zoom, scale is 10.");
is(WebAudioGraphView.getCurrentTranslation()[0], 100, "After zoom, x-translation is 100.");
is(WebAudioGraphView.getCurrentTranslation()[1], 400, "After zoom, y-translation is 400.");
is(ContextView.getCurrentScale(), 10, "After zoom, scale is 10.");
is(ContextView.getCurrentTranslation()[0], 100, "After zoom, x-translation is 100.");
is(ContextView.getCurrentTranslation()[1], 400, "After zoom, y-translation is 400.");
yield Promise.all([
reload(target),
waitForGraphRendered(panelWin, 3, 2)
]);
is(WebAudioGraphView.getCurrentScale(), 1, "After refresh, graph scale is 1.");
is(WebAudioGraphView.getCurrentTranslation()[0], 20, "After refresh, x-translation is 20.");
is(WebAudioGraphView.getCurrentTranslation()[1], 20, "After refresh, y-translation is 20.");
is(ContextView.getCurrentScale(), 1, "After refresh, graph scale is 1.");
is(ContextView.getCurrentTranslation()[0], 20, "After refresh, x-translation is 20.");
is(ContextView.getCurrentTranslation()[1], 20, "After refresh, y-translation is 20.");
yield teardown(panel);
finish();

View File

@ -9,8 +9,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");
@ -22,13 +22,13 @@ function spawnTest() {
]);
let nodeIds = actors.map(actor => actor.actorID);
ok(!WebAudioInspectorView.isVisible(), "InspectorView hidden on start.");
ok(!InspectorView.isVisible(), "InspectorView hidden on start.");
// Open inspector pane
$("#inspector-pane-toggle").click();
yield once(panelWin, EVENTS.UI_INSPECTOR_TOGGLED);
ok(WebAudioInspectorView.isVisible(), "InspectorView shown after toggling.");
ok(InspectorView.isVisible(), "InspectorView shown after toggling.");
ok(isVisible($("#web-audio-editor-details-pane-empty")),
"InspectorView empty message should still be visible.");
@ -41,13 +41,13 @@ function spawnTest() {
$("#inspector-pane-toggle").click();
yield once(panelWin, EVENTS.UI_INSPECTOR_TOGGLED);
ok(!WebAudioInspectorView.isVisible(), "InspectorView back to being hidden.");
ok(!InspectorView.isVisible(), "InspectorView back to being hidden.");
// Open again to test node loading while open
$("#inspector-pane-toggle").click();
yield once(panelWin, EVENTS.UI_INSPECTOR_TOGGLED);
ok(WebAudioInspectorView.isVisible(), "InspectorView being shown.");
ok(InspectorView.isVisible(), "InspectorView being shown.");
ok(!isVisible($("#web-audio-editor-tabs")),
"InspectorView tabs are still hidden.");

View File

@ -9,8 +9,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");
@ -22,7 +22,7 @@ function spawnTest() {
]);
let nodeIds = actors.map(actor => actor.actorID);
ok(!WebAudioInspectorView.isVisible(), "InspectorView hidden on start.");
ok(!InspectorView.isVisible(), "InspectorView hidden on start.");
ok(isVisible($("#web-audio-editor-details-pane-empty")),
"InspectorView empty message should show when no node's selected.");
ok(!isVisible($("#web-audio-editor-tabs")),
@ -37,7 +37,7 @@ function spawnTest() {
once(panelWin, EVENTS.UI_INSPECTOR_TOGGLED)
]);
ok(WebAudioInspectorView.isVisible(), "InspectorView shown once node selected.");
ok(InspectorView.isVisible(), "InspectorView shown once node selected.");
ok(!isVisible($("#web-audio-editor-details-pane-empty")),
"InspectorView empty message hidden when node selected.");
ok(isVisible($("#web-audio-editor-tabs")),

View File

@ -8,8 +8,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");

View File

@ -8,8 +8,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(COMPLEX_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");

View File

@ -37,8 +37,8 @@ function waitForDeviceClosed() {
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(MEDIA_NODES_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
// Auto enable getUserMedia
let mediaPermissionPref = Services.prefs.getBoolPref(MEDIA_PERMISSION);

View File

@ -9,8 +9,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(BUFFER_AND_ARRAY_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");

View File

@ -9,8 +9,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_NODES_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");

View File

@ -8,8 +8,8 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, $$, EVENTS, WebAudioInspectorView } = panelWin;
let gVars = WebAudioInspectorView._propsView;
let { gFront, $, $$, EVENTS, InspectorView } = panelWin;
let gVars = InspectorView._propsView;
let started = once(gFront, "start-context");

View File

@ -9,7 +9,7 @@
function spawnTest() {
let [target, debuggee, panel] = yield initWebAudioEditor(SIMPLE_CONTEXT_URL);
let { panelWin } = panel;
let { gFront, $, WebAudioInspectorView } = panelWin;
let { gFront, $, InspectorView } = panelWin;
reload(target);
@ -20,8 +20,8 @@ function spawnTest() {
let nodeIds = actors.map(actor => actor.actorID);
yield clickGraphNode(panelWin, nodeIds[1], true);
ok(WebAudioInspectorView.isVisible(), "InspectorView visible after selecting a node.");
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[1], "InspectorView has correct node set.");
ok(InspectorView.isVisible(), "InspectorView visible after selecting a node.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[1], "InspectorView has correct node set.");
/**
* Reload
@ -35,14 +35,14 @@ function spawnTest() {
]);
nodeIds = actors.map(actor => actor.actorID);
ok(!WebAudioInspectorView.isVisible(), "InspectorView hidden on start.");
ise(WebAudioInspectorView.getCurrentAudioNode(), null,
ok(!InspectorView.isVisible(), "InspectorView hidden on start.");
ise(InspectorView.getCurrentAudioNode(), null,
"InspectorView has no current node set on reset.");
yield clickGraphNode(panelWin, nodeIds[2], true);
ok(WebAudioInspectorView.isVisible(),
ok(InspectorView.isVisible(),
"InspectorView visible after selecting a node after a reset.");
is(WebAudioInspectorView.getCurrentAudioNode().id, nodeIds[2], "InspectorView has correct node set upon clicking graph node after a reset.");
is(InspectorView.getCurrentAudioNode().id, nodeIds[2], "InspectorView has correct node set upon clicking graph node after a reset.");
yield teardown(panel);
finish();

View File

@ -0,0 +1,27 @@
<!-- Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ -->
<!doctype html>
<html>
<head>
<meta charset="utf-8"/>
<title>Web Audio Editor test page</title>
</head>
<body>
<script type="text/javascript;version=1.8">
"use strict";
let i = 0;
let ctx = new AudioContext();
let osc = ctx.createOscillator();
let gain = ctx.createGain();
gain.gain.value = 0;
gain.connect(ctx.destination);
osc.start(0);
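// After a delay, connect the oscillator's output to the gain's `gain` AudioParam (a param connection).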
setTimeout(() => osc.connect(gain.gain), 500);
</script>
</body>
</html>

View File

@ -28,6 +28,7 @@ const MEDIA_NODES_URL = EXAMPLE_URL + "doc_media-node-creation.html";
const BUFFER_AND_ARRAY_URL = EXAMPLE_URL + "doc_buffer-and-array.html";
const DESTROY_NODES_URL = EXAMPLE_URL + "doc_destroy-nodes.html";
const CONNECT_TOGGLE_URL = EXAMPLE_URL + "doc_connect-toggle.html";
const CONNECT_TOGGLE_PARAM_URL = EXAMPLE_URL + "doc_connect-toggle-param.html";
const CONNECT_PARAM_URL = EXAMPLE_URL + "doc_connect-param.html";
const CONNECT_MULTI_PARAM_URL = EXAMPLE_URL + "doc_connect-multi-param.html";
const IFRAME_CONTEXT_URL = EXAMPLE_URL + "doc_iframe-context.html";
@ -37,7 +38,10 @@ waitForExplicitFinish();
let gToolEnabled = Services.prefs.getBoolPref("devtools.webaudioeditor.enabled");
gDevTools.testing = true;
registerCleanupFunction(() => {
gDevTools.testing = false;
info("finish() was called, cleaning up...");
Services.prefs.setBoolPref("devtools.debugger.log", gEnableLogging);
Services.prefs.setBoolPref("devtools.webaudioeditor.enabled", gToolEnabled);
@ -215,10 +219,7 @@ function waitForGraphRendered (front, nodeCount, edgeCount, paramEdgeCount) {
let deferred = Promise.defer();
let eventName = front.EVENTS.UI_GRAPH_RENDERED;
front.on(eventName, function onGraphRendered (_, nodes, edges, pEdges) {
info(nodes);
info(edges)
info(pEdges);
let paramEdgesDone = paramEdgeCount ? paramEdgeCount === pEdges : true;
let paramEdgesDone = paramEdgeCount != null ? paramEdgeCount === pEdges : true;
if (nodes === nodeCount && edges === edgeCount && paramEdgesDone) {
front.off(eventName, onGraphRendered);
deferred.resolve();
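The `!= null` check above matters when a test expects exactly zero param edges: with the old truthiness test, an expectation of `0` was silently skipped. A minimal sketch (illustrative only, not part of the patch) of the difference:

function paramEdgesDoneOld(expected, actual) {
  // Old check: `0` is falsy, so an expectation of zero param edges is never compared.
  return expected ? expected === actual : true;
}
function paramEdgesDoneNew(expected, actual) {
  // New check: only skip the comparison when no expectation was passed at all.
  return expected != null ? expected === actual : true;
}
console.log(paramEdgesDoneOld(0, 3));  // true  -- graph wrongly treated as "done"
console.log(paramEdgesDoneNew(0, 3));  // false -- keeps waiting for the right render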

View File

@ -0,0 +1,305 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { debounce } = require("sdk/lang/functional");
// Globals for d3 stuff
// Default properties of the graph on rerender
const GRAPH_DEFAULTS = {
translate: [20, 20],
scale: 1
};
// Sizes of SVG arrows in graph
const ARROW_HEIGHT = 5;
const ARROW_WIDTH = 8;
// Styles for markers as they cannot be done with CSS.
const MARKER_STYLING = {
light: "#AAA",
dark: "#CED3D9"
};
const GRAPH_DEBOUNCE_TIMER = 100;
// `gAudioNodes` events that should require the graph
// to redraw
const GRAPH_REDRAW_EVENTS = ["add", "connect", "disconnect", "remove"];
/**
* Functions handling the graph UI.
*/
let ContextView = {
/**
* Initialization function, called when the tool is started.
*/
initialize: function() {
this._onGraphNodeClick = this._onGraphNodeClick.bind(this);
this._onThemeChange = this._onThemeChange.bind(this);
this._onNodeSelect = this._onNodeSelect.bind(this);
this._onStartContext = this._onStartContext.bind(this);
this._onEvent = this._onEvent.bind(this);
this.draw = debounce(this.draw.bind(this), GRAPH_DEBOUNCE_TIMER);
$('#graph-target').addEventListener('click', this._onGraphNodeClick, false);
window.on(EVENTS.THEME_CHANGE, this._onThemeChange);
window.on(EVENTS.UI_INSPECTOR_NODE_SET, this._onNodeSelect);
window.on(EVENTS.START_CONTEXT, this._onStartContext);
gAudioNodes.on("*", this._onEvent);
},
/**
* Destruction function, called when the tool is closed.
*/
destroy: function() {
// If the graph was rendered at all, then the handler
// for zooming in will be set. We must remove it to prevent leaks.
if (this._zoomBinding) {
this._zoomBinding.on("zoom", null);
}
$('#graph-target').removeEventListener('click', this._onGraphNodeClick, false);
window.off(EVENTS.THEME_CHANGE, this._onThemeChange);
window.off(EVENTS.UI_INSPECTOR_NODE_SET, this._onNodeSelect);
window.off(EVENTS.START_CONTEXT, this._onStartContext);
gAudioNodes.off("*", this._onEvent);
},
/**
* Called when a page is reloaded and waiting for a "start-context" event
* and clears out old content
*/
resetUI: function () {
this.clearGraph();
this.resetGraphTransform();
},
/**
* Clears out the rendered graph, called when resetting the SVG elements to draw again,
* or when resetting the entire UI tool
*/
clearGraph: function () {
$("#graph-target").innerHTML = "";
},
/**
* Moves the graph back to its original scale and translation.
*/
resetGraphTransform: function () {
// Only reset if the graph was ever drawn.
if (this._zoomBinding) {
let { translate, scale } = GRAPH_DEFAULTS;
// Must set the `zoomBinding` so the next `zoom` event is in sync with
// where the graph is visually (set by the `transform` attribute).
this._zoomBinding.scale(scale);
this._zoomBinding.translate(translate);
d3.select("#graph-target")
.attr("transform", "translate(" + translate + ") scale(" + scale + ")");
}
},
getCurrentScale: function () {
return this._zoomBinding ? this._zoomBinding.scale() : null;
},
getCurrentTranslation: function () {
return this._zoomBinding ? this._zoomBinding.translate() : null;
},
/**
* Makes the corresponding graph node appear "focused", removing
* focused styles from all other nodes. If no `actorID` specified,
* make all nodes appear unselected.
* Called from UI_INSPECTOR_NODE_SELECT.
*/
focusNode: function (actorID) {
// Remove class "selected" from all nodes
Array.forEach($$(".nodes > g"), $node => $node.classList.remove("selected"));
// Add to "selected"
if (actorID) {
this._getNodeByID(actorID).classList.add("selected");
}
},
/**
* Takes an actorID and returns the corresponding DOM SVG element in the graph
*/
_getNodeByID: function (actorID) {
return $(".nodes > g[data-id='" + actorID + "']");
},
/**
* This method renders the nodes currently available in `gAudioNodes`. It is
* debounced, so a burst of routing changes results in a single render
* `GRAPH_DEBOUNCE_TIMER` milliseconds after the last change.
*/
draw: function () {
// Clear out previous SVG information
this.clearGraph();
let graph = new dagreD3.Digraph();
let renderer = new dagreD3.Renderer();
gAudioNodes.populateGraph(graph);
// Post-render manipulation of the nodes
let oldDrawNodes = renderer.drawNodes();
renderer.drawNodes(function(graph, root) {
let svgNodes = oldDrawNodes(graph, root);
svgNodes.attr("class", (n) => {
let node = graph.node(n);
return "audionode type-" + node.type;
});
svgNodes.attr("data-id", (n) => {
let node = graph.node(n);
return node.id;
});
return svgNodes;
});
// Post-render manipulation of edges
// TODO do all of this more efficiently, rather than
// using the direct D3 helper utilities to loop over each
// edge several times
let oldDrawEdgePaths = renderer.drawEdgePaths();
renderer.drawEdgePaths(function(graph, root) {
let svgEdges = oldDrawEdgePaths(graph, root);
svgEdges.attr("data-source", (n) => {
let edge = graph.edge(n);
return edge.source;
});
svgEdges.attr("data-target", (n) => {
let edge = graph.edge(n);
return edge.target;
});
svgEdges.attr("data-param", (n) => {
let edge = graph.edge(n);
return edge.param ? edge.param : null;
});
// We have to manually specify the default classes on the edges
// as to not overwrite them
let defaultClasses = "edgePath enter";
svgEdges.attr("class", (n) => {
let edge = graph.edge(n);
return defaultClasses + (edge.param ? (" param-connection " + edge.param) : "");
});
return svgEdges;
});
// Override Dagre-d3's post render function by passing in our own.
// This way we can leave styles out of it.
renderer.postRender((graph, root) => {
// We have to manually set the marker styling since we cannot
// do this currently with CSS, although it is in spec for SVG2
// https://svgwg.org/svg2-draft/painting.html#VertexMarkerProperties
// For now, manually set it on creation, and the `_onThemeChange`
// function will fire when the devtools theme changes to update the
// styling manually.
let theme = Services.prefs.getCharPref("devtools.theme");
let markerColor = MARKER_STYLING[theme];
if (graph.isDirected() && root.select("#arrowhead").empty()) {
root
.append("svg:defs")
.append("svg:marker")
.attr("id", "arrowhead")
.attr("viewBox", "0 0 10 10")
.attr("refX", ARROW_WIDTH)
.attr("refY", ARROW_HEIGHT)
.attr("markerUnits", "strokewidth")
.attr("markerWidth", ARROW_WIDTH)
.attr("markerHeight", ARROW_HEIGHT)
.attr("orient", "auto")
.attr("style", "fill: " + markerColor)
.append("svg:path")
.attr("d", "M 0 0 L 10 5 L 0 10 z");
}
// Reselect the previously selected audio node
let currentNode = InspectorView.getCurrentAudioNode();
if (currentNode) {
this.focusNode(currentNode.id);
}
// Fire an event upon completed rendering, with extra information
// if in testing mode only.
let info = {};
if (gDevTools.testing) {
info = gAudioNodes.getInfo();
}
window.emit(EVENTS.UI_GRAPH_RENDERED, info.nodes, info.edges, info.paramEdges);
});
let layout = dagreD3.layout().rankDir("LR");
renderer.layout(layout).run(graph, d3.select("#graph-target"));
// Handle the sliding and zooming of the graph,
// store as `this._zoomBinding` so we can unbind during destruction
if (!this._zoomBinding) {
this._zoomBinding = d3.behavior.zoom().on("zoom", function () {
var ev = d3.event;
d3.select("#graph-target")
.attr("transform", "translate(" + ev.translate + ") scale(" + ev.scale + ")");
});
d3.select("svg").call(this._zoomBinding);
// Set initial translation and scale -- this puts D3's awareness of
// the graph in sync with what the user sees originally.
this.resetGraphTransform();
}
},
/**
* Event handlers
*/
/**
* Called once "start-context" is fired, indicating that there is an audio
* context being created to view so render the graph.
*/
_onStartContext: function () {
this.draw();
},
/**
* Called when `gAudioNodes` fires an event -- most events (listed
* in GRAPH_REDRAW_EVENTS) qualify as a redraw event.
*/
_onEvent: function (eventName, ...args) {
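// `~indexOf(...)` is 0 (falsy) only when `eventName` is not in GRAPH_REDRAW_EVENTS.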
if (~GRAPH_REDRAW_EVENTS.indexOf(eventName)) {
this.draw();
}
},
_onNodeSelect: function (eventName, id) {
this.focusNode(id);
},
/**
* Fired when the devtools theme changes.
*/
_onThemeChange: function (eventName, theme) {
let markerColor = MARKER_STYLING[theme];
let marker = $("#arrowhead");
if (marker) {
marker.setAttribute("style", "fill: " + markerColor);
}
},
/**
* Fired when a node in the svg graph is clicked. Used to handle triggering the AudioNodePane.
*
* @param Event e
* Click event.
*/
_onGraphNodeClick: function (e) {
let node = findGraphNodeParent(e.target);
// If node not found (clicking outside of an audio node in the graph),
// then ignore this event
if (!node)
return;
window.emit(EVENTS.UI_SELECT_NODE, node.getAttribute("data-id"));
}
};
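`ContextView.draw` is wrapped with the SDK's `debounce` in `initialize` before being used as an event handler, so a burst of add/connect/disconnect/remove events produces one redraw. A rough, self-contained stand-in for that helper (an assumption about its behavior, not the SDK source):

function debounceSketch(fn, wait) {
  let timer = null;
  return function (...args) {
    // Restart the timer on every call; only the last call in a burst fires.
    if (timer) {
      clearTimeout(timer);
    }
    timer = setTimeout(() => {
      timer = null;
      fn.apply(this, args);
    }, wait);
  };
}

// e.g. three routing changes in quick succession still log "redraw" once:
let redraw = debounceSketch(() => console.log("redraw"), 100);
redraw(); redraw(); redraw();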

View File

@ -0,0 +1,240 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
Cu.import("resource:///modules/devtools/VariablesView.jsm");
Cu.import("resource:///modules/devtools/VariablesViewController.jsm");
// Strings for rendering
const EXPAND_INSPECTOR_STRING = L10N.getStr("expandInspector");
const COLLAPSE_INSPECTOR_STRING = L10N.getStr("collapseInspector");
// Store width as a preference rather than hardcode
// TODO bug 1009056
const INSPECTOR_WIDTH = 300;
const GENERIC_VARIABLES_VIEW_SETTINGS = {
searchEnabled: false,
editableValueTooltip: "",
editableNameTooltip: "",
preventDisableOnChange: true,
preventDescriptorModifiers: false,
eval: () => {}
};
/**
* Functions handling the audio node inspector UI.
*/
let InspectorView = {
_currentNode: null,
// Set up config for view toggling
_collapseString: COLLAPSE_INSPECTOR_STRING,
_expandString: EXPAND_INSPECTOR_STRING,
_toggleEvent: EVENTS.UI_INSPECTOR_TOGGLED,
_animated: true,
_delayed: true,
/**
* Initialization function called when the tool starts up.
*/
initialize: function () {
this._tabsPane = $("#web-audio-editor-tabs");
// Set up view controller
this.el = $("#web-audio-inspector");
this.el.setAttribute("width", INSPECTOR_WIDTH);
this.button = $("#inspector-pane-toggle");
mixin(this, ToggleMixin);
this.bindToggle();
// Hide inspector view on startup
this.hideImmediately();
this._onEval = this._onEval.bind(this);
this._onNodeSelect = this._onNodeSelect.bind(this);
this._onDestroyNode = this._onDestroyNode.bind(this);
this._propsView = new VariablesView($("#properties-tabpanel-content"), GENERIC_VARIABLES_VIEW_SETTINGS);
this._propsView.eval = this._onEval;
window.on(EVENTS.UI_SELECT_NODE, this._onNodeSelect);
gAudioNodes.on("remove", this._onDestroyNode);
},
/**
* Destruction function called when the tool cleans up.
*/
destroy: function () {
this.unbindToggle();
window.off(EVENTS.UI_SELECT_NODE, this._onNodeSelect);
gAudioNodes.off("remove", this._onDestroyNode);
this.el = null;
this.button = null;
this._tabsPane = null;
},
/**
* Takes an AudioNodeView `node`, sets it as the current
* node, and scaffolds the inspector view based on the new node.
*/
setCurrentAudioNode: function (node) {
this._currentNode = node || null;
// If no node selected, set the inspector back to "no AudioNode selected"
// view.
if (!node) {
$("#web-audio-editor-details-pane-empty").removeAttribute("hidden");
$("#web-audio-editor-tabs").setAttribute("hidden", "true");
window.emit(EVENTS.UI_INSPECTOR_NODE_SET, null);
}
// Otherwise load up the tabs view and hide the empty placeholder
else {
$("#web-audio-editor-details-pane-empty").setAttribute("hidden", "true");
$("#web-audio-editor-tabs").removeAttribute("hidden");
this._setTitle();
this._buildPropertiesView()
.then(() => window.emit(EVENTS.UI_INSPECTOR_NODE_SET, this._currentNode.id));
}
},
/**
* Returns the current AudioNodeView.
*/
getCurrentAudioNode: function () {
return this._currentNode;
},
/**
* Empties out the props view.
*/
resetUI: function () {
this._propsView.empty();
// Set current node to empty to load empty view
this.setCurrentAudioNode();
// Reset AudioNode inspector and hide
this.hideImmediately();
},
/**
* Sets the title of the Inspector view
*/
_setTitle: function () {
let node = this._currentNode;
let title = node.type.replace(/Node$/, "");
$("#web-audio-inspector-title").setAttribute("value", title);
},
/**
* Reconstructs the `Properties` tab in the inspector
* with `this._currentNode` as its source.
*/
_buildPropertiesView: Task.async(function* () {
let propsView = this._propsView;
let node = this._currentNode;
propsView.empty();
let audioParamsScope = propsView.addScope("AudioParams");
let props = yield node.getParams();
// Disable AudioParams VariableView expansion
// when there are no props i.e. AudioDestinationNode
this._togglePropertiesView(!!props.length);
props.forEach(({ param, value, flags }) => {
let descriptor = {
value: value,
writable: !flags || !flags.readonly,
};
audioParamsScope.addItem(param, descriptor);
});
audioParamsScope.expanded = true;
window.emit(EVENTS.UI_PROPERTIES_TAB_RENDERED, node.id);
}),
_togglePropertiesView: function (show) {
let propsView = $("#properties-tabpanel-content");
let emptyView = $("#properties-tabpanel-content-empty");
(show ? propsView : emptyView).removeAttribute("hidden");
(show ? emptyView : propsView).setAttribute("hidden", "true");
},
/**
* Returns the scope for AudioParams in the
* VariablesView.
*
* @return Scope
*/
_getAudioPropertiesScope: function () {
return this._propsView.getScopeAtIndex(0);
},
/**
* Event handlers
*/
/**
* Executed when an audio prop is changed in the UI.
*/
_onEval: Task.async(function* (variable, value) {
let ownerScope = variable.ownerView;
let node = this._currentNode;
let propName = variable.name;
let error;
if (!variable._initialDescriptor.writable) {
error = new Error("Variable " + propName + " is not writable.");
} else {
// Cast value to proper type
try {
let number = parseFloat(value);
if (!isNaN(number)) {
value = number;
} else {
value = JSON.parse(value);
}
error = yield node.actor.setParam(propName, value);
}
catch (e) {
error = e;
}
}
// TODO figure out how to handle and display set prop errors
// and enable `test/browser_wa_properties-view-edit.js`
// Bug 994258
if (!error) {
ownerScope.get(propName).setGrip(value);
window.emit(EVENTS.UI_SET_PARAM, node.id, propName, value);
} else {
window.emit(EVENTS.UI_SET_PARAM_ERROR, node.id, propName, value);
}
}),
/**
* Called on EVENTS.UI_SELECT_NODE, and takes an actorID `id`
* and calls `setCurrentAudioNode` to scaffold the inspector view.
*/
_onNodeSelect: function (_, id) {
this.setCurrentAudioNode(gAudioNodes.get(id));
// Ensure inspector is visible when selecting a new node
this.show();
},
/**
* Called when `DESTROY_NODE` is fired to remove the node from props view if
* it's currently selected.
*/
_onDestroyNode: function (node) {
if (this._currentNode && this._currentNode.id === node.id) {
this.setCurrentAudioNode(null);
}
}
};
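The casting strategy in `_onEval` above tries a float first and falls back to `JSON.parse` for everything else (booleans, strings, arrays). A small standalone sketch of that logic:

function castParamValue(value) {
  // Numeric AudioParam values ("440", "0.5") win first.
  let number = parseFloat(value);
  if (!isNaN(number)) {
    return number;
  }
  // Otherwise parse as JSON; malformed input throws and is surfaced
  // as a set-param error by the caller.
  return JSON.parse(value);
}

console.log(castParamValue("440"));    // 440
console.log(castParamValue("true"));   // true
console.log(castParamValue('"sine"')); // "sine"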

View File

@ -0,0 +1,103 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
/**
* Takes an element in an SVG graph and iterates over
* ancestors until it finds the graph node container. If not found,
* returns null.
*/
function findGraphNodeParent (el) {
// Some targets may not contain `classList` property
if (!el.classList)
return null;
while (!el.classList.contains("nodes")) {
if (el.classList.contains("audionode"))
return el;
else
el = el.parentNode;
}
return null;
}
/**
* Object for use with `mix` into a view.
* Must have the following properties defined on the view:
* - `el`
* - `button`
* - `_collapseString`
* - `_expandString`
* - `_toggleEvent`
*
* Optional properties on the view can be defined to specify default
* visibility options.
* - `_animated`
* - `_delayed`
*/
let ToggleMixin = {
bindToggle: function () {
this._onToggle = this._onToggle.bind(this);
this.button.addEventListener("mousedown", this._onToggle, false);
},
unbindToggle: function () {
this.button.removeEventListener("mousedown", this._onToggle);
},
show: function () {
this._viewController({ visible: true });
},
hide: function () {
this._viewController({ visible: false });
},
hideImmediately: function () {
this._viewController({ visible: false, delayed: false, animated: false });
},
/**
* Returns a boolean indicating whether or not the view
* is currently being shown.
*/
isVisible: function () {
return !this.el.hasAttribute("pane-collapsed");
},
/**
* Toggles the visibility of the view.
*
* @param object visible
* - visible: boolean indicating whether the panel should be shown or not
* - animated: boolean indicating whether the pane should be animated
* - delayed: boolean indicating whether the pane's opening should wait
* a few cycles or not
*/
_viewController: function ({ visible, animated, delayed }) {
let flags = {
visible: visible,
animated: animated != null ? animated : !!this._animated,
delayed: delayed != null ? delayed : !!this._delayed,
callback: () => window.emit(this._toggleEvent, visible)
};
ViewHelpers.togglePane(flags, this.el);
if (flags.visible) {
this.button.removeAttribute("pane-collapsed");
this.button.setAttribute("tooltiptext", this._collapseString);
}
else {
this.button.setAttribute("pane-collapsed", "");
this.button.setAttribute("tooltiptext", this._expandString);
}
},
_onToggle: function () {
this._viewController({ visible: !this.isVisible() });
}
}
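`ToggleMixin` above is attached to a view via `mixin(this, ToggleMixin)` (see `InspectorView.initialize` earlier in this commit). The `mixin` helper itself is not shown in this patch, so the sketch below only assumes it copies the mixin's enumerable properties onto the view before `bindToggle` is called:

function mixinSketch(target, source) {
  // Copy each enumerable property (methods included) onto the target view.
  for (let key of Object.keys(source)) {
    target[key] = source[key];
  }
  return target;
}

// After the copy, a plain object exposes the mixin's methods:
let view = mixinSketch({ el: null, button: null }, { isVisible() { return true; } });
console.log(typeof view.isVisible); // "function"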

View File

@ -1,428 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource:///modules/devtools/ViewHelpers.jsm");
Cu.import("resource:///modules/devtools/gDevTools.jsm");
// Override DOM promises with Promise.jsm helpers
const { defer, all } = Cu.import("resource://gre/modules/Promise.jsm", {}).Promise;
const { Task } = Cu.import("resource://gre/modules/Task.jsm", {});
const require = Cu.import("resource://gre/modules/devtools/Loader.jsm", {}).devtools.require;
const EventEmitter = require("devtools/toolkit/event-emitter");
const STRINGS_URI = "chrome://browser/locale/devtools/webaudioeditor.properties"
const L10N = new ViewHelpers.L10N(STRINGS_URI);
const Telemetry = require("devtools/shared/telemetry");
const telemetry = new Telemetry();
let { console } = Cu.import("resource://gre/modules/devtools/Console.jsm", {});
// The panel's window global is an EventEmitter firing the following events:
const EVENTS = {
// Fired when the first AudioNode has been created, signifying
// that the AudioContext is being used and should be tracked via the editor.
START_CONTEXT: "WebAudioEditor:StartContext",
// On node creation, connect and disconnect.
CREATE_NODE: "WebAudioEditor:CreateNode",
CONNECT_NODE: "WebAudioEditor:ConnectNode",
DISCONNECT_NODE: "WebAudioEditor:DisconnectNode",
// When a node gets GC'd.
DESTROY_NODE: "WebAudioEditor:DestroyNode",
// On a node parameter's change.
CHANGE_PARAM: "WebAudioEditor:ChangeParam",
// When the devtools theme changes.
THEME_CHANGE: "WebAudioEditor:ThemeChange",
// When the UI is reset from tab navigation.
UI_RESET: "WebAudioEditor:UIReset",
// When a param has been changed via the UI and successfully
// pushed via the actor to the raw audio node.
UI_SET_PARAM: "WebAudioEditor:UISetParam",
// When a node is to be set in the InspectorView.
UI_SELECT_NODE: "WebAudioEditor:UISelectNode",
// When the inspector is finished setting a new node.
UI_INSPECTOR_NODE_SET: "WebAudioEditor:UIInspectorNodeSet",
// When the inspector is finished rendering in or out of view.
UI_INSPECTOR_TOGGLED: "WebAudioEditor:UIInspectorToggled",
// When an audio node is finished loading in the Properties tab.
UI_PROPERTIES_TAB_RENDERED: "WebAudioEditor:UIPropertiesTabRendered",
// When the Audio Context graph finishes rendering.
// Is called with two arguments, first representing number of nodes
// rendered, second being the number of edge connections rendering (not counting
// param edges), followed by the count of the param edges rendered.
UI_GRAPH_RENDERED: "WebAudioEditor:UIGraphRendered"
};
/**
* The current target and the Web Audio Editor front, set by this tool's host.
*/
let gToolbox, gTarget, gFront;
/**
* Track an array of audio nodes
*/
let AudioNodes = [];
let AudioNodeConnections = new WeakMap(); // <AudioNodeView, Set<AudioNodeView>>
let AudioParamConnections = new WeakMap(); // <AudioNodeView, Object>
// Light representation wrapping an AudioNode actor with additional properties
function AudioNodeView (actor) {
this.actor = actor;
this.id = actor.actorID;
}
// A proxy for the underlying AudioNodeActor to fetch its type
// and subsequently assign the type to the instance.
AudioNodeView.prototype.getType = Task.async(function* () {
this.type = yield this.actor.getType();
return this.type;
});
// Helper method to create connections in the AudioNodeConnections
// WeakMap for rendering. Returns a boolean indicating
// if the connection was successfully created. Will return `false`
// when the connection was previously made.
AudioNodeView.prototype.connect = function (destination) {
let connections = AudioNodeConnections.get(this) || new Set();
AudioNodeConnections.set(this, connections);
// Don't duplicate add.
if (!connections.has(destination)) {
connections.add(destination);
return true;
}
return false;
};
// Helper method to create connections in the AudioNodeConnections
// WeakMap for rendering. Returns a boolean indicating
// if the connection was successfully created. Will return `false`
// when the connection was previously made.
AudioNodeView.prototype.connectParam = function (destination, param) {
let connections = AudioParamConnections.get(this) || {};
AudioParamConnections.set(this, connections);
let params = connections[destination.id] = connections[destination.id] || [];
if (!~params.indexOf(param)) {
params.push(param);
return true;
}
return false;
};
// Helper method to remove audio connections from the current AudioNodeView
AudioNodeView.prototype.disconnect = function () {
AudioNodeConnections.set(this, new Set());
AudioParamConnections.set(this, {});
};
// Returns a promise that resolves to an array of objects containing
// both a `param` name property and a `value` property.
AudioNodeView.prototype.getParams = function () {
return this.actor.getParams();
};
/**
* Initializes the web audio editor views
*/
function startupWebAudioEditor() {
return all([
WebAudioEditorController.initialize(),
WebAudioGraphView.initialize(),
WebAudioInspectorView.initialize(),
]);
}
/**
* Destroys the web audio editor controller and views.
*/
function shutdownWebAudioEditor() {
return all([
WebAudioEditorController.destroy(),
WebAudioGraphView.destroy(),
WebAudioInspectorView.destroy(),
]);
}
/**
* Functions handling target-related lifetime events.
*/
let WebAudioEditorController = {
/**
* Listen for events emitted by the current tab target.
*/
initialize: function() {
telemetry.toolOpened("webaudioeditor");
this._onTabNavigated = this._onTabNavigated.bind(this);
this._onThemeChange = this._onThemeChange.bind(this);
gTarget.on("will-navigate", this._onTabNavigated);
gTarget.on("navigate", this._onTabNavigated);
gFront.on("start-context", this._onStartContext);
gFront.on("create-node", this._onCreateNode);
gFront.on("connect-node", this._onConnectNode);
gFront.on("connect-param", this._onConnectParam);
gFront.on("disconnect-node", this._onDisconnectNode);
gFront.on("change-param", this._onChangeParam);
gFront.on("destroy-node", this._onDestroyNode);
// Hook into theme change so we can change
// the graph's marker styling, since we can't do this
// with CSS
gDevTools.on("pref-changed", this._onThemeChange);
// Set up events to refresh the Graph view
window.on(EVENTS.CREATE_NODE, this._onUpdatedContext);
window.on(EVENTS.CONNECT_NODE, this._onUpdatedContext);
window.on(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
window.on(EVENTS.DESTROY_NODE, this._onUpdatedContext);
window.on(EVENTS.CONNECT_PARAM, this._onUpdatedContext);
},
/**
* Remove events emitted by the current tab target.
*/
destroy: function() {
telemetry.toolClosed("webaudioeditor");
gTarget.off("will-navigate", this._onTabNavigated);
gTarget.off("navigate", this._onTabNavigated);
gFront.off("start-context", this._onStartContext);
gFront.off("create-node", this._onCreateNode);
gFront.off("connect-node", this._onConnectNode);
gFront.off("connect-param", this._onConnectParam);
gFront.off("disconnect-node", this._onDisconnectNode);
gFront.off("change-param", this._onChangeParam);
gFront.off("destroy-node", this._onDestroyNode);
window.off(EVENTS.CREATE_NODE, this._onUpdatedContext);
window.off(EVENTS.CONNECT_NODE, this._onUpdatedContext);
window.off(EVENTS.DISCONNECT_NODE, this._onUpdatedContext);
window.off(EVENTS.DESTROY_NODE, this._onUpdatedContext);
window.off(EVENTS.CONNECT_PARAM, this._onUpdatedContext);
gDevTools.off("pref-changed", this._onThemeChange);
},
/**
* Called when page is reloaded to show the reload notice and waiting
* for an audio context notice.
*/
reset: function () {
$("#content").hidden = true;
WebAudioGraphView.resetUI();
WebAudioInspectorView.resetUI();
},
/**
* Called when a new audio node is created, or the audio context
* routing changes.
*/
_onUpdatedContext: function () {
WebAudioGraphView.draw();
},
/**
* Fired when the devtools theme changes (light, dark, etc.)
* so that the graph can update marker styling, as that
* cannot currently be done with CSS.
*/
_onThemeChange: function (event, data) {
window.emit(EVENTS.THEME_CHANGE, data.newValue);
},
/**
* Called for each location change in the debugged tab.
*/
_onTabNavigated: Task.async(function* (event, {isFrameSwitching}) {
switch (event) {
case "will-navigate": {
// Make sure the backend is prepared to handle audio contexts.
if (!isFrameSwitching) {
yield gFront.setup({ reload: false });
}
// Clear out current UI.
this.reset();
// When switching to an iframe, ensure displaying the reload button.
// As the document has already been loaded without being hooked.
if (isFrameSwitching) {
$("#reload-notice").hidden = false;
$("#waiting-notice").hidden = true;
} else {
// Otherwise, we are loading a new top level document,
// so we don't need to reload anymore and should receive
// new node events.
$("#reload-notice").hidden = true;
$("#waiting-notice").hidden = false;
}
// Clear out stored audio nodes
AudioNodes.length = 0;
AudioNodeConnections.clear();
window.emit(EVENTS.UI_RESET);
break;
}
case "navigate": {
// TODO Case of bfcache, needs investigating
// bug 994250
break;
}
}
}),
/**
* Called after the first audio node is created in an audio context,
* signaling that the audio context is being used.
*/
_onStartContext: function() {
$("#reload-notice").hidden = true;
$("#waiting-notice").hidden = true;
$("#content").hidden = false;
window.emit(EVENTS.START_CONTEXT);
},
/**
* Called when a new node is created. Creates an `AudioNodeView` instance
* for tracking throughout the editor.
*/
_onCreateNode: Task.async(function* (nodeActor) {
let node = new AudioNodeView(nodeActor);
yield node.getType();
AudioNodes.push(node);
window.emit(EVENTS.CREATE_NODE, node.id);
}),
/**
* Called on `destroy-node` when an AudioNode is GC'd. Removes
* from the AudioNode array and fires an event indicating the removal.
*/
_onDestroyNode: function (nodeActor) {
for (let i = 0; i < AudioNodes.length; i++) {
if (equalActors(AudioNodes[i].actor, nodeActor)) {
AudioNodes.splice(i, 1);
window.emit(EVENTS.DESTROY_NODE, nodeActor.actorID);
break;
}
}
},
/**
* Called when a node is connected to another node.
*/
_onConnectNode: Task.async(function* ({ source: sourceActor, dest: destActor }) {
let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);
// Connect nodes, and only emit if it's a new connection.
if (source.connect(dest)) {
window.emit(EVENTS.CONNECT_NODE, source.id, dest.id);
}
}),
/**
* Called when a node is conneceted to another node's AudioParam.
*/
_onConnectParam: Task.async(function* ({ source: sourceActor, dest: destActor, param }) {
let [source, dest] = yield waitForNodeCreation(sourceActor, destActor);
if (source.connectParam(dest, param)) {
window.emit(EVENTS.CONNECT_PARAM, source.id, dest.id, param);
}
}),
/**
* Called when a node is disconnected.
*/
_onDisconnectNode: function(nodeActor) {
let node = getViewNodeByActor(nodeActor);
node.disconnect();
window.emit(EVENTS.DISCONNECT_NODE, node.id);
},
/**
* Called when a node param is changed.
*/
_onChangeParam: function({ actor, param, value }) {
window.emit(EVENTS.CHANGE_PARAM, getViewNodeByActor(actor), param, value);
}
};
/**
* Convenient way of emitting events from the panel window.
*/
EventEmitter.decorate(this);
/**
* DOM query helper.
*/
function $(selector, target = document) { return target.querySelector(selector); }
function $$(selector, target = document) { return target.querySelectorAll(selector); }
/**
* Compare `actorID` between two actors to determine if they're corresponding
* to the same underlying actor.
*/
function equalActors (actor1, actor2) {
return actor1.actorID === actor2.actorID;
}
/**
* Returns the corresponding ViewNode by actor
*/
function getViewNodeByActor (actor) {
for (let i = 0; i < AudioNodes.length; i++) {
if (equalActors(AudioNodes[i].actor, actor))
return AudioNodes[i];
}
return null;
}
/**
* Returns the corresponding ViewNode by actorID
*/
function getViewNodeById (id) {
return getViewNodeByActor({ actorID: id });
}
// Since node create and connect are probably executed back to back,
// and the controller's `_onCreateNode` needs to look up type,
// the edge creation could be called before the graph node is actually
// created. This way, we can check and listen for the event before
// adding an edge.
function waitForNodeCreation (sourceActor, destActor) {
let deferred = defer();
let eventName = EVENTS.CREATE_NODE;
let source = getViewNodeByActor(sourceActor);
let dest = getViewNodeByActor(destActor);
if (!source || !dest)
window.on(eventName, function createNodeListener (_, id) {
let createdNode = getViewNodeById(id);
if (equalActors(sourceActor, createdNode.actor))
source = createdNode;
if (equalActors(destActor, createdNode.actor))
dest = createdNode;
if (source && dest) {
window.off(eventName, createNodeListener);
deferred.resolve([source, dest]);
}
});
else
deferred.resolve([source, dest]);
return deferred.promise;
}

View File

@ -1,636 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
Cu.import("resource:///modules/devtools/VariablesView.jsm");
Cu.import("resource:///modules/devtools/VariablesViewController.jsm");
const { debounce } = require("sdk/lang/functional");
// Strings for rendering
const EXPAND_INSPECTOR_STRING = L10N.getStr("expandInspector");
const COLLAPSE_INSPECTOR_STRING = L10N.getStr("collapseInspector");
// Store width as a preference rather than hardcode
// TODO bug 1009056
const INSPECTOR_WIDTH = 300;
// Globals for d3 stuff
// Default properties of the graph on rerender
const GRAPH_DEFAULTS = {
translate: [20, 20],
scale: 1
};
// Sizes of SVG arrows in graph
const ARROW_HEIGHT = 5;
const ARROW_WIDTH = 8;
// Styles for markers as they cannot be done with CSS.
const MARKER_STYLING = {
light: "#AAA",
dark: "#CED3D9"
};
const GRAPH_DEBOUNCE_TIMER = 100;
const GENERIC_VARIABLES_VIEW_SETTINGS = {
searchEnabled: false,
editableValueTooltip: "",
editableNameTooltip: "",
preventDisableOnChange: true,
preventDescriptorModifiers: false,
eval: () => {}
};
/**
* Functions handling the graph UI.
*/
let WebAudioGraphView = {
/**
* Initialization function, called when the tool is started.
*/
initialize: function() {
this._onGraphNodeClick = this._onGraphNodeClick.bind(this);
this._onThemeChange = this._onThemeChange.bind(this);
this._onNodeSelect = this._onNodeSelect.bind(this);
this._onStartContext = this._onStartContext.bind(this);
this._onDestroyNode = this._onDestroyNode.bind(this);
this.draw = debounce(this.draw.bind(this), GRAPH_DEBOUNCE_TIMER);
$('#graph-target').addEventListener('click', this._onGraphNodeClick, false);
window.on(EVENTS.THEME_CHANGE, this._onThemeChange);
window.on(EVENTS.UI_INSPECTOR_NODE_SET, this._onNodeSelect);
window.on(EVENTS.START_CONTEXT, this._onStartContext);
window.on(EVENTS.DESTROY_NODE, this._onDestroyNode);
},
/**
* Destruction function, called when the tool is closed.
*/
destroy: function() {
if (this._zoomBinding) {
this._zoomBinding.on("zoom", null);
}
$('#graph-target').removeEventListener('click', this._onGraphNodeClick, false);
window.off(EVENTS.THEME_CHANGE, this._onThemeChange);
window.off(EVENTS.UI_INSPECTOR_NODE_SET, this._onNodeSelect);
window.off(EVENTS.START_CONTEXT, this._onStartContext);
window.off(EVENTS.DESTROY_NODE, this._onDestroyNode);
},
/**
* Called when a page is reloaded and waiting for a "start-context" event
* and clears out old content
*/
resetUI: function () {
this.clearGraph();
this.resetGraphPosition();
},
/**
* Clears out the rendered graph, called when resetting the SVG elements to draw again,
* or when resetting the entire UI tool
*/
clearGraph: function () {
$("#graph-target").innerHTML = "";
},
/**
* Moves the graph back to its original scale and translation.
*/
resetGraphPosition: function () {
if (this._zoomBinding) {
let { translate, scale } = GRAPH_DEFAULTS;
// Must set the `zoomBinding` so the next `zoom` event is in sync with
// where the graph is visually (set by the `transform` attribute).
this._zoomBinding.scale(scale);
this._zoomBinding.translate(translate);
d3.select("#graph-target")
.attr("transform", "translate(" + translate + ") scale(" + scale + ")");
}
},
getCurrentScale: function () {
return this._zoomBinding ? this._zoomBinding.scale() : null;
},
getCurrentTranslation: function () {
return this._zoomBinding ? this._zoomBinding.translate() : null;
},
/**
* Makes the corresponding graph node appear "focused", removing
* focused styles from all other nodes. If no `actorID` specified,
* make all nodes appear unselected.
* Called from UI_INSPECTOR_NODE_SELECT.
*/
focusNode: function (actorID) {
// Remove class "selected" from all nodes
Array.forEach($$(".nodes > g"), $node => $node.classList.remove("selected"));
// Add to "selected"
if (actorID) {
this._getNodeByID(actorID).classList.add("selected");
}
},
/**
* Takes an actorID and returns the corresponding DOM SVG element in the graph
*/
_getNodeByID: function (actorID) {
return $(".nodes > g[data-id='" + actorID + "']");
},
/**
* `draw` renders the ViewNodes currently available in `AudioNodes` with `AudioNodeConnections`,
* and `AudioParamConnections` and is throttled to be called at most every
* `GRAPH_DEBOUNCE_TIMER` milliseconds. Is called whenever the audio context routing changes,
* after being debounced.
*/
draw: function () {
// Clear out previous SVG information
this.clearGraph();
let graph = new dagreD3.Digraph();
// An array of duples/tuples of pairs [sourceNode, destNode, param].
// `param` is optional, indicating a connection to an AudioParam, rather than
// an other AudioNode.
let edges = [];
AudioNodes.forEach(node => {
// Add node to graph
graph.addNode(node.id, {
type: node.type, // Just for storing type data
label: node.type.replace(/Node$/, ""), // Displayed in SVG node
id: node.id // Identification
});
// Add all of the connections from this node to the edge array to be added
// after all the nodes are added, otherwise edges will attempted to be created
// for nodes that have not yet been added
AudioNodeConnections.get(node, new Set()).forEach(dest => edges.push([node, dest]));
let paramConnections = AudioParamConnections.get(node, {});
Object.keys(paramConnections).forEach(destId => {
let dest = getViewNodeById(destId);
let connections = paramConnections[destId] || [];
connections.forEach(param => edges.push([node, dest, param]));
});
});
edges.forEach(([node, dest, param]) => {
let options = {
source: node.id,
target: dest.id
};
// Only add `label` if `param` specified, as this is an AudioParam connection then.
// `label` adds the magic to render with dagre-d3, and `param` is just more explicitly
// the param, ignoring implementation details.
if (param) {
options.label = param;
options.param = param;
}
graph.addEdge(null, node.id, dest.id, options);
});
let renderer = new dagreD3.Renderer();
// Post-render manipulation of the nodes
let oldDrawNodes = renderer.drawNodes();
renderer.drawNodes(function(graph, root) {
let svgNodes = oldDrawNodes(graph, root);
svgNodes.attr("class", (n) => {
let node = graph.node(n);
return "audionode type-" + node.type;
});
svgNodes.attr("data-id", (n) => {
let node = graph.node(n);
return node.id;
});
return svgNodes;
});
// Post-render manipulation of edges
// TODO do all of this more efficiently, rather than
// using the direct D3 helper utilities to loop over each
// edge several times
let oldDrawEdgePaths = renderer.drawEdgePaths();
renderer.drawEdgePaths(function(graph, root) {
let svgEdges = oldDrawEdgePaths(graph, root);
svgEdges.attr("data-source", (n) => {
let edge = graph.edge(n);
return edge.source;
});
svgEdges.attr("data-target", (n) => {
let edge = graph.edge(n);
return edge.target;
});
svgEdges.attr("data-param", (n) => {
let edge = graph.edge(n);
return edge.param ? edge.param : null;
});
// We have to manually specify the default classes on the edges
// as to not overwrite them
let defaultClasses = "edgePath enter";
svgEdges.attr("class", (n) => {
let edge = graph.edge(n);
return defaultClasses + (edge.param ? (" param-connection " + edge.param) : "");
});
return svgEdges;
});
// Override Dagre-d3's post render function by passing in our own.
// This way we can leave styles out of it.
renderer.postRender((graph, root) => {
// We have to manually set the marker styling since we cannot
// do this currently with CSS, although it is in spec for SVG2
// https://svgwg.org/svg2-draft/painting.html#VertexMarkerProperties
// For now, manually set it on creation, and the `_onThemeChange`
// function will fire when the devtools theme changes to update the
// styling manually.
let theme = Services.prefs.getCharPref("devtools.theme");
let markerColor = MARKER_STYLING[theme];
if (graph.isDirected() && root.select("#arrowhead").empty()) {
root
.append("svg:defs")
.append("svg:marker")
.attr("id", "arrowhead")
.attr("viewBox", "0 0 10 10")
.attr("refX", ARROW_WIDTH)
.attr("refY", ARROW_HEIGHT)
.attr("markerUnits", "strokewidth")
.attr("markerWidth", ARROW_WIDTH)
.attr("markerHeight", ARROW_HEIGHT)
.attr("orient", "auto")
.attr("style", "fill: " + markerColor)
.append("svg:path")
.attr("d", "M 0 0 L 10 5 L 0 10 z");
}
// Reselect the previously selected audio node
let currentNode = WebAudioInspectorView.getCurrentAudioNode();
if (currentNode) {
this.focusNode(currentNode.id);
}
// Fire an event upon completed rendering
let paramEdgeCount = edges.filter(p => !!p[2]).length;
window.emit(EVENTS.UI_GRAPH_RENDERED, AudioNodes.length, edges.length - paramEdgeCount, paramEdgeCount);
});
let layout = dagreD3.layout().rankDir("LR");
renderer.layout(layout).run(graph, d3.select("#graph-target"));
// Handle the sliding and zooming of the graph,
// store as `this._zoomBinding` so we can unbind during destruction
if (!this._zoomBinding) {
this._zoomBinding = d3.behavior.zoom().on("zoom", function () {
var ev = d3.event;
d3.select("#graph-target")
.attr("transform", "translate(" + ev.translate + ") scale(" + ev.scale + ")");
});
d3.select("svg").call(this._zoomBinding);
// Set initial translation and scale -- this puts D3's awareness of
// the graph in sync with what the user sees originally.
this.resetGraphPosition();
}
},
/**
* Event handlers
*/
/**
* Called once "start-context" is fired, indicating that there is an audio
* context being created to view so render the graph.
*/
_onStartContext: function () {
this.draw();
},
/**
* Called when a node gets GC'd -- redraws the graph.
*/
_onDestroyNode: function () {
this.draw();
},
_onNodeSelect: function (eventName, id) {
this.focusNode(id);
},
/**
* Fired when the devtools theme changes.
*/
_onThemeChange: function (eventName, theme) {
let markerColor = MARKER_STYLING[theme];
let marker = $("#arrowhead");
if (marker) {
marker.setAttribute("style", "fill: " + markerColor);
}
},
/**
* Fired when a node in the svg graph is clicked. Used to handle triggering the AudioNodePane.
*
* @param Event e
* Click event.
*/
_onGraphNodeClick: function (e) {
let node = findGraphNodeParent(e.target);
// If node not found (clicking outside of an audio node in the graph),
// then ignore this event
if (!node)
return;
window.emit(EVENTS.UI_SELECT_NODE, node.getAttribute("data-id"));
}
};
let WebAudioInspectorView = {
_propsView: null,
_currentNode: null,
_inspectorPane: null,
_inspectorPaneToggleButton: null,
_tabsPane: null,
/**
* Initialization function called when the tool starts up.
*/
initialize: function () {
this._inspectorPane = $("#web-audio-inspector");
this._inspectorPaneToggleButton = $("#inspector-pane-toggle");
this._tabsPane = $("#web-audio-editor-tabs");
// Hide inspector view on startup
this._inspectorPane.setAttribute("width", INSPECTOR_WIDTH);
this.toggleInspector({ visible: false, delayed: false, animated: false });
this._onEval = this._onEval.bind(this);
this._onNodeSelect = this._onNodeSelect.bind(this);
this._onTogglePaneClick = this._onTogglePaneClick.bind(this);
this._onDestroyNode = this._onDestroyNode.bind(this);
this._inspectorPaneToggleButton.addEventListener("mousedown", this._onTogglePaneClick, false);
this._propsView = new VariablesView($("#properties-tabpanel-content"), GENERIC_VARIABLES_VIEW_SETTINGS);
this._propsView.eval = this._onEval;
window.on(EVENTS.UI_SELECT_NODE, this._onNodeSelect);
window.on(EVENTS.DESTROY_NODE, this._onDestroyNode);
},
/**
* Destruction function called when the tool cleans up.
*/
destroy: function () {
this._inspectorPaneToggleButton.removeEventListener("mousedown", this._onTogglePaneClick);
window.off(EVENTS.UI_SELECT_NODE, this._onNodeSelect);
window.off(EVENTS.DESTROY_NODE, this._onDestroyNode);
this._inspectorPane = null;
this._inspectorPaneToggleButton = null;
this._tabsPane = null;
},
/**
* Toggles the visibility of the AudioNode Inspector.
*
* @param object visible
* - visible: boolean indicating whether the panel should be shown or not
* - animated: boolean indiciating whether the pane should be animated
* - delayed: boolean indicating whether the pane's opening should wait
* a few cycles or not
* - index: the index of the tab to be selected inside the inspector
* @param number index
* Index of the tab that should be selected when shown.
*/
toggleInspector: function ({ visible, animated, delayed, index }) {
let pane = this._inspectorPane;
let button = this._inspectorPaneToggleButton;
let flags = {
visible: visible,
animated: animated != null ? animated : true,
delayed: delayed != null ? delayed : true,
callback: () => window.emit(EVENTS.UI_INSPECTOR_TOGGLED, visible)
};
ViewHelpers.togglePane(flags, pane);
if (flags.visible) {
button.removeAttribute("pane-collapsed");
button.setAttribute("tooltiptext", COLLAPSE_INSPECTOR_STRING);
}
else {
button.setAttribute("pane-collapsed", "");
button.setAttribute("tooltiptext", EXPAND_INSPECTOR_STRING);
}
if (index != undefined) {
pane.selectedIndex = index;
}
},
/**
* Returns a boolean indicating whether or not the AudioNode inspector
* is currently being shown.
*/
isVisible: function () {
return !this._inspectorPane.hasAttribute("pane-collapsed");
},
/**
* Takes a AudioNodeView `node` and sets it as the current
* node and scaffolds the inspector view based off of the new node.
*/
setCurrentAudioNode: function (node) {
this._currentNode = node || null;
// If no node selected, set the inspector back to "no AudioNode selected"
// view.
if (!node) {
$("#web-audio-editor-details-pane-empty").removeAttribute("hidden");
$("#web-audio-editor-tabs").setAttribute("hidden", "true");
window.emit(EVENTS.UI_INSPECTOR_NODE_SET, null);
}
// Otherwise load up the tabs view and hide the empty placeholder
else {
$("#web-audio-editor-details-pane-empty").setAttribute("hidden", "true");
$("#web-audio-editor-tabs").removeAttribute("hidden");
this._setTitle();
this._buildPropertiesView()
.then(() => window.emit(EVENTS.UI_INSPECTOR_NODE_SET, this._currentNode.id));
}
},
/**
* Returns the current AudioNodeView.
*/
getCurrentAudioNode: function () {
return this._currentNode;
},
/**
* Empties out the props view.
*/
resetUI: function () {
this._propsView.empty();
// Set current node to empty to load empty view
this.setCurrentAudioNode();
// Reset AudioNode inspector and hide
this.toggleInspector({ visible: false, animated: false, delayed: false });
},
/**
* Sets the title of the Inspector view
*/
_setTitle: function () {
let node = this._currentNode;
let title = node.type.replace(/Node$/, "");
$("#web-audio-inspector-title").setAttribute("value", title);
},
/**
* Reconstructs the `Properties` tab in the inspector
* with the `this._currentNode` as it's source.
*/
_buildPropertiesView: Task.async(function* () {
let propsView = this._propsView;
let node = this._currentNode;
propsView.empty();
let audioParamsScope = propsView.addScope("AudioParams");
let props = yield node.getParams();
// Disable AudioParams VariableView expansion
// when there are no props i.e. AudioDestinationNode
this._togglePropertiesView(!!props.length);
props.forEach(({ param, value, flags }) => {
let descriptor = {
value: value,
writable: !flags || !flags.readonly,
};
audioParamsScope.addItem(param, descriptor);
});
audioParamsScope.expanded = true;
window.emit(EVENTS.UI_PROPERTIES_TAB_RENDERED, node.id);
}),
_togglePropertiesView: function (show) {
let propsView = $("#properties-tabpanel-content");
let emptyView = $("#properties-tabpanel-content-empty");
(show ? propsView : emptyView).removeAttribute("hidden");
(show ? emptyView : propsView).setAttribute("hidden", "true");
},
/**
* Returns the scope for AudioParams in the
* VariablesView.
*
* @return Scope
*/
_getAudioPropertiesScope: function () {
return this._propsView.getScopeAtIndex(0);
},
/**
* Event handlers
*/
/**
* Executed when an audio prop is changed in the UI.
*/
_onEval: Task.async(function* (variable, value) {
let ownerScope = variable.ownerView;
let node = this._currentNode;
let propName = variable.name;
let error;
if (!variable._initialDescriptor.writable) {
error = new Error("Variable " + propName + " is not writable.");
} else {
// Cast value to proper type
try {
let number = parseFloat(value);
if (!isNaN(number)) {
value = number;
} else {
value = JSON.parse(value);
}
error = yield node.actor.setParam(propName, value);
}
catch (e) {
error = e;
}
}
// TODO figure out how to handle and display set prop errors
// and enable `test/brorwser_wa_properties-view-edit.js`
// Bug 994258
if (!error) {
ownerScope.get(propName).setGrip(value);
window.emit(EVENTS.UI_SET_PARAM, node.id, propName, value);
} else {
window.emit(EVENTS.UI_SET_PARAM_ERROR, node.id, propName, value);
}
}),
/**
* Called on EVENTS.UI_SELECT_NODE, and takes an actorID `id`
* and calls `setCurrentAudioNode` to scaffold the inspector view.
*/
_onNodeSelect: function (_, id) {
this.setCurrentAudioNode(getViewNodeById(id));
// Ensure inspector is visible when selecting a new node
this.toggleInspector({ visible: true });
},
/**
* Called when clicking on the toggling the inspector into view.
*/
_onTogglePaneClick: function () {
this.toggleInspector({ visible: !this.isVisible() });
},
/**
* Called when `DESTROY_NODE` is fired to remove the node from props view if
* it's currently selected.
*/
_onDestroyNode: function (_, id) {
if (this._currentNode && this._currentNode.id === id) {
this.setCurrentAudioNode(null);
}
}
};
/**
* Takes an element in an SVG graph and iterates over
* ancestors until it finds the graph node container. If not found,
* returns null.
*/
function findGraphNodeParent (el) {
// Some targets may not contain `classList` property
if (!el.classList)
return null;
while (!el.classList.contains("nodes")) {
if (el.classList.contains("audionode"))
return el;
else
el = el.parentNode;
}
return null;
}

View File

@ -19,8 +19,12 @@
<script type="application/javascript" src="chrome://browser/content/devtools/d3.js"/>
<script type="application/javascript" src="dagre-d3.js"/>
<script type="application/javascript" src="webaudioeditor-controller.js"/>
<script type="application/javascript" src="webaudioeditor-view.js"/>
<script type="application/javascript" src="webaudioeditor/includes.js"/>
<script type="application/javascript" src="webaudioeditor/models.js"/>
<script type="application/javascript" src="webaudioeditor/controller.js"/>
<script type="application/javascript" src="webaudioeditor/views/utils.js"/>
<script type="application/javascript" src="webaudioeditor/views/context.js"/>
<script type="application/javascript" src="webaudioeditor/views/inspector.js"/>
<vbox class="theme-body" flex="1">
<hbox id="reload-notice"

View File

@ -565,17 +565,21 @@ Messages.BaseMessage.prototype = {
*
* @constructor
* @extends Messages.BaseMessage
* @param string url
* The URL to display.
* @param object response
* The response received from the back end.
* @param number timestamp
* The message date and time, milliseconds elapsed since 1 January 1970
* 00:00:00 UTC.
*/
Messages.NavigationMarker = function(url, timestamp)
Messages.NavigationMarker = function(response, timestamp)
{
Messages.BaseMessage.call(this);
this._url = url;
this.textContent = "------ " + url;
// Store the response packet received from the server. It might
// be useful for extensions customizing the console output.
this.response = response;
this._url = response.url;
this.textContent = "------ " + this._url;
this.timestamp = timestamp;
};
@ -1204,6 +1208,10 @@ Messages.JavaScriptEvalOutput = function(evalResponse, errorMessage)
{
let severity = "log", msg, quoteStrings = true;
// Store also the response packet from the back end. It might
// be useful to extensions customizing the console output.
this.response = evalResponse;
if (errorMessage) {
severity = "error";
msg = errorMessage;
@ -3258,7 +3266,11 @@ Widgets.LongString.prototype = Heritage.extend(Widgets.BaseWidget.prototype,
this._renderString(this.longStringActor.initial + response.substring);
this.output.owner.emit("messages-updated", new Set([this.message.element]));
this.output.owner.emit("new-messages", new Set([{
update: true,
node: this.message.element,
response: response,
}]));
let toIndex = Math.min(this.longStringActor.length, MAX_LONG_STRING_LENGTH);
if (toIndex != this.longStringActor.length) {

View File

@ -1302,9 +1302,10 @@ function waitForMessages(aOptions)
return aRule.matched.size == count;
}
function onMessagesAdded(aEvent, aNewElements)
function onMessagesAdded(aEvent, aNewMessages)
{
for (let elem of aNewElements) {
for (let msg of aNewMessages) {
let elem = msg.node;
let location = elem.querySelector(".message-location");
if (location) {
let url = location.title;
@ -1343,8 +1344,7 @@ function waitForMessages(aOptions)
{
if (allRulesMatched()) {
if (listenerAdded) {
webconsole.ui.off("messages-added", onMessagesAdded);
webconsole.ui.off("messages-updated", onMessagesAdded);
webconsole.ui.off("new-messages", onMessagesAdded);
}
gPendingOutputTest--;
deferred.resolve(rules);
@ -1359,7 +1359,7 @@ function waitForMessages(aOptions)
}
if (webconsole.ui) {
webconsole.ui.off("messages-added", onMessagesAdded);
webconsole.ui.off("new-messages", onMessagesAdded);
}
for (let rule of rules) {
@ -1382,12 +1382,21 @@ function waitForMessages(aOptions)
}
executeSoon(() => {
onMessagesAdded("messages-added", webconsole.outputNode.childNodes);
let messages = [];
for (let elem of webconsole.outputNode.childNodes) {
messages.push({
node: elem,
update: false,
});
}
onMessagesAdded("new-messages", messages);
if (!allRulesMatched()) {
listenerAdded = true;
registerCleanupFunction(testCleanup);
webconsole.ui.on("messages-added", onMessagesAdded);
webconsole.ui.on("messages-updated", onMessagesAdded);
webconsole.ui.on("new-messages", onMessagesAdded);
}
});

View File

@ -471,7 +471,7 @@ WebConsoleFrame.prototype = {
}, (aReason) => { // on failure
let node = this.createMessageNode(CATEGORY_JS, SEVERITY_ERROR,
aReason.error + ": " + aReason.message);
this.outputMessage(CATEGORY_JS, node);
this.outputMessage(CATEGORY_JS, node, [aReason]);
this._initDefer.reject(aReason);
}).then(() => {
let id = WebConsoleUtils.supportsString(this.hudId);
@ -1454,14 +1454,15 @@ WebConsoleFrame.prototype = {
/**
* Log network event.
*
* @param object aActorId
* The network event actor ID to log.
* @param object aActor
* The network event actor to log.
* @return nsIDOMElement|null
* The message element to display in the Web Console output.
*/
logNetEvent: function WCF_logNetEvent(aActorId)
logNetEvent: function WCF_logNetEvent(aActor)
{
let networkInfo = this._networkRequests[aActorId];
let actorId = aActor.actor;
let networkInfo = this._networkRequests[actorId];
if (!networkInfo) {
return null;
}
@ -1485,7 +1486,7 @@ WebConsoleFrame.prototype = {
if (networkInfo.private) {
messageNode.setAttribute("private", true);
}
messageNode._connectionId = aActorId;
messageNode._connectionId = actorId;
messageNode.url = request.url;
let body = methodNode.parentNode;
@ -1526,7 +1527,7 @@ WebConsoleFrame.prototype = {
networkInfo.node = messageNode;
this._updateNetMessage(aActorId);
this._updateNetMessage(actorId);
return messageNode;
},
@ -1730,7 +1731,7 @@ WebConsoleFrame.prototype = {
};
this._networkRequests[aActor.actor] = networkInfo;
this.outputMessage(CATEGORY_NETWORK, this.logNetEvent, [aActor.actor]);
this.outputMessage(CATEGORY_NETWORK, this.logNetEvent, [aActor]);
},
/**
@ -1781,7 +1782,11 @@ WebConsoleFrame.prototype = {
}
if (networkInfo.node && this._updateNetMessage(aActorId)) {
this.emit("messages-updated", new Set([networkInfo.node]));
this.emit("new-messages", new Set([{
update: true,
node: networkInfo.node,
response: aPacket,
}]));
}
// For unit tests we pass the HTTP activity object to the test callback,
@ -2009,7 +2014,7 @@ WebConsoleFrame.prototype = {
{
if (aEvent == "will-navigate") {
if (this.persistLog) {
let marker = new Messages.NavigationMarker(aPacket.url, Date.now());
let marker = new Messages.NavigationMarker(aPacket, Date.now());
this.output.addMessage(marker);
}
else {
@ -2042,7 +2047,9 @@ WebConsoleFrame.prototype = {
* object and the arguments will be |aArguments|.
* @param array [aArguments]
* If a method is given to output the message element then the method
* will be invoked with the list of arguments given here.
* will be invoked with the list of arguments given here. The last
* object in this array should be the packet received from the
* back end.
*/
outputMessage: function WCF_outputMessage(aCategory, aMethodOrNode, aArguments)
{
@ -2114,18 +2121,17 @@ WebConsoleFrame.prototype = {
Utils.isOutputScrolledToBottom(outputNode);
// Output the current batch of messages.
let newMessages = new Set();
let updatedMessages = new Set();
let messages = new Set();
for (let i = 0; i < batch.length; i++) {
let item = batch[i];
let result = this._outputMessageFromQueue(hudIdSupportsString, item);
if (result) {
if (result.isRepeated) {
updatedMessages.add(result.isRepeated);
}
else {
newMessages.add(result.node);
}
messages.add({
node: result.isRepeated ? result.isRepeated : result.node,
response: result.message,
update: !!result.isRepeated,
});
if (result.visible && result.node == this.outputNode.lastChild) {
lastVisibleNode = result.node;
}
@ -2167,11 +2173,8 @@ WebConsoleFrame.prototype = {
scrollNode.scrollTop -= oldScrollHeight - scrollNode.scrollHeight;
}
if (newMessages.size) {
this.emit("messages-added", newMessages);
}
if (updatedMessages.size) {
this.emit("messages-updated", updatedMessages);
if (messages.size) {
this.emit("new-messages", messages);
}
// If the output queue is empty, then run _flushCallback.
@ -2228,6 +2231,10 @@ WebConsoleFrame.prototype = {
{
let [category, methodOrNode, args] = aItem;
// The last object in the args array should be the message
// object or response packet received from the server.
let message = (args && args.length) ? args[args.length-1] : null;
let node = typeof methodOrNode == "function" ?
methodOrNode.apply(this, args || []) :
methodOrNode;
@ -2265,6 +2272,7 @@ WebConsoleFrame.prototype = {
visible: visible,
node: node,
isRepeated: isRepeated,
message: message
};
},
@ -2342,7 +2350,7 @@ WebConsoleFrame.prototype = {
if (category == CATEGORY_NETWORK) {
let connectionId = null;
if (methodOrNode == this.logNetEvent) {
connectionId = args[0];
connectionId = args[0].actor;
}
else if (typeof methodOrNode != "function") {
connectionId = methodOrNode._connectionId;
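The net effect of these hunks is that the separate "messages-added" and "messages-updated" events are collapsed into a single "new-messages" event whose payload is a Set of { node, update, response } objects. A minimal consumer sketch, assuming `hud.ui` is a WebConsoleFrame instance as used elsewhere in this patch (the handleNew/handleUpdated helpers are hypothetical):

// Sketch only: one listener replaces the old added/updated pair.
function onNewMessages(aEvent, aMessages) {
  for (let msg of aMessages) {
    if (msg.update) {
      // An existing message element was refreshed (repeat count bump,
      // long string expansion, network event update, ...).
      handleUpdated(msg.node, msg.response);
    } else {
      // A brand new message element was appended to the output.
      handleNew(msg.node, msg.response);
    }
  }
}

hud.ui.on("new-messages", onNewMessages);
// ... later, when done observing ...
hud.ui.off("new-messages", onNewMessages);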
View File
@ -227,7 +227,7 @@ let UI = {
this._busyTimeout = setTimeout(() => {
this.unbusy();
UI.reportError("error_operationTimeout", this._busyOperationDescription);
}, 30000);
}, 6000);
},
cancelBusyTimeout: function() {
View File
@ -7,3 +7,4 @@ support-files =
templates.json
[browser_tabs.js]
skip-if = e10s # Bug 1072167 - browser_tabs.js test fails under e10s
View File
@ -555,13 +555,18 @@ getUserMedia.stopSharing.accesskey = S
getUserMedia.sharingMenu.label = Tabs sharing devices
getUserMedia.sharingMenu.accesskey = d
# LOCALIZATION NOTE (getUserMedia.sharingMenuCamera, getUserMedia.sharingMenuCamera,
# getUserMedia.sharingMenuMicrophone, getUserMedia.sharingMenuApplication,
# getUserMedia.sharingMenuScreen, getUserMedia.sharingMenuWindow,
# LOCALIZATION NOTE (getUserMedia.sharingMenuCamera
# getUserMedia.sharingMenuMicrophone,
# getUserMedia.sharingMenuApplication,
# getUserMedia.sharingMenuScreen,
# getUserMedia.sharingMenuWindow,
# getUserMedia.sharingMenuCameraMicrophone,
# getUserMedia.sharingMenuCameraMicrophoneApplication,
# getUserMedia.sharingMenuCameraMicrophoneScreen,
# getUserMedia.sharingMenuCameraMicrophoneWindow,
# getUserMedia.sharingMenuCameraApplication,
# getUserMedia.sharingMenuCameraScreen,
# getUserMedia.sharingMenuCameraWindow,
# getUserMedia.sharingMenuMicrophoneApplication,
# getUserMedia.sharingMenuMicrophoneScreen,
# getUserMedia.sharingMenuMicrophoneWindow):
@ -575,6 +580,9 @@ getUserMedia.sharingMenuCameraMicrophone = %S (camera and microphone)
getUserMedia.sharingMenuCameraMicrophoneApplication = %S (camera, microphone and application)
getUserMedia.sharingMenuCameraMicrophoneScreen = %S (camera, microphone and screen)
getUserMedia.sharingMenuCameraMicrophoneWindow = %S (camera, microphone and window)
getUserMedia.sharingMenuCameraApplication = %S (camera and application)
getUserMedia.sharingMenuCameraScreen = %S (camera and screen)
getUserMedia.sharingMenuCameraWindow = %S (camera and window)
getUserMedia.sharingMenuMicrophoneApplication = %S (microphone and application)
getUserMedia.sharingMenuMicrophoneScreen = %S (microphone and screen)
getUserMedia.sharingMenuMicrophoneWindow = %S (microphone and window)
View File
@ -98,6 +98,7 @@ this.UITour = {
["help", {query: "#PanelUI-help"}],
["home", {query: "#home-button"}],
["loop", {query: "#loop-call-button"}],
["privateWindow", {query: "#privatebrowsing-button"}],
["quit", {query: "#PanelUI-quit"}],
["search", {
query: "#searchbar",
@ -1131,8 +1132,7 @@ this.UITour = {
});
break;
case "appinfo":
let props = ["defaultUpdateChannel", "distributionID", "isOfficialBranding",
"isReleaseBuild", "name", "vendor", "version"];
let props = ["defaultUpdateChannel", "version"];
let appinfo = {};
props.forEach(property => appinfo[property] = Services.appinfo[property]);
this.sendPageCallback(aContentDocument, aCallbackID, appinfo);
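Two user-visible UITour changes land here: a new "privateWindow" highlight target and a trimmed "appinfo" payload that now only reports defaultUpdateChannel and version. A rough sketch of how a tour page might exercise both through the standard Mozilla.UITour page API (the effect name and the logging are illustrative only):

// Sketch only: query the reduced appinfo payload...
Mozilla.UITour.getConfiguration("appinfo", function (config) {
  // Only these two properties survive the change above.
  console.log(config.defaultUpdateChannel, config.version);
});

// ...and point the user at the private-browsing button.
Mozilla.UITour.showHighlight("privateWindow", "wobble");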
View File
@ -267,8 +267,7 @@ let tests = [
},
function test_getConfigurationVersion(done) {
function callback(result) {
let props = ["defaultUpdateChannel", "distributionID", "isOfficialBranding",
"isReleaseBuild", "name", "vendor", "version"];
let props = ["defaultUpdateChannel", "version"];
for (let property of props) {
ok(typeof(result[property]) !== undefined, "Check " + property + " isn't undefined.");
is(result[property], Services.appinfo[property], "Should have the same " + property + " property.");
View File
@ -28,6 +28,7 @@ let tests = [
"home",
"loop",
"pinnedTab",
"privateWindow",
"quit",
"search",
"searchProvider",
@ -54,6 +55,7 @@ let tests = [
"loop",
"home",
"pinnedTab",
"privateWindow",
"quit",
"search",
"searchProvider",
@ -85,6 +87,7 @@ let tests = [
"home",
"loop",
"pinnedTab",
"privateWindow",
"quit",
"urlbar",
]);
View File
@ -165,8 +165,9 @@
}
/* Vertical toolbar border */
#main-window[sizemode=normal] #navigator-toolbox::after,
#main-window[sizemode=normal] #navigator-toolbox > toolbar:not(#toolbar-menubar):not(#TabsToolbar) {
#main-window[sizemode=normal] #navigator-toolbox:not(:-moz-lwtheme)::after,
#main-window[sizemode=normal] #navigator-toolbox > toolbar:not(#toolbar-menubar):not(#TabsToolbar):not(:-moz-lwtheme),
#main-window[sizemode=normal] #navigator-toolbox:-moz-lwtheme {
border-left: 1px solid @toolbarShadowColor@;
border-right: 1px solid @toolbarShadowColor@;
background-clip: padding-box;
View File
@ -2861,22 +2861,24 @@ nsDocShell::PopProfileTimelineMarkers(JSContext* aCx,
mProfileTimelineMarkers[j]->mPayload);
const char* endMarkerName = mProfileTimelineMarkers[j]->mName;
// Look for Layer markers to stream out paint markers
// Look for Layer markers to stream out paint markers.
if (strcmp(endMarkerName, "Layer") == 0) {
hasSeenPaintedLayer = true;
}
bool isSameMarkerType = strcmp(startMarkerName, endMarkerName) == 0;
bool isValidType = strcmp(endMarkerName, "Paint") != 0 ||
hasSeenPaintedLayer;
bool isPaint = strcmp(startMarkerName, "Paint") == 0;
if (endPayload->GetMetaData() == TRACING_INTERVAL_END &&
isSameMarkerType && isValidType) {
mozilla::dom::ProfileTimelineMarker marker;
marker.mName = NS_ConvertUTF8toUTF16(startMarkerName);
marker.mStart = mProfileTimelineMarkers[i]->mTime;
marker.mEnd = mProfileTimelineMarkers[j]->mTime;
profileTimelineMarkers.AppendElement(marker);
// Pair start and end markers.
if (endPayload->GetMetaData() == TRACING_INTERVAL_END && isSameMarkerType) {
// But ignore paint start/end if no layer has been painted.
if (!isPaint || (isPaint && hasSeenPaintedLayer)) {
mozilla::dom::ProfileTimelineMarker marker;
marker.mName = NS_ConvertUTF8toUTF16(startMarkerName);
marker.mStart = mProfileTimelineMarkers[i]->mTime;
marker.mEnd = mProfileTimelineMarkers[j]->mTime;
profileTimelineMarkers.AppendElement(marker);
}
break;
}
View File
@ -180,6 +180,7 @@ this.DOMApplicationRegistry = {
"Webapps:Download", "Webapps:ApplyDownload",
"Webapps:Install:Return:Ack", "Webapps:AddReceipt",
"Webapps:RemoveReceipt", "Webapps:ReplaceReceipt",
"Webapps:RegisterBEP",
"child-process-shutdown"];
this.frameMessages = ["Webapps:ClearBrowserData"];
@ -1177,6 +1178,14 @@ this.DOMApplicationRegistry = {
return null;
}
}
// And RegisterBEP requires "browser" permission...
if ("Webapps:RegisterBEP" == aMessage.name) {
if (!aMessage.target.assertPermission("browser")) {
debug("mozApps message " + aMessage.name +
" from a content process with no 'browser' privileges.");
return null;
}
}
let msg = aMessage.data || {};
let mm = aMessage.target;
@ -1285,6 +1294,9 @@ this.DOMApplicationRegistry = {
case "Webapps:ReplaceReceipt":
this.replaceReceipt(msg, mm);
break;
case "Webapps:RegisterBEP":
this.registerBrowserElementParentForApp(msg, mm);
break;
}
});
},
@ -4272,14 +4284,16 @@ this.DOMApplicationRegistry = {
}
},
registerBrowserElementParentForApp: function(bep, appId) {
let mm = bep._mm;
registerBrowserElementParentForApp: function(aMsg, aMn) {
let appId = this.getAppLocalIdByManifestURL(aMsg.manifestURL);
if (appId == Ci.nsIScriptSecurityManager.NO_APP_ID) {
return;
}
// Make a listener function that holds on to this appId.
let listener = this.receiveAppMessage.bind(this, appId);
this.frameMessages.forEach(function(msgName) {
mm.addMessageListener(msgName, listener);
aMn.addMessageListener(msgName, listener);
});
},
View File
@ -210,10 +210,15 @@ BrowserElementParent.prototype = {
let appManifestURL =
this._frameElement.QueryInterface(Ci.nsIMozBrowserFrame).appManifestURL;
if (appManifestURL) {
let appId =
DOMApplicationRegistry.getAppLocalIdByManifestURL(appManifestURL);
if (appId != Ci.nsIScriptSecurityManager.NO_APP_ID) {
DOMApplicationRegistry.registerBrowserElementParentForApp(this, appId);
let inParent = Cc["@mozilla.org/xre/app-info;1"]
.getService(Ci.nsIXULRuntime)
.processType == Ci.nsIXULRuntime.PROCESS_TYPE_DEFAULT;
if (inParent) {
DOMApplicationRegistry.registerBrowserElementParentForApp(
{ manifestURL: appManifestURL }, this._mm);
} else {
this._mm.sendAsyncMessage("Webapps:RegisterBEP",
{ manifestURL: appManifestURL });
}
}
},
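Taken together with the Webapps.jsm hunks above, registration now works the same way regardless of process: the registry method takes a plain { manifestURL } object plus a message manager, and content processes reach it via the new "Webapps:RegisterBEP" message, which the parent only honors for callers holding the "browser" permission. A condensed sketch of the two paths, reusing only identifiers that appear in the patch:

// Parent process: register directly with the registry.
DOMApplicationRegistry.registerBrowserElementParentForApp(
  { manifestURL: appManifestURL }, this._mm);

// Content process: ask the parent to do it on our behalf.
this._mm.sendAsyncMessage("Webapps:RegisterBEP",
                          { manifestURL: appManifestURL });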
View File
@ -1827,12 +1827,15 @@ PreloadSlowThings()
bool
ContentChild::RecvAppInfo(const nsCString& version, const nsCString& buildID,
const nsCString& name, const nsCString& UAName)
const nsCString& name, const nsCString& UAName,
const nsCString& ID, const nsCString& vendor)
{
mAppInfo.version.Assign(version);
mAppInfo.buildID.Assign(buildID);
mAppInfo.name.Assign(name);
mAppInfo.UAName.Assign(UAName);
mAppInfo.ID.Assign(ID);
mAppInfo.vendor.Assign(vendor);
if (!Preferences::GetBool("dom.ipc.processPrelaunch.enabled", false)) {
return true;
View File
@ -70,6 +70,8 @@ public:
nsCString buildID;
nsCString name;
nsCString UAName;
nsCString ID;
nsCString vendor;
};
bool Init(MessageLoop* aIOLoop,
@ -297,7 +299,8 @@ public:
virtual bool RecvCycleCollect() MOZ_OVERRIDE;
virtual bool RecvAppInfo(const nsCString& version, const nsCString& buildID,
const nsCString& name, const nsCString& UAName) MOZ_OVERRIDE;
const nsCString& name, const nsCString& UAName,
const nsCString& ID, const nsCString& vendor) MOZ_OVERRIDE;
virtual bool RecvLastPrivateDocShellDestroyed() MOZ_OVERRIDE;
View File
@ -2044,9 +2044,11 @@ ContentParent::InitInternal(ProcessPriority aInitialPriority,
nsCString buildID(gAppData->buildID);
nsCString name(gAppData->name);
nsCString UAName(gAppData->UAName);
nsCString ID(gAppData->ID);
nsCString vendor(gAppData->vendor);
// Sending all information to content process.
unused << SendAppInfo(version, buildID, name, UAName);
unused << SendAppInfo(version, buildID, name, UAName, ID, vendor);
}
nsStyleSheetService *sheetService = nsStyleSheetService::GetInstance();
View File
@ -438,7 +438,8 @@ child:
*/
ActivateA11y();
AppInfo(nsCString version, nsCString buildID, nsCString name, nsCString UAName);
AppInfo(nsCString version, nsCString buildID, nsCString name, nsCString UAName,
nsCString ID, nsCString vendor);
// Notify child that last-pb-context-exited notification was observed
LastPrivateDocShellDestroyed();
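The AppInfo message now also carries the application ID and vendor, so a content process can answer those fields locally instead of leaving them empty. Presumably the point is that chrome script running in a child process can read them through the usual app-info service; a hedged sketch under that assumption (the Services.jsm import is shown for completeness):

// Sketch only: read the newly forwarded fields from a content process.
const { utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");

let appId = Services.appinfo.ID;       // now mirrors gAppData->ID in the parent
let vendor = Services.appinfo.vendor;  // now mirrors gAppData->vendor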
View File
@ -23,12 +23,13 @@
android:contentDescription="@string/site_security"
android:visibility="gone"/>
<org.mozilla.gecko.widget.ThemedTextView android:id="@+id/url_bar_title"
style="@style/UrlBar.Title"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1.0"
gecko:autoUpdateTheme="false"/>
<org.mozilla.gecko.widget.FadedTextView android:id="@+id/url_bar_title"
style="@style/UrlBar.Title"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_weight="1.0"
gecko:fadeWidth="40dip"
gecko:autoUpdateTheme="false"/>
<org.mozilla.gecko.toolbar.PageActionLayout android:id="@+id/page_action_layout"
android:layout_width="wrap_content"
View File
@ -13,6 +13,7 @@ import org.mozilla.gecko.GeckoApplication;
import org.mozilla.gecko.GeckoProfile;
import org.mozilla.gecko.LightweightTheme;
import org.mozilla.gecko.LightweightThemeDrawable;
import org.mozilla.gecko.NewTabletUI;
import org.mozilla.gecko.R;
import org.mozilla.gecko.Telemetry;
import org.mozilla.gecko.TelemetryContract;
@ -66,7 +67,11 @@ public class TabsPanel extends LinearLayout
}
public static View createTabsLayout(final Context context, final AttributeSet attrs) {
return new TabsListLayout(context, attrs);
if (NewTabletUI.isEnabled(context)) {
return new TabsGridLayout(context, attrs);
} else {
return new TabsListLayout(context, attrs);
}
}
public static interface TabsLayoutChangeListener {
View File
@ -13,15 +13,15 @@ import android.graphics.Shader;
import android.graphics.drawable.Drawable;
import android.text.Layout;
import android.util.AttributeSet;
import android.widget.TextView;
import org.mozilla.gecko.R;
import org.mozilla.gecko.widget.ThemedTextView;
/**
* FadedTextView fades the ends of the text by fadeWidth amount,
* if the text is too long and requires an ellipsis.
*/
public class FadedTextView extends TextView {
public class FadedTextView extends ThemedTextView {
// Width of the fade effect from end of the view.
private final int mFadeWidth;
@ -34,11 +34,7 @@ public class FadedTextView extends TextView {
}
public FadedTextView(Context context, AttributeSet attrs) {
this(context, attrs, android.R.attr.textViewStyle);
}
public FadedTextView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
super(context, attrs);
setSingleLine(true);
setEllipsize(null);
View File
@ -24,13 +24,11 @@ EXTRA_JS_MODULES += [
'Prompt.jsm',
'Sanitizer.jsm',
'SharedPreferences.jsm',
'SimpleServiceDiscovery.jsm',
'SSLExceptions.jsm',
'TabMirror.jsm',
'WebappManagerWorker.js',
]
EXTRA_PP_JS_MODULES += [
'RokuApp.jsm',
'WebappManager.jsm',
]
View File
@ -20,6 +20,7 @@
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:scrollbarStyle="outsideOverlay"
android:paddingLeft="@dimen/search_row_padding"
android:paddingRight="@dimen/search_row_padding"/>
View File
@ -3,12 +3,12 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Default search engine
browser.search.defaultenginename=Google
browser.search.defaultenginename=Yahoo
# Search engine order (order displayed in the search bar dropdown)
browser.search.order.1=Google
browser.search.order.2=Yahoo
browser.search.order.3=Bing
browser.search.order.1=Yahoo
browser.search.order.2=Bing
browser.search.order.3=Google
# increment this number when anything gets changed in the list below. This will
# cause Firefox to re-read these prefs and inject any new handlers into the
View File
@ -22,13 +22,13 @@ extern "C" JNIEXPORT jbyteArray JNICALL Java_org_mozilla_gecko_background_native
if (dkLen < 0) {
env->ThrowNew(env->FindClass("java/lang/IllegalArgumentException"),
"dkLen should not be less than 0");
return NULL;
return nullptr;
}
jbyte *password = env->GetByteArrayElements(jpassword, NULL);
jbyte *password = env->GetByteArrayElements(jpassword, nullptr);
size_t passwordLen = env->GetArrayLength(jpassword);
jbyte *salt = env->GetByteArrayElements(jsalt, NULL);
jbyte *salt = env->GetByteArrayElements(jsalt, nullptr);
size_t saltLen = env->GetArrayLength(jsalt);
uint8_t hashResult[dkLen];
@ -39,8 +39,8 @@ extern "C" JNIEXPORT jbyteArray JNICALL Java_org_mozilla_gecko_background_native
env->ReleaseByteArrayElements(jsalt, salt, JNI_ABORT);
jbyteArray out = env->NewByteArray(dkLen);
if (out == NULL) {
return NULL;
if (out == nullptr) {
return nullptr;
}
env->SetByteArrayRegion(out, 0, dkLen, (jbyte *) hashResult);
@ -54,7 +54,7 @@ using namespace mozilla;
*/
extern "C" JNIEXPORT jbyteArray JNICALL Java_org_mozilla_gecko_background_nativecode_NativeCrypto_sha1
(JNIEnv *env, jclass jc, jbyteArray jstr) {
jbyte *str = env->GetByteArrayElements(jstr, NULL);
jbyte *str = env->GetByteArrayElements(jstr, nullptr);
size_t strLen = env->GetArrayLength(jstr);
SHA1Sum sha1;
@ -65,8 +65,8 @@ extern "C" JNIEXPORT jbyteArray JNICALL Java_org_mozilla_gecko_background_native
env->ReleaseByteArrayElements(jstr, str, JNI_ABORT);
jbyteArray out = env->NewByteArray(SHA1Sum::kHashSize);
if (out == NULL) {
return NULL;
if (out == nullptr) {
return nullptr;
}
env->SetByteArrayRegion(out, 0, SHA1Sum::kHashSize, (jbyte *) hashResult);
View File
@ -18,6 +18,7 @@ XPCOMUtils.defineLazyModuleGetter(this, "NetUtil",
"resource://gre/modules/NetUtil.jsm");
Cu.import("resource://gre/modules/Promise.jsm");
Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://gre/modules/CloudSyncPlacesWrapper.jsm");
Cu.import("resource://gre/modules/CloudSyncEventSource.jsm");
Cu.import("resource://gre/modules/CloudSyncBookmarksFolderCache.jsm");
@ -484,7 +485,15 @@ let RootFolder = function (rootId, rootName) {
}
if (item.hasOwnProperty("index") && !item.hasOwnProperty("parent")) {
promises.push(PlacesWrapper.bookmarks.setItemIndex(localId, item.index));
promises.push(Task.spawn(function* () {
let localItem = (yield getLocalItemsById([item.id]))[0];
let parent = yield PlacesWrapper.guidToLocalId(localItem.parent);
let index = item.index;
if (CS_FOLDER & item.type) {
folderCache.setParent(localId, parent);
}
yield PlacesWrapper.moveItem(localId, parent, index);
}));
}
Promise.all(promises)
View File
@ -180,8 +180,22 @@ this.FxAccountsOAuthClient.prototype = {
this.tearDown();
// if the message asked to close the tab
if (data.closeWindow && target && target.contentWindow) {
target.contentWindow.close();
if (data.closeWindow && target) {
// for e10s reasons the best way is to use the TabBrowser to close the tab.
let tabbrowser = target.getTabBrowser();
if (tabbrowser) {
let tab = tabbrowser._getTabForBrowser(target);
if (tab) {
tabbrowser.removeTab(tab);
log.debug("OAuth flow closed the tab.");
} else {
log.debug("OAuth flow failed to close the tab. Tab not found in TabBrowser.");
}
} else {
log.debug("OAuth flow failed to close the tab. TabBrowser not found.");
}
}
break;
}
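The rewritten close path avoids touching contentWindow, which chrome cannot reach directly for a remote browser under e10s, and instead resolves the hosting tab through the tabbrowser. A compact sketch of that lookup, using only the calls that appear above (the closeTabFor wrapper itself is hypothetical):

// Sketch only: close the tab hosting a given <xul:browser>.
function closeTabFor(browser) {
  let tabbrowser = browser.getTabBrowser();
  if (!tabbrowser) {
    return false;              // not hosted in a tabbrowser at all
  }
  let tab = tabbrowser._getTabForBrowser(browser);
  if (!tab) {
    return false;              // browser found, but no matching tab
  }
  tabbrowser.removeTab(tab);
  return true;
}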
View File
@ -9,10 +9,7 @@
////////////////////////////////////////////////////////////////////////////////
//// Constants
const Cc = Components.classes;
const Ci = Components.interfaces;
const Cr = Components.results;
const Cu = Components.utils;
const { classes: Cc, interfaces: Ci, results: Cr, utils: Cu } = Components;
const TOPIC_SHUTDOWN = "places-shutdown";
const TOPIC_PREFCHANGED = "nsPref:changed";
@ -56,6 +53,7 @@ const QUERYTYPE_KEYWORD = 0;
const QUERYTYPE_FILTERED = 1;
const QUERYTYPE_AUTOFILL_HOST = 2;
const QUERYTYPE_AUTOFILL_URL = 3;
const QUERYTYPE_AUTOFILL_PREDICTURL = 4;
// This separator is used as an RTL-friendly way to split the title and tags.
// It can also be used by an nsIAutoCompleteResult consumer to re-split the
@ -68,7 +66,7 @@ const TITLE_SEARCH_ENGINE_SEPARATOR = " \u00B7\u2013\u00B7 ";
// Telemetry probes.
const TELEMETRY_1ST_RESULT = "PLACES_AUTOCOMPLETE_1ST_RESULT_TIME_MS";
const TELEMETRY_6_FIRST_RESULTS = "PLACES_AUTOCOMPLETE_6_FIRST_RESULTS_TIME_MS";
// The default frecency value used when inserting search engine results.
// The default frecency value used when inserting matches with unknown frecency.
const FRECENCY_SEARCHENGINES_DEFAULT = 1000;
// Sqlite result row index constants.
@ -742,12 +740,6 @@ Search.prototype = {
return;
}
}
// If we didn't find enough matches and we have some frecency-driven
// matches, add them.
if (this._frecencyMatches) {
this._frecencyMatches.forEach(this._addMatch, this);
}
}),
_matchKnownUrl: function* (conn, queries) {
@ -825,7 +817,7 @@ Search.prototype = {
}
this._result.setDefaultIndex(0);
this._addFrecencyMatch({
this._addMatch({
value: value,
comment: match.engineName,
icon: match.iconUrl,
@ -843,6 +835,8 @@ Search.prototype = {
switch (queryType) {
case QUERYTYPE_AUTOFILL_HOST:
this._result.setDefaultIndex(0);
// Fall through.
case QUERYTYPE_AUTOFILL_PREDICTURL:
match = this._processHostRow(row);
break;
case QUERYTYPE_AUTOFILL_URL:
@ -857,19 +851,6 @@ Search.prototype = {
this._addMatch(match);
},
/**
* These matches should be mixed up with other matches, based on frecency.
*/
_addFrecencyMatch: function (match) {
if (!this._frecencyMatches)
this._frecencyMatches = [];
this._frecencyMatches.push(match);
// We keep this array in reverse order, so we can walk it and remove stuff
// from it in one pass. Notice that for frecency reverse order means from
// lower to higher.
this._frecencyMatches.sort((a, b) => a.frecency - b.frecency);
},
_maybeRestyleSearchMatch: function (match) {
// Return if the URL does not represent a search result.
let parseResult =
@ -902,14 +883,6 @@ Search.prototype = {
let notifyResults = false;
if (this._frecencyMatches) {
for (let i = this._frecencyMatches.length - 1; i >= 0 ; i--) {
if (this._frecencyMatches[i].frecency > match.frecency) {
this._addMatch(this._frecencyMatches.splice(i, 1)[0]);
}
}
}
// Must check both id and url, because keywords dynamically modify the url.
let urlMapKey = stripHttpAndTrim(match.value);
if ((!match.placeId || !this._usedPlaceIds.has(match.placeId)) &&
@ -1304,7 +1277,7 @@ Search.prototype = {
return [
SQL_HOST_QUERY,
{
query_type: QUERYTYPE_AUTOFILL_HOST,
query_type: QUERYTYPE_AUTOFILL_PREDICTURL,
searchString: host
}
];
View File
@ -0,0 +1,28 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
add_task(function* test_searchEngine_autoFill() {
Services.search.addEngineWithDetails("MySearchEngine", "", "", "",
"GET", "http://my.search.com/");
let engine = Services.search.getEngineByName("MySearchEngine");
do_register_cleanup(() => Services.search.removeEngine(engine));
// Add a uri that matches the search string with high frecency.
let uri = NetUtil.newURI("http://www.example.com/my/");
let visits = [];
for (let i = 0; i < 100; ++i) {
visits.push({ uri , title: "Terms - SearchEngine Search" });
}
yield promiseAddVisits(visits);
addBookmark({ uri: uri, title: "Example bookmark" });
Assert.ok(frecencyForUrl(uri) > 10000);
do_log_info("Check search domain is autoFilled even if there's an higher frecency match");
yield check_autocomplete({
search: "my",
autofilled: "my.search.com",
completed: "http://my.search.com"
});
yield cleanup();
});
View File
@ -27,6 +27,7 @@ tail =
[test_multi_word_search.js]
[test_queryurl.js]
[test_searchEngine.js]
[test_searchEngine_host.js]
[test_special_search.js]
[test_swap_protocol.js]
[test_tabmatches.js]
View File
@ -937,8 +937,11 @@
var isMouseOver = (event.type == "mouseover");
var isMouseInControls = event.clientY > this.controlBar.getBoundingClientRect().top &&
event.clientY < this.controlBar.getBoundingClientRect().bottom;
var controlRect = this.controlBar.getBoundingClientRect();
var isMouseInControls = event.clientY > controlRect.top &&
event.clientY < controlRect.bottom &&
event.clientX > controlRect.left &&
event.clientX < controlRect.right;
// Suppress fading out the controls until the video has rendered
// its first frame. But since autoplay videos start off with no
View File
@ -6,7 +6,7 @@
const { Cc, Ci, Cu } = require("chrome");
let protocol = require("devtools/server/protocol");
let { method, RetVal, Arg } = protocol;
let { method, RetVal, Arg, types } = protocol;
const { reportException } = require("devtools/toolkit/DevToolsUtils");
loader.lazyRequireGetter(this, "events", "sdk/event/core");
@ -37,6 +37,13 @@ function expectState(expectedState, method) {
};
}
types.addDictType("AllocationsRecordingOptions", {
// The probability we sample any given allocation when recording
// allocations. Must be between 0.0 and 1.0. Defaults to 1.0, or sampling
// every allocation.
probability: "number"
});
/**
* An actor that returns memory usage data for its parent actor's window.
* A tab-scoped instance of this actor will measure the memory footprint of its
@ -118,6 +125,11 @@ let MemoryActor = protocol.ActorClass({
},
_initFrames: function() {
if (this._framesToCounts) {
// The maps are already initialized.
return;
}
this._framesToCounts = new Map();
this._framesToIndices = new Map();
this._framesToForms = new Map();
@ -160,19 +172,27 @@ let MemoryActor = protocol.ActorClass({
/**
* Start recording allocation sites.
*
* @param AllocationsRecordingOptions options
* See the protocol.js definition of AllocationsRecordingOptions above.
*/
startRecordingAllocations: method(expectState("attached", function() {
startRecordingAllocations: method(expectState("attached", function(options = {}) {
this._initFrames();
this.dbg.memory.allocationSamplingProbability = options.probability != null
? options.probability
: 1.0;
this.dbg.memory.trackingAllocationSites = true;
}), {
request: {},
request: {
options: Arg(0, "nullable:AllocationsRecordingOptions")
},
response: {}
}),
/**
* Stop recording allocation sites.
*/
stopRecordingAllocations: method(expectState("attached", function(shouldRecord) {
stopRecordingAllocations: method(expectState("attached", function() {
this.dbg.memory.trackingAllocationSites = false;
this._clearFrames();
}), {
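startRecordingAllocations now accepts an optional AllocationsRecordingOptions dictionary; its single field, probability, is forwarded to the debugger's allocationSamplingProbability and defaults to 1.0 (sample every allocation). A hedged sketch of a client driving this through a memory front (the memoryFront variable and the surrounding Task.spawn are assumptions; the option name comes from the dict type above):

// Sketch only: sample roughly 5% of allocations instead of all of them.
Task.spawn(function* () {
  yield memoryFront.attach();
  yield memoryFront.startRecordingAllocations({ probability: 0.05 });

  // ... let the page run for a while ...

  let response = yield memoryFront.getAllocations();
  yield memoryFront.stopRecordingAllocations();
  yield memoryFront.detach();

  // response.allocations / response.frames describe the sampled sites.
  console.log(response.allocations.length + " sampled allocation sites");
});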
View File
@ -83,20 +83,27 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
},
/**
* Convert a window to a docShell.
* @param {nsIDOMWindow}
* @return {nsIDocShell}
*/
toDocShell: win => win.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIWebNavigation)
.QueryInterface(Ci.nsIDocShell),
/**
* Get the list of docShells in the currently attached tabActor.
* Get the list of docShells in the currently attached tabActor. Note that we
* always list the docShells included in the real root docShell, even if the
* tabActor was switched to a child frame. This is because for now, paint
* markers are only recorded at parent frame level so switching the timeline
* to a child frame would hide all paint markers.
* See https://bugzilla.mozilla.org/show_bug.cgi?id=1050773#c14
* @return {Array}
*/
get docShells() {
return this.tabActor.windows.map(this.toDocShell);
let docShellsEnum = this.tabActor.originalDocShell.getDocShellEnumerator(
Ci.nsIDocShellTreeItem.typeAll,
Ci.nsIDocShell.ENUMERATE_FORWARDS
);
let docShells = [];
while (docShellsEnum.hasMoreElements()) {
let docShell = docShellsEnum.getNext();
docShells.push(docShell.QueryInterface(Ci.nsIDocShell));
}
return docShells;
},
/**
@ -171,7 +178,12 @@ let TimelineActor = exports.TimelineActor = protocol.ActorClass({
*/
_onWindowReady: function({window}) {
if (this._isRecording) {
this.toDocShell(window).recordProfileTimelineMarkers = true;
// XXX As long as bug 1070089 isn't fixed, each docShell has its own start
// recording time, so markers aren't going to be properly ordered.
let docShell = window.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIWebNavigation)
.QueryInterface(Ci.nsIDocShell);
docShell.recordProfileTimelineMarkers = true;
}
}
});
View File
@ -156,6 +156,44 @@ let AudioNodeActor = exports.AudioNodeActor = protocol.ActorClass({
response: { source: RetVal("boolean") }
}),
/**
* Returns a boolean indicating if the AudioNode has been "bypassed",
* via `AudioNodeActor#bypass` method.
*
* @return Boolean
*/
isBypassed: method(function () {
let node = this.node.get();
if (node === null) {
return false;
}
return node.passThrough;
}, {
response: { bypassed: RetVal("boolean") }
}),
/**
* Takes a boolean, either enabling or disabling the "passThrough" option
* on an AudioNode. If a node is bypassed, an effects processing node (like gain
* or biquad) will allow the audio stream to pass through the node unaffected.
*
* @param Boolean enable
* Whether the bypass value should be set on or off.
*/
bypass: method(function (enable) {
let node = this.node.get();
if (node === null) {
return;
}
node.passThrough = enable;
}, {
request: { enable: Arg(0, "boolean") },
oneway: true
}),
/**
* Changes a param on the audio node. Responds with either `undefined`
* on success, or a description of the error upon param set failure.
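These two methods expose the WebAudio passThrough flag over the remote protocol. A rough client-side sketch, assuming nodeFront is the matching protocol.js front and that the front unwraps the { bypassed } response into a plain boolean (both assumptions, based on how the request/response definitions above are typically consumed):

// Sketch only: bypass an effects node, check the flag, then restore it.
Task.spawn(function* () {
  nodeFront.bypass(true);                // oneway request: no response expected
  let bypassed = yield nodeFront.isBypassed();
  console.log("node bypassed:", bypassed);
  nodeFront.bypass(false);               // re-enable normal processing
});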
View File
@ -656,6 +656,34 @@ TabActor.prototype = {
});
},
/**
* Getter for the original docShell the tabActor got attached to in the first
* place.
* Note that your actor should normally *not* rely on this top level docShell
* if you want it to show information relative to the iframe that's currently
* being inspected in the toolbox.
*/
get originalDocShell() {
if (!this._originalWindow) {
return this.docShell;
}
return this._originalWindow.QueryInterface(Ci.nsIInterfaceRequestor)
.getInterface(Ci.nsIWebNavigation)
.QueryInterface(Ci.nsIDocShell);
},
/**
* Getter for the original window the tabActor got attached to in the first
* place.
* Note that your actor should normally *not* rely on this top level window if
* you want it to show information relative to the iframe that's currently
* being inspected in the toolbox.
*/
get originalWindow() {
return this._originalWindow || this.window;
},
/**
* Getter for the nsIWebProgress for watching this window.
*/
@ -789,7 +817,9 @@ TabActor.prototype = {
metadata = Cu.getSandboxMetadata(global);
}
catch (e) {}
if (metadata["inner-window-id"] && metadata["inner-window-id"] == id) {
if (metadata
&& metadata["inner-window-id"]
&& metadata["inner-window-id"] == id) {
return true;
}
View File
@ -77,6 +77,7 @@ skip-if = buildapp == 'mulet'
[test_memory_allocations_01.html]
[test_memory_allocations_02.html]
[test_memory_allocations_03.html]
[test_memory_allocations_04.html]
[test_memory_attach_01.html]
[test_memory_attach_02.html]
[test_memory_census.html]
View File
@ -37,22 +37,22 @@ window.onload = function() {
yield memory.stopRecordingAllocations();
ok(true, "Can stop recording allocations");
// Find the index of our 10 allocations, and then assert that it is in the
// `allocator` frame.
// Assert that we have the 10 allocations in the `allocator` frame.
var index = 0;
var found = false;
for (var count of response.counts) {
if (count === 10) {
if (count === 10
&& response.frames[index]
&& response.frames[index].functionDisplayName == "allocator") {
found = true;
break;
}
index++;
}
ok(found, "Should find the 10 allocations.");
is(response.frames[index].functionDisplayName, "allocator",
"Should have found the allocator frame.");
ok(found, "Should find the 10 allocations in the allocator frame.");
yield memory.detach();
destroyServerAndFinish(client);
View File
@ -0,0 +1,60 @@
<!DOCTYPE HTML>
<html>
<!--
Bug 1068171 - Test controlling the memory actor's allocation sampling probability.
-->
<head>
<meta charset="utf-8">
<title>Memory monitoring actor test</title>
<script type="application/javascript" src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
</head>
<body>
<pre id="test">
<script src="memory-helpers.js" type="application/javascript;version=1.8"></script>
<script>
window.onload = function() {
SimpleTest.waitForExplicitFinish();
Task.spawn(function* () {
var { memory, client } = yield startServerAndGetSelectedTabMemory();
yield memory.attach();
var allocs = [];
function allocator() {
for (var i = 0; i < 100; i++) {
allocs.push({});
}
}
var testProbability = Task.async(function* (p, expected) {
info("probability = " + p);
yield memory.startRecordingAllocations({
probability: p
});
allocator();
var response = yield memory.getAllocations();
return response.allocations.length;
});
is((yield testProbability(0.0)), 0,
"With probability = 0.0, we shouldn't get any allocations.");
ok((yield testProbability(1.0)) >= 100,
"With probability = 1.0, we should get all 100 allocations (plus "
+ "whatever allocations the actor and SpiderMonkey make).");
// We don't test any other probabilities because the test would be
// non-deterministic. We don't have a way to control the PRNG like we do in
// jit-tests
// (js/src/jit-test/tests/debug/Memory-allocationsSamplingProbability-*.js).
yield memory.stopRecordingAllocations();
yield memory.detach();
destroyServerAndFinish(client);
});
};
</script>
</pre>
</body>
</html>
Some files were not shown because too many files have changed in this diff.