Merge mozilla-central to b2g-inbound

This commit is contained in:
Carsten "Tomcat" Book 2015-11-06 14:00:33 +01:00
commit 7da46b6087
230 changed files with 41237 additions and 2236 deletions

View File

@ -41,118 +41,118 @@ public: // construction, destruction
NS_DECL_ISUPPORTS_INHERITED
public: // IUnknown methods - see iunknown.h for documentation
STDMETHODIMP QueryInterface(REFIID, void**);
STDMETHODIMP QueryInterface(REFIID, void**) override;
// Return the registered OLE class ID of this object's CfDataObj.
CLSID GetClassID() const;
public: // COM interface IAccessible
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accParent(
/* [retval][out] */ IDispatch __RPC_FAR *__RPC_FAR *ppdispParent);
/* [retval][out] */ IDispatch __RPC_FAR *__RPC_FAR *ppdispParent) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accChildCount(
/* [retval][out] */ long __RPC_FAR *pcountChildren);
/* [retval][out] */ long __RPC_FAR *pcountChildren) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accChild(
/* [in] */ VARIANT varChild,
/* [retval][out] */ IDispatch __RPC_FAR *__RPC_FAR *ppdispChild);
/* [retval][out] */ IDispatch __RPC_FAR *__RPC_FAR *ppdispChild) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accName(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszName);
/* [retval][out] */ BSTR __RPC_FAR *pszName) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accValue(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszValue);
/* [retval][out] */ BSTR __RPC_FAR *pszValue) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accDescription(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszDescription);
/* [retval][out] */ BSTR __RPC_FAR *pszDescription) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accRole(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ VARIANT __RPC_FAR *pvarRole);
/* [retval][out] */ VARIANT __RPC_FAR *pvarRole) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accState(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ VARIANT __RPC_FAR *pvarState);
/* [retval][out] */ VARIANT __RPC_FAR *pvarState) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accHelp(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszHelp);
/* [retval][out] */ BSTR __RPC_FAR *pszHelp) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accHelpTopic(
/* [out] */ BSTR __RPC_FAR *pszHelpFile,
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ long __RPC_FAR *pidTopic);
/* [retval][out] */ long __RPC_FAR *pidTopic) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accKeyboardShortcut(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszKeyboardShortcut);
/* [retval][out] */ BSTR __RPC_FAR *pszKeyboardShortcut) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accFocus(
/* [retval][out] */ VARIANT __RPC_FAR *pvarChild);
/* [retval][out] */ VARIANT __RPC_FAR *pvarChild) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accSelection(
/* [retval][out] */ VARIANT __RPC_FAR *pvarChildren);
/* [retval][out] */ VARIANT __RPC_FAR *pvarChildren) override;
virtual /* [id][propget] */ HRESULT STDMETHODCALLTYPE get_accDefaultAction(
/* [optional][in] */ VARIANT varChild,
/* [retval][out] */ BSTR __RPC_FAR *pszDefaultAction);
/* [retval][out] */ BSTR __RPC_FAR *pszDefaultAction) override;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE accSelect(
/* [in] */ long flagsSelect,
/* [optional][in] */ VARIANT varChild);
/* [optional][in] */ VARIANT varChild) override;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE accLocation(
/* [out] */ long __RPC_FAR *pxLeft,
/* [out] */ long __RPC_FAR *pyTop,
/* [out] */ long __RPC_FAR *pcxWidth,
/* [out] */ long __RPC_FAR *pcyHeight,
/* [optional][in] */ VARIANT varChild);
/* [optional][in] */ VARIANT varChild) override;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE accNavigate(
/* [in] */ long navDir,
/* [optional][in] */ VARIANT varStart,
/* [retval][out] */ VARIANT __RPC_FAR *pvarEndUpAt);
/* [retval][out] */ VARIANT __RPC_FAR *pvarEndUpAt) override;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE accHitTest(
/* [in] */ long xLeft,
/* [in] */ long yTop,
/* [retval][out] */ VARIANT __RPC_FAR *pvarChild);
/* [retval][out] */ VARIANT __RPC_FAR *pvarChild) override;
virtual /* [id] */ HRESULT STDMETHODCALLTYPE accDoDefaultAction(
/* [optional][in] */ VARIANT varChild);
/* [optional][in] */ VARIANT varChild) override;
virtual /* [id][propput] */ HRESULT STDMETHODCALLTYPE put_accName(
/* [optional][in] */ VARIANT varChild,
/* [in] */ BSTR szName);
/* [in] */ BSTR szName) override;
virtual /* [id][propput] */ HRESULT STDMETHODCALLTYPE put_accValue(
/* [optional][in] */ VARIANT varChild,
/* [in] */ BSTR szValue);
/* [in] */ BSTR szValue) override;
// IDispatch (support of scripting languages like VB)
virtual HRESULT STDMETHODCALLTYPE GetTypeInfoCount(UINT *pctinfo);
virtual HRESULT STDMETHODCALLTYPE GetTypeInfoCount(UINT *pctinfo) override;
virtual HRESULT STDMETHODCALLTYPE GetTypeInfo(UINT iTInfo, LCID lcid,
ITypeInfo **ppTInfo);
ITypeInfo **ppTInfo) override;
virtual HRESULT STDMETHODCALLTYPE GetIDsOfNames(REFIID riid,
LPOLESTR *rgszNames,
UINT cNames,
LCID lcid,
DISPID *rgDispId);
DISPID *rgDispId) override;
virtual HRESULT STDMETHODCALLTYPE Invoke(DISPID dispIdMember, REFIID riid,
LCID lcid, WORD wFlags,
DISPPARAMS *pDispParams,
VARIANT *pVarResult,
EXCEPINFO *pExcepInfo,
UINT *puArgErr);
UINT *puArgErr) override;
// Accessible
virtual nsresult HandleAccEvent(AccEvent* aEvent);
virtual nsresult HandleAccEvent(AccEvent* aEvent) override;
virtual void Shutdown() override;
// Helper methods

View File

@ -28,11 +28,8 @@ USE_LIBS += [
for var in ('MOZ_APP_NAME', 'MOZ_APP_VERSION', 'MOZ_UPDATER'):
DEFINES[var] = CONFIG[var]
GENERATED_INCLUDES += [
'/build',
]
LOCAL_INCLUDES += [
'!/build',
'/toolkit/xre',
'/xpcom/base',
'/xpcom/build',

View File

@ -24,11 +24,8 @@ FINAL_TARGET_FILES.defaults.profile += ['profile/prefs.js']
DEFINES['APP_VERSION'] = CONFIG['MOZ_APP_VERSION']
GENERATED_INCLUDES += [
'/build',
]
LOCAL_INCLUDES += [
'!/build',
'/toolkit/xre',
'/xpcom/base',
'/xpcom/build',

View File

@ -17,6 +17,7 @@
"_": false,
"Backbone": false,
"chai": false,
"classNames": false,
"console": false,
"loop": true,
"MozActivity": false,

View File

@ -23,6 +23,7 @@
<script type="text/javascript" src="loop/shared/libs/react-0.12.2.js"></script>
<script type="text/javascript" src="loop/shared/libs/lodash-3.9.3.js"></script>
<script type="text/javascript" src="loop/shared/libs/backbone-1.2.1.js"></script>
<script type="text/javascript" src="loop/shared/libs/classnames-2.2.0.js"></script>
<script type="text/javascript" src="loop/shared/js/utils.js"></script>
<script type="text/javascript" src="loop/shared/js/mixins.js"></script>

View File

@ -165,7 +165,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
if (!this.props.displayed) {
return null;
@ -245,7 +245,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var accountEntryCSSClass = this._isSignedIn() ? "entry-settings-signout" :
"entry-settings-signin";
var notificationsLabel = this.props.mozLoop.doNotDisturb ? "settings_menu_item_turnnotificationson" :
@ -436,7 +436,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var roomClasses = React.addons.classSet({
var roomClasses = classNames({
"room-entry": true,
"room-active": this._isActive(),
"room-opened": this.props.isOpenedRoom
@ -588,7 +588,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var dropdownClasses = React.addons.classSet({
var dropdownClasses = classNames({
"dropdown-menu": true,
"dropdown-menu-up": this.state.openDirUp
});

View File

@ -165,7 +165,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
if (!this.props.displayed) {
return null;
@ -245,7 +245,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var accountEntryCSSClass = this._isSignedIn() ? "entry-settings-signout" :
"entry-settings-signin";
var notificationsLabel = this.props.mozLoop.doNotDisturb ? "settings_menu_item_turnnotificationson" :
@ -436,7 +436,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var roomClasses = React.addons.classSet({
var roomClasses = classNames({
"room-entry": true,
"room-active": this._isActive(),
"room-opened": this.props.isOpenedRoom
@ -588,7 +588,7 @@ loop.panel = (function(_, mozL10n) {
},
render: function() {
var dropdownClasses = React.addons.classSet({
var dropdownClasses = classNames({
"dropdown-menu": true,
"dropdown-menu-up": this.state.openDirUp
});

View File

@ -199,7 +199,7 @@ loop.roomViews = (function(mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var shareDropdown = cx({
"share-service-dropdown": true,
"dropdown-menu": true,
@ -330,7 +330,7 @@ loop.roomViews = (function(mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
return (
React.createElement("div", {className: "room-invitation-overlay"},
React.createElement("div", {className: "room-invitation-content"},
@ -546,7 +546,7 @@ loop.roomViews = (function(mozL10n) {
var urlDescription = url && url.description || "";
var location = url && url.location || "";
var cx = React.addons.classSet;
var cx = classNames;
var availableContext = this.state.availableContext;
return (
React.createElement("div", {className: "room-context"},

View File

@ -199,7 +199,7 @@ loop.roomViews = (function(mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var shareDropdown = cx({
"share-service-dropdown": true,
"dropdown-menu": true,
@ -330,7 +330,7 @@ loop.roomViews = (function(mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
return (
<div className="room-invitation-overlay">
<div className="room-invitation-content">
@ -546,7 +546,7 @@ loop.roomViews = (function(mozL10n) {
var urlDescription = url && url.description || "";
var location = url && url.location || "";
var cx = React.addons.classSet;
var cx = classNames;
var availableContext = this.state.availableContext;
return (
<div className="room-context">

View File

@ -17,6 +17,7 @@
<script type="text/javascript" src="loop/libs/l10n.js"></script>
<script type="text/javascript" src="loop/shared/libs/lodash-3.9.3.js"></script>
<script type="text/javascript" src="loop/shared/libs/backbone-1.2.1.js"></script>
<script type="text/javascript" src="loop/shared/libs/classnames-2.2.0.js"></script>
<script type="text/javascript" src="loop/shared/js/utils.js"></script>
<script type="text/javascript" src="loop/shared/js/models.js"></script>

View File

@ -48,7 +48,7 @@ loop.shared.views.chat = (function(mozL10n) {
},
render: function() {
var classes = React.addons.classSet({
var classes = classNames({
"text-chat-entry": true,
"received": this.props.type === CHAT_MESSAGE_TYPES.RECEIVED,
"sent": this.props.type === CHAT_MESSAGE_TYPES.SENT,
@ -165,7 +165,7 @@ loop.shared.views.chat = (function(mozL10n) {
/* Keep track of the last printed timestamp. */
var lastTimestamp = 0;
var entriesClasses = React.addons.classSet({
var entriesClasses = classNames({
"text-chat-entries": true
});
@ -395,7 +395,7 @@ loop.shared.views.chat = (function(mozL10n) {
return item.type === CHAT_MESSAGE_TYPES.SENT;
});
var textChatViewClasses = React.addons.classSet({
var textChatViewClasses = classNames({
"text-chat-view": true,
"text-chat-entries-empty": !messageList.length,
"text-chat-disabled": !this.state.textChatEnabled

View File

@ -48,7 +48,7 @@ loop.shared.views.chat = (function(mozL10n) {
},
render: function() {
var classes = React.addons.classSet({
var classes = classNames({
"text-chat-entry": true,
"received": this.props.type === CHAT_MESSAGE_TYPES.RECEIVED,
"sent": this.props.type === CHAT_MESSAGE_TYPES.SENT,
@ -165,7 +165,7 @@ loop.shared.views.chat = (function(mozL10n) {
/* Keep track of the last printed timestamp. */
var lastTimestamp = 0;
var entriesClasses = React.addons.classSet({
var entriesClasses = classNames({
"text-chat-entries": true
});
@ -395,7 +395,7 @@ loop.shared.views.chat = (function(mozL10n) {
return item.type === CHAT_MESSAGE_TYPES.SENT;
});
var textChatViewClasses = React.addons.classSet({
var textChatViewClasses = classNames({
"text-chat-view": true,
"text-chat-entries-empty": !messageList.length,
"text-chat-disabled": !this.state.textChatEnabled

View File

@ -70,7 +70,7 @@ loop.shared.views = (function(_, mozL10n) {
},
_getClasses: function() {
var cx = React.addons.classSet;
var cx = classNames;
// classes
var classesObj = {
"btn": true,
@ -170,7 +170,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var isActive = this.props.state === SCREEN_SHARE_STATES.ACTIVE;
var screenShareClasses = cx({
@ -304,7 +304,7 @@ loop.shared.views = (function(_, mozL10n) {
* Recover the needed info for generating an specific menu Item
*/
getItemInfo: function(menuItem) {
var cx = React.addons.classSet;
var cx = classNames;
switch (menuItem.id) {
case "help":
return {
@ -362,7 +362,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var settingsDropdownMenuClasses = cx({
"settings-menu": true,
"dropdown-menu": true,
@ -494,7 +494,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var conversationToolbarCssClasses = cx({
"conversation-toolbar": true,
"idle": this.state.idle
@ -642,7 +642,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var classObject = { button: true, disabled: this.props.disabled };
if (this.props.additionalClass) {
classObject[this.props.additionalClass] = true;
@ -675,7 +675,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var classObject = { "button-group": true };
if (this.props.additionalClass) {
classObject[this.props.additionalClass] = true;
@ -742,7 +742,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var wrapperClasses = {
"checkbox-wrapper": true,
disabled: this.props.disabled
@ -860,7 +860,7 @@ loop.shared.views = (function(_, mozL10n) {
"shared/img/icons-16x16.svg#globe";
}
var wrapperClasses = React.addons.classSet({
var wrapperClasses = classNames({
"context-wrapper": true,
"clicks-allowed": this.props.allowClick
});
@ -1076,17 +1076,17 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var remoteStreamClasses = React.addons.classSet({
var remoteStreamClasses = classNames({
"remote": true,
"focus-stream": !this.props.displayScreenShare
});
var screenShareStreamClasses = React.addons.classSet({
var screenShareStreamClasses = classNames({
"screen": true,
"focus-stream": this.props.displayScreenShare
});
var mediaWrapperClasses = React.addons.classSet({
var mediaWrapperClasses = classNames({
"media-wrapper": true,
"receiving-screen-share": this.props.displayScreenShare,
"showing-local-streams": this.props.localSrcMediaElement ||

View File

@ -70,7 +70,7 @@ loop.shared.views = (function(_, mozL10n) {
},
_getClasses: function() {
var cx = React.addons.classSet;
var cx = classNames;
// classes
var classesObj = {
"btn": true,
@ -170,7 +170,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var isActive = this.props.state === SCREEN_SHARE_STATES.ACTIVE;
var screenShareClasses = cx({
@ -304,7 +304,7 @@ loop.shared.views = (function(_, mozL10n) {
* Recover the needed info for generating an specific menu Item
*/
getItemInfo: function(menuItem) {
var cx = React.addons.classSet;
var cx = classNames;
switch (menuItem.id) {
case "help":
return {
@ -362,7 +362,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var settingsDropdownMenuClasses = cx({
"settings-menu": true,
"dropdown-menu": true,
@ -494,7 +494,7 @@ loop.shared.views = (function(_, mozL10n) {
return null;
}
var cx = React.addons.classSet;
var cx = classNames;
var conversationToolbarCssClasses = cx({
"conversation-toolbar": true,
"idle": this.state.idle
@ -642,7 +642,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var classObject = { button: true, disabled: this.props.disabled };
if (this.props.additionalClass) {
classObject[this.props.additionalClass] = true;
@ -675,7 +675,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var classObject = { "button-group": true };
if (this.props.additionalClass) {
classObject[this.props.additionalClass] = true;
@ -742,7 +742,7 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var cx = React.addons.classSet;
var cx = classNames;
var wrapperClasses = {
"checkbox-wrapper": true,
disabled: this.props.disabled
@ -860,7 +860,7 @@ loop.shared.views = (function(_, mozL10n) {
"shared/img/icons-16x16.svg#globe";
}
var wrapperClasses = React.addons.classSet({
var wrapperClasses = classNames({
"context-wrapper": true,
"clicks-allowed": this.props.allowClick
});
@ -1076,17 +1076,17 @@ loop.shared.views = (function(_, mozL10n) {
},
render: function() {
var remoteStreamClasses = React.addons.classSet({
var remoteStreamClasses = classNames({
"remote": true,
"focus-stream": !this.props.displayScreenShare
});
var screenShareStreamClasses = React.addons.classSet({
var screenShareStreamClasses = classNames({
"screen": true,
"focus-stream": this.props.displayScreenShare
});
var mediaWrapperClasses = React.addons.classSet({
var mediaWrapperClasses = classNames({
"media-wrapper": true,
"receiving-screen-share": this.props.displayScreenShare,
"showing-local-streams": this.props.localSrcMediaElement ||

View File

@ -0,0 +1,48 @@
/*!
Copyright (c) 2015 Jed Watson.
Licensed under the MIT License (MIT), see
http://jedwatson.github.io/classnames
*/
/* global define */
(function () {
'use strict';
var hasOwn = {}.hasOwnProperty;
function classNames () {
var classes = '';
for (var i = 0; i < arguments.length; i++) {
var arg = arguments[i];
if (!arg) continue;
var argType = typeof arg;
if (argType === 'string' || argType === 'number') {
classes += ' ' + arg;
} else if (Array.isArray(arg)) {
classes += ' ' + classNames.apply(null, arg);
} else if (argType === 'object') {
for (var key in arg) {
if (hasOwn.call(arg, key) && arg[key]) {
classes += ' ' + key;
}
}
}
}
return classes.substr(1);
}
if (typeof module !== 'undefined' && module.exports) {
module.exports = classNames;
} else if (typeof define === 'function' && typeof define.amd === 'object' && define.amd) {
// register as 'classnames', consistent with npm package name
define('classnames', function () {
return classNames;
});
} else {
window.classNames = classNames;
}
}());

View File

@ -114,6 +114,7 @@ browser.jar:
#endif
content/browser/loop/shared/libs/lodash-3.9.3.js (content/shared/libs/lodash-3.9.3.js)
content/browser/loop/shared/libs/backbone-1.2.1.js (content/shared/libs/backbone-1.2.1.js)
content/browser/loop/shared/libs/classnames-2.2.0.js (content/shared/libs/classnames-2.2.0.js)
# Shared sounds
content/browser/loop/shared/sounds/ringtone.ogg (content/shared/sounds/ringtone.ogg)

View File

@ -80,7 +80,7 @@ loop.standaloneRoomViews = (function(mozL10n) {
mozL10n.get("rooms_room_joined_own_conversation_label") :
mozL10n.get("rooms_room_join_label");
var buttonClasses = React.addons.classSet({
var buttonClasses = classNames({
btn: true,
"btn-info": true,
disabled: this.state.roomState === ROOM_STATES.JOINED
@ -325,7 +325,7 @@ loop.standaloneRoomViews = (function(mozL10n) {
var isChrome = utils.isChrome(navigator.userAgent);
var isFirefox = utils.isFirefox(navigator.userAgent);
var isOpera = utils.isOpera(navigator.userAgent);
var promptMediaMessageClasses = React.addons.classSet({
var promptMediaMessageClasses = classNames({
"prompt-media-message": true,
"chrome": isChrome,
"firefox": isFirefox,

View File

@ -80,7 +80,7 @@ loop.standaloneRoomViews = (function(mozL10n) {
mozL10n.get("rooms_room_joined_own_conversation_label") :
mozL10n.get("rooms_room_join_label");
var buttonClasses = React.addons.classSet({
var buttonClasses = classNames({
btn: true,
"btn-info": true,
disabled: this.state.roomState === ROOM_STATES.JOINED
@ -325,7 +325,7 @@ loop.standaloneRoomViews = (function(mozL10n) {
var isChrome = utils.isChrome(navigator.userAgent);
var isFirefox = utils.isFirefox(navigator.userAgent);
var isOpera = utils.isOpera(navigator.userAgent);
var promptMediaMessageClasses = React.addons.classSet({
var promptMediaMessageClasses = classNames({
"prompt-media-message": true,
"chrome": isChrome,
"firefox": isFirefox,

View File

@ -38,9 +38,11 @@ if (typeof __PROD__ !== "undefined") {
// pull in the unbuilt version from node_modules
require("expose?React!react");
require("expose?React!react/addons");
require("expose?classNames!classnames");
} else {
// our development server setup doesn't yet handle real modules, so for now...
require("shared/libs/react-0.12.2.js");
require("shared/libs/classnames-2.2.0.js");
}

View File

@ -12,6 +12,7 @@
},
"dependencies": {},
"devDependencies": {
"classnames": "2.2.x",
"compression": "1.5.x",
"eslint": "1.6.x",
"eslint-plugin-mozilla": "../../../../testing/eslint-plugin-mozilla",

View File

@ -31,6 +31,7 @@
<!-- libs -->
<script src="../../content/libs/l10n.js"></script>
<script src="../../content/shared/libs/react-0.12.2.js"></script>
<script src="../../content/shared/libs/classnames-2.2.0.js"></script>
<script src="../../content/shared/libs/lodash-3.9.3.js"></script>
<script src="../../content/shared/libs/backbone-1.2.1.js"></script>

View File

@ -12,6 +12,7 @@ module.exports = function(config) {
baseConfig.files = baseConfig.files.concat([
"content/libs/l10n.js",
"content/shared/libs/react-0.12.2.js",
"content/shared/libs/classnames-2.2.0.js",
"content/shared/libs/lodash-3.9.3.js",
"content/shared/libs/backbone-1.2.1.js",
"test/shared/vendor/*.js",

View File

@ -14,6 +14,7 @@ module.exports = function(config) {
"content/shared/libs/lodash-3.9.3.js",
"content/shared/libs/backbone-1.2.1.js",
"content/shared/libs/react-0.12.2.js",
"content/shared/libs/classnames-2.2.0.js",
"content/shared/libs/sdk.js",
"test/shared/vendor/*.js",
"test/karma/head.js", // Add test fixture container

View File

@ -30,6 +30,7 @@
<!-- libs -->
<script src="../../content/shared/libs/react-0.12.2.js"></script>
<script src="../../content/shared/libs/classnames-2.2.0.js"></script>
<script src="../../content/shared/libs/lodash-3.9.3.js"></script>
<script src="../../content/shared/libs/backbone-1.2.1.js"></script>
<script src="../../standalone/content/libs/l10n-gaia-02ca67948fe8.js"></script>

View File

@ -31,6 +31,7 @@
<!-- libs -->
<script src="../../content/shared/libs/react-0.12.2.js"></script>
<script src="../../content/shared/libs/classnames-2.2.0.js"></script>
<script src="../../content/shared/libs/lodash-3.9.3.js"></script>
<script src="../../content/shared/libs/backbone-1.2.1.js"></script>
<script src="../../standalone/content/libs/l10n-gaia-02ca67948fe8.js"></script>

View File

@ -28,6 +28,7 @@
<script src="fake-mozLoop.js"></script>
<script src="fake-l10n.js"></script>
<script src="../content/shared/libs/react-0.12.2.js"></script>
<script src="../content/shared/libs/classnames-2.2.0.js"></script>
<script src="../content/shared/libs/lodash-3.9.3.js"></script>
<script src="../content/shared/libs/backbone-1.2.1.js"></script>
<script src="../content/shared/js/actions.js"></script>

View File

@ -556,7 +556,7 @@
width += 2;
}
var cx = React.addons.classSet;
var cx = classNames;
return (
React.createElement("div", {className: "example"},
React.createElement("h3", {id: this.makeId()},

View File

@ -556,7 +556,7 @@
width += 2;
}
var cx = React.addons.classSet;
var cx = classNames;
return (
<div className="example">
<h3 id={this.makeId()}>

View File

@ -115,7 +115,6 @@ idea {
excludeDirs -= file(topobjdir)
excludeDirs += files(file(topobjdir).listFiles())
excludeDirs -= file("${topobjdir}/gradle")
excludeDirs -= file("${topobjdir}/mobile")
}
if (!mozconfig.substs.MOZ_INSTALL_TRACKING) {

View File

@ -41,25 +41,6 @@ Note: `{mach} mercurial-setup` does not make any changes without prompting
you first.
'''.strip()
OLD_MERCURIAL_TOOLS = '''
*** MERCURIAL CONFIGURATION POTENTIALLY OUT OF DATE ***
mach has detected that it has been a while since you have run
`{mach} mercurial-setup`.
Having the latest Mercurial tools and configuration should lead to a better,
more productive experience when working on Mozilla projects.
Please run `{mach} mercurial-setup` now.
Reminder: `{mach} mercurial-setup` does not make any changes without
prompting you first.
To avoid this message in the future, run `{mach} mercurial-setup` once a month.
Or, schedule `{mach} mercurial-setup --update-only` to run automatically in
the background at least once a month.
'''.strip()
MERCURIAL_SETUP_FATAL_INTERVAL = 31 * 24 * 60 * 60
@ -281,9 +262,6 @@ def bootstrap(topsrcdir, mozilla_dir=None):
if mtime is None:
print(NO_MERCURIAL_SETUP.format(mach=sys.argv[0]), file=sys.stderr)
sys.exit(2)
elif time.time() - mtime > MERCURIAL_SETUP_FATAL_INTERVAL:
print(OLD_MERCURIAL_TOOLS.format(mach=sys.argv[0]), file=sys.stderr)
sys.exit(2)
def populate_context(context, key=None):
if key is None:

View File

@ -28,11 +28,8 @@ include('/ipc/chromium/chromium-config.mozbuild')
FINAL_LIBRARY = 'xul'
GENERATED_INCLUDES += [
'/xpcom',
]
LOCAL_INCLUDES += [
'!/xpcom',
'/dom/base',
'/netwerk/base',
'/netwerk/protocol/res',

View File

@ -416,7 +416,7 @@ skip-if = e10s && debug
[browser_dbg_search-sources-03.js]
skip-if = e10s && debug
[browser_dbg_search-symbols.js]
skip-if = e10s && debug
skip-if = (e10s && debug) || os == "linux" # Bug 1132375
[browser_dbg_searchbox-help-popup-01.js]
skip-if = e10s && debug
[browser_dbg_searchbox-help-popup-02.js]

View File

@ -31,10 +31,9 @@ class nsDOMNavigationTiming;
[ptr] native nsDOMNavigationTimingPtr(nsDOMNavigationTiming);
[ref] native nsIContentViewerTArray(nsTArray<nsCOMPtr<nsIContentViewer> >);
[scriptable, builtinclass, uuid(702e0a92-7d63-490e-b5ee-d247e6bd4588)]
[scriptable, builtinclass, uuid(fbd04c99-e149-473f-8a68-44f53d82f98b)]
interface nsIContentViewer : nsISupports
{
[noscript] void init(in nsIWidgetPtr aParentWidget,
[const] in nsIntRectRef aBounds);
@ -245,27 +244,6 @@ interface nsIContentViewer : nsISupports
*/
[noscript] void appendSubtree(in nsIContentViewerTArray array);
/**
* Set the maximum line width for the document.
* NOTE: This will generate a reflow!
*
* @param maxLineWidth The maximum width of any line boxes on the page,
* in CSS pixels.
*/
void changeMaxLineBoxWidth(in int32_t maxLineBoxWidth);
/**
* Instruct the refresh driver to discontinue painting until further
* notice.
*/
void pausePainting();
/**
* Instruct the refresh driver to resume painting after a previous call to
* pausePainting().
*/
void resumePainting();
/*
* Render the document as if being viewed on a device with the specified
* media type. This will cause a reflow.

View File

@ -87,6 +87,9 @@ enum DOM4ErrorTypeCodeMap {
BtAuthFailureError = 0,
BtRmtDevDownError = 0,
BtAuthRejectedError = 0,
/* Push API errors */
PermissionDeniedError = 0,
};
#define DOM4_MSG_DEF(name, message, nsresult) {(nsresult), name, #name, message},

View File

@ -151,5 +151,11 @@ DOM4_MSG_DEF(InvalidStateError, "A mutation operation was attempted on a file st
DOM4_MSG_DEF(AbortError, "A request was aborted, for example through a call to FileHandle.abort.", NS_ERROR_DOM_FILEHANDLE_ABORT_ERR)
DOM4_MSG_DEF(QuotaExceededError, "The current file handle exceeded its quota limitations.", NS_ERROR_DOM_FILEHANDLE_QUOTA_ERR)
/* Push API errors. */
DOM4_MSG_DEF(InvalidStateError, "Invalid service worker registration.", NS_ERROR_DOM_PUSH_INVALID_REGISTRATION_ERR)
DOM4_MSG_DEF(PermissionDeniedError, "User denied permission to use the Push API.", NS_ERROR_DOM_PUSH_DENIED_ERR)
DOM4_MSG_DEF(AbortError, "Error retrieving push subscription.", NS_ERROR_DOM_PUSH_ABORT_ERR)
DOM4_MSG_DEF(NetworkError, "Push service unreachable.", NS_ERROR_DOM_PUSH_SERVICE_UNREACHABLE)
DOM_MSG_DEF(NS_ERROR_DOM_JS_EXCEPTION, "A callback threw an exception")
DOM_MSG_DEF(NS_ERROR_DOM_DOMEXCEPTION, "A DOMException was thrown")

View File

@ -386,30 +386,14 @@ CandidatesTraverse(CustomElementHashKey* aKey,
return PL_DHASH_NEXT;
}
struct CustomDefinitionTraceArgs
{
const TraceCallbacks& callbacks;
void* closure;
};
static PLDHashOperator
CustomDefinitionTrace(CustomElementHashKey *aKey,
CustomElementDefinition *aData,
void *aArg)
{
CustomDefinitionTraceArgs* traceArgs = static_cast<CustomDefinitionTraceArgs*>(aArg);
MOZ_ASSERT(aData, "Definition must not be null");
traceArgs->callbacks.Trace(&aData->mPrototype, "mCustomDefinitions prototype",
traceArgs->closure);
return PL_DHASH_NEXT;
}
NS_IMPL_CYCLE_COLLECTION_CLASS(Registry)
NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(Registry)
CustomDefinitionTraceArgs customDefinitionArgs = { aCallbacks, aClosure };
tmp->mCustomDefinitions.EnumerateRead(CustomDefinitionTrace,
&customDefinitionArgs);
for (auto iter = tmp->mCustomDefinitions.Iter(); !iter.Done(); iter.Next()) {
aCallbacks.Trace(&iter.UserData()->mPrototype,
"mCustomDefinitions prototype",
aClosure);
}
NS_IMPL_CYCLE_COLLECTION_TRACE_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(Registry)
@ -941,57 +925,16 @@ nsExternalResourceMap::RequestResource(nsIURI* aURI,
return nullptr;
}
struct
nsExternalResourceEnumArgs
{
nsIDocument::nsSubDocEnumFunc callback;
void *data;
};
static PLDHashOperator
ExternalResourceEnumerator(nsIURI* aKey,
nsExternalResourceMap::ExternalResource* aData,
void* aClosure)
{
nsExternalResourceEnumArgs* args =
static_cast<nsExternalResourceEnumArgs*>(aClosure);
bool next =
aData->mDocument ? args->callback(aData->mDocument, args->data) : true;
return next ? PL_DHASH_NEXT : PL_DHASH_STOP;
}
void
nsExternalResourceMap::EnumerateResources(nsIDocument::nsSubDocEnumFunc aCallback,
void* aData)
{
nsExternalResourceEnumArgs args = { aCallback, aData };
mMap.EnumerateRead(ExternalResourceEnumerator, &args);
}
static PLDHashOperator
ExternalResourceTraverser(nsIURI* aKey,
nsExternalResourceMap::ExternalResource* aData,
void* aClosure)
{
nsCycleCollectionTraversalCallback *cb =
static_cast<nsCycleCollectionTraversalCallback*>(aClosure);
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*cb,
"mExternalResourceMap.mMap entry"
"->mDocument");
cb->NoteXPCOMChild(aData->mDocument);
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*cb,
"mExternalResourceMap.mMap entry"
"->mViewer");
cb->NoteXPCOMChild(aData->mViewer);
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*cb,
"mExternalResourceMap.mMap entry"
"->mLoadGroup");
cb->NoteXPCOMChild(aData->mLoadGroup);
return PL_DHASH_NEXT;
for (auto iter = mMap.Iter(); !iter.Done(); iter.Next()) {
nsExternalResourceMap::ExternalResource* resource = iter.UserData();
if (resource->mDocument && !aCallback(resource->mDocument, aData)) {
break;
}
}
}
void
@ -999,7 +942,24 @@ nsExternalResourceMap::Traverse(nsCycleCollectionTraversalCallback* aCallback) c
{
// mPendingLoads will get cleared out as the requests complete, so
// no need to worry about those here.
mMap.EnumerateRead(ExternalResourceTraverser, aCallback);
for (auto iter = mMap.ConstIter(); !iter.Done(); iter.Next()) {
nsExternalResourceMap::ExternalResource* resource = iter.UserData();
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*aCallback,
"mExternalResourceMap.mMap entry"
"->mDocument");
aCallback->NoteXPCOMChild(resource->mDocument);
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*aCallback,
"mExternalResourceMap.mMap entry"
"->mViewer");
aCallback->NoteXPCOMChild(resource->mViewer);
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*aCallback,
"mExternalResourceMap.mMap entry"
"->mLoadGroup");
aCallback->NoteXPCOMChild(resource->mLoadGroup);
}
}
static PLDHashOperator

View File

@ -121,26 +121,17 @@ nsFrameMessageManager::~nsFrameMessageManager()
}
}
// Cycle-collector traversal helper: reports every strongly-held message
// listener in |aListeners| as an edge to the traversal callback passed via
// |aCb|. Always continues to the next hashtable entry.
static PLDHashOperator
CycleCollectorTraverseListeners(const nsAString& aKey,
                                nsAutoTObserverArray<nsMessageListenerInfo, 1>* aListeners,
                                void* aCb)
{
  auto* traversal = static_cast<nsCycleCollectionTraversalCallback*>(aCb);
  const uint32_t numListeners = aListeners->Length();
  for (uint32_t idx = 0; idx < numListeners; ++idx) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(*traversal, "listeners[i] mStrongListener");
    traversal->NoteXPCOMChild(aListeners->ElementAt(idx).mStrongListener.get());
  }
  return PL_DHASH_NEXT;
}
NS_IMPL_CYCLE_COLLECTION_CLASS(nsFrameMessageManager)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(nsFrameMessageManager)
tmp->mListeners.EnumerateRead(CycleCollectorTraverseListeners,
static_cast<void*>(&cb));
for (auto iter = tmp->mListeners.Iter(); !iter.Done(); iter.Next()) {
nsAutoTObserverArray<nsMessageListenerInfo, 1>* listeners = iter.UserData();
uint32_t count = listeners->Length();
for (uint32_t i = 0; i < count; ++i) {
NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(cb, "listeners[i] mStrongListener");
cb.NoteXPCOMChild(listeners->ElementAt(i).mStrongListener.get());
}
}
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mChildManagers)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParentManager)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
@ -419,35 +410,6 @@ nsFrameMessageManager::RemoveMessageListener(const nsAString& aMessage,
return NS_OK;
}
#ifdef DEBUG
// Debug-only closure for CanonicalChecker: pairs the weak reference being
// registered with the canonical nsISupports identity it should resolve to.
typedef struct
{
  nsCOMPtr<nsISupports> mCanonical; // canonical identity of the new listener
  nsWeakPtr mWeak;                  // weak reference being registered
} CanonicalCheckerParams;
// Debug sanity check: asserts that two weak references compare equal exactly
// when the objects they refer to share the same canonical nsISupports
// identity. A mismatch would indicate an object whose QueryInterface hands
// out inconsistent canonical pointers, which breaks weak-listener removal.
static PLDHashOperator
CanonicalChecker(const nsAString& aKey,
                 nsAutoTObserverArray<nsMessageListenerInfo, 1>* aListeners,
                 void* aParams)
{
  auto* checkParams = static_cast<CanonicalCheckerParams*>(aParams);
  for (uint32_t idx = 0, len = aListeners->Length(); idx < len; idx++) {
    const nsWeakPtr& weakRef = aListeners->ElementAt(idx).mWeakListener;
    if (weakRef) {
      nsCOMPtr<nsISupports> resolved = do_QueryReferent(weakRef);
      MOZ_ASSERT((checkParams->mCanonical == resolved) ==
                 (checkParams->mWeak == weakRef));
    }
  }
  return PL_DHASH_NEXT;
}
#endif
NS_IMETHODIMP
nsFrameMessageManager::AddWeakMessageListener(const nsAString& aMessage,
nsIMessageListener* aListener)
@ -461,10 +423,17 @@ nsFrameMessageManager::AddWeakMessageListener(const nsAString& aMessage,
// this to happen; it will break e.g. RemoveWeakMessageListener. So let's
// check that we're not getting ourselves into that situation.
nsCOMPtr<nsISupports> canonical = do_QueryInterface(aListener);
CanonicalCheckerParams params;
params.mCanonical = canonical;
params.mWeak = weak;
mListeners.EnumerateRead(CanonicalChecker, (void*)&params);
for (auto iter = mListeners.Iter(); !iter.Done(); iter.Next()) {
nsAutoTObserverArray<nsMessageListenerInfo, 1>* listeners = iter.UserData();
uint32_t count = listeners->Length();
for (uint32_t i = 0; i < count; i++) {
nsWeakPtr weakListener = listeners->ElementAt(i).mWeakListener;
if (weakListener) {
nsCOMPtr<nsISupports> otherCanonical = do_QueryReferent(weakListener);
MOZ_ASSERT((canonical == otherCanonical) == (weak == weakListener));
}
}
}
#endif
nsAutoTObserverArray<nsMessageListenerInfo, 1>* listeners =
@ -1488,55 +1457,45 @@ protected:
NS_IMPL_ISUPPORTS(MessageManagerReporter, nsIMemoryReporter)
// Memory-reporter enumeration helper: accumulates per-message listener counts
// into the MessageManagerReferentCount passed via |aData|, classifying each
// listener as strong, weak-alive, or weak-dead. Messages whose cumulative
// listener count crosses kSuspectReferentCount are recorded as suspects.
static PLDHashOperator
CollectMessageListenerData(const nsAString& aKey,
                           nsAutoTObserverArray<nsMessageListenerInfo, 1>* aListeners,
                           void* aData)
{
  MessageManagerReferentCount* referentCount =
    static_cast<MessageManagerReferentCount*>(aData);

  uint32_t listenerCount = aListeners->Length();
  if (!listenerCount) {
    // No listeners for this message name; nothing to count.
    return PL_DHASH_NEXT;
  }

  // Accumulate this message's listener count on top of whatever previous
  // enumerations (e.g. over child managers) already recorded.
  nsString key(aKey);
  uint32_t oldCount = 0;
  referentCount->mMessageCounter.Get(key, &oldCount);
  uint32_t currentCount = oldCount + listenerCount;
  referentCount->mMessageCounter.Put(key, currentCount);

  // Keep track of messages that have a suspiciously large
  // number of referents (symptom of leak).
  if (currentCount == MessageManagerReporter::kSuspectReferentCount) {
    referentCount->mSuspectMessages.AppendElement(key);
  }

  for (uint32_t i = 0; i < listenerCount; ++i) {
    const nsMessageListenerInfo& listenerInfo =
      aListeners->ElementAt(i);
    if (listenerInfo.mWeakListener) {
      // Resolve the weak reference to decide whether the target still exists.
      nsCOMPtr<nsISupports> referent =
        do_QueryReferent(listenerInfo.mWeakListener);
      if (referent) {
        referentCount->mWeakAlive++;
      } else {
        referentCount->mWeakDead++;
      }
    } else {
      referentCount->mStrong++;
    }
  }
  return PL_DHASH_NEXT;
}
void
MessageManagerReporter::CountReferents(nsFrameMessageManager* aMessageManager,
MessageManagerReferentCount* aReferentCount)
{
aMessageManager->mListeners.EnumerateRead(CollectMessageListenerData,
aReferentCount);
for (auto it = aMessageManager->mListeners.Iter(); !it.Done(); it.Next()) {
nsAutoTObserverArray<nsMessageListenerInfo, 1>* listeners =
it.UserData();
uint32_t listenerCount = listeners->Length();
if (listenerCount == 0) {
continue;
}
nsString key(it.Key());
uint32_t oldCount = 0;
aReferentCount->mMessageCounter.Get(key, &oldCount);
uint32_t currentCount = oldCount + listenerCount;
aReferentCount->mMessageCounter.Put(key, currentCount);
// Keep track of messages that have a suspiciously large
// number of referents (symptom of leak).
if (currentCount == MessageManagerReporter::kSuspectReferentCount) {
aReferentCount->mSuspectMessages.AppendElement(key);
}
for (uint32_t i = 0; i < listenerCount; ++i) {
const nsMessageListenerInfo& listenerInfo = listeners->ElementAt(i);
if (listenerInfo.mWeakListener) {
nsCOMPtr<nsISupports> referent =
do_QueryReferent(listenerInfo.mWeakListener);
if (referent) {
aReferentCount->mWeakAlive++;
} else {
aReferentCount->mWeakDead++;
}
} else {
aReferentCount->mStrong++;
}
}
}
// Add referent count in child managers because the listeners
// participate in messages dispatched from parent message manager.
@ -2182,24 +2141,20 @@ NS_NewChildProcessMessageManager(nsISyncMessageSender** aResult)
return NS_OK;
}
// MarkForCC helper: attempts to unmark (un-gray) the wrapped JS object of
// every strongly-held listener so the cycle collector can treat known-live
// objects as black. |aData| is unused; always proceeds to the next entry.
static PLDHashOperator
CycleCollectorMarkListeners(const nsAString& aKey,
                            nsAutoTObserverArray<nsMessageListenerInfo, 1>* aListeners,
                            void* aData)
{
  const uint32_t numListeners = aListeners->Length();
  for (uint32_t idx = 0; idx < numListeners; idx++) {
    const nsMessageListenerInfo& info = aListeners->ElementAt(idx);
    if (info.mStrongListener) {
      xpc_TryUnmarkWrappedGrayObject(info.mStrongListener);
    }
  }
  return PL_DHASH_NEXT;
}
bool
nsFrameMessageManager::MarkForCC()
{
mListeners.EnumerateRead(CycleCollectorMarkListeners, nullptr);
for (auto iter = mListeners.Iter(); !iter.Done(); iter.Next()) {
nsAutoTObserverArray<nsMessageListenerInfo, 1>* listeners = iter.UserData();
uint32_t count = listeners->Length();
for (uint32_t i = 0; i < count; i++) {
nsCOMPtr<nsIMessageListener> strongListener =
listeners->ElementAt(i).mStrongListener;
if (strongListener) {
xpc_TryUnmarkWrappedGrayObject(strongListener);
}
}
}
if (mRefCnt.IsPurple()) {
mRefCnt.RemovePurple();

View File

@ -20,6 +20,7 @@ var Cu = Components.utils;
lifetime_test,
cancel_test,
cancel_test2,
unsafe_test,
];
function go() {
@ -337,3 +338,20 @@ function cancel_test2(finish)
if (fin1 && fin2) finish();
});
}
// Ships a CPOW to the parent first via an async message (the "unsafe" phase)
// and then via an RPC message (the "safe" phase), calling |finish| once the
// parent acknowledges the safe phase. The parent-side handlers (recvUnsafe /
// recvSafe) perform the actual assertions.
function unsafe_test(finish)
{
  if (!is_remote) {
    // Only run this test when running out-of-process.
    finish();
    return;
  }

  function f() {}

  // Phase 1: expose |f| as a CPOW through an async message.
  sendAsyncMessage("cpows:unsafe", null, {f});
  addMessageListener("cpows:unsafe_done", msg => {
    // Phase 2: expose the same function through an RPC (sync) message.
    sendRpcMessage("cpows:safe", null, {f});
    addMessageListener("cpows:safe_done", finish);
  });
}

View File

@ -390,6 +390,33 @@
msg.target.messageManager.sendAsyncMessage("cpows:cancel_test2_done");
}
// Parent-side handler for "cpows:unsafe": with the forbid-unsafe pref set,
// invoking the CPOW |msg.objects.f| from an async message context is expected
// to throw. Asserts the failure happened, restores the pref, and signals the
// child to continue.
function recvUnsafe(msg) {
  let failed = false;
  const PREF_UNSAFE_FORBIDDEN = "dom.ipc.cpows.forbid-unsafe-from-browser";
  opener.wrappedJSObject.SpecialPowers.setBoolPref(PREF_UNSAFE_FORBIDDEN, true);
  try {
    msg.objects.f();
  } catch (e if /unsafe CPOW usage forbidden/.test(String(e))) {
    // Non-standard SpiderMonkey conditional catch: only the expected
    // "unsafe CPOW" error is swallowed; anything else propagates.
    failed = true;
  }
  opener.wrappedJSObject.SpecialPowers.clearUserPref(PREF_UNSAFE_FORBIDDEN);

  ok(failed, "CPOW should fail when unsafe");
  msg.target.messageManager.sendAsyncMessage("cpows:unsafe_done");
}
// Parent-side handler for "cpows:safe": even with the forbid-unsafe pref set,
// invoking the CPOW from an RPC message context is expected to succeed. Fails
// the test if the "unsafe CPOW" error is raised, restores the pref, and
// signals the child to finish.
function recvSafe(msg) {
  const PREF_UNSAFE_FORBIDDEN = "dom.ipc.cpows.forbid-unsafe-from-browser";
  opener.wrappedJSObject.SpecialPowers.setBoolPref(PREF_UNSAFE_FORBIDDEN, true);
  try {
    msg.objects.f();
  } catch (e if /unsafe CPOW usage forbidden/.test(String(e))) {
    // Non-standard SpiderMonkey conditional catch: reaching here means the
    // RPC path was wrongly treated as unsafe.
    ok(false, "cpow failed");
  }
  opener.wrappedJSObject.SpecialPowers.clearUserPref(PREF_UNSAFE_FORBIDDEN);
  msg.target.messageManager.sendAsyncMessage("cpows:safe_done");
}
function run_tests(type) {
info("Running tests: " + type);
var node = document.getElementById('cpowbrowser_' + type);
@ -429,6 +456,8 @@
mm.addMessageListener("cpows:cancel_test", recvCancelTest);
mm.addMessageListener("cpows:cancel_sync_message", recvCancelSyncMessage);
mm.addMessageListener("cpows:cancel_test2", recvCancelTest2);
mm.addMessageListener("cpows:unsafe", recvUnsafe);
mm.addMessageListener("cpows:safe", recvSafe);
mm.loadFrameScript("chrome://mochitests/content/chrome/dom/base/test/chrome/cpows_child.js", true);
}

View File

@ -41,8 +41,8 @@ WEBIDL_EXAMPLE_INTERFACES += [
'TestExampleProxyInterface',
]
GENERATED_INCLUDES += ['..']
LOCAL_INCLUDES += [
'!..',
'/dom/bindings',
'/js/xpconnect/src',
'/js/xpconnect/wrappers',

View File

@ -7,6 +7,7 @@
#include "GLContext.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/UniquePtrExtensions.h"
#include "WebGLBuffer.h"
#include "WebGLContextUtils.h"
#include "WebGLFramebuffer.h"
@ -580,7 +581,7 @@ WebGLContext::DoFakeVertexAttrib0(GLuint vertexCount)
GetAndFlushUnderlyingGLErrors();
if (mFakeVertexAttrib0BufferStatus == WebGLVertexAttrib0Status::EmulatedInitializedArray) {
UniquePtr<GLfloat[]> array(new (fallible) GLfloat[4 * vertexCount]);
auto array = MakeUniqueFallible<GLfloat[]>(4 * vertexCount);
if (!array) {
ErrorOutOfMemory("Fake attrib0 array.");
return false;

View File

@ -52,6 +52,7 @@
#include "mozilla/dom/ToJSValue.h"
#include "mozilla/Endian.h"
#include "mozilla/RefPtr.h"
#include "mozilla/UniquePtrExtensions.h"
namespace mozilla {
@ -1622,7 +1623,7 @@ WebGLContext::ReadPixels(GLint x, GLint y, GLsizei width,
uint32_t subrect_byteLength = (subrect_height-1)*subrect_alignedRowSize + subrect_plainRowSize;
// create subrect buffer, call glReadPixels, copy pixels into destination buffer, delete subrect buffer
UniquePtr<GLubyte> subrect_data(new (fallible) GLubyte[subrect_byteLength]);
auto subrect_data = MakeUniqueFallible<GLubyte[]>(subrect_byteLength);
if (!subrect_data)
return ErrorOutOfMemory("readPixels: subrect_data");

View File

@ -25,6 +25,7 @@ function CaptureStreamTestHelper(width, height) {
CaptureStreamTestHelper.prototype = {
/* Predefined colors for use in the methods below. */
black: { data: [0, 0, 0, 255], name: "black" },
blackTransparent: { data: [0, 0, 0, 0], name: "blackTransparent" },
green: { data: [0, 255, 0, 255], name: "green" },
red: { data: [255, 0, 0, 255], name: "red" },
@ -52,55 +53,90 @@ CaptureStreamTestHelper.prototype = {
video.srcObject.requestFrame();
},
/* Tests the top left pixel of |video| against |refData|. Format [R,G,B,A]. */
testPixel: function (video, refData, threshold) {
/*
* Returns the pixel at (|offsetX|, |offsetY|) (from top left corner) of
* |video| as an array of the pixel's color channels: [R,G,B,A].
*/
getPixel: function (video, offsetX, offsetY) {
offsetX = offsetX || 0; // Set to 0 if not passed in.
offsetY = offsetY || 0; // Set to 0 if not passed in.
var ctxout = this.cout.getContext('2d');
ctxout.drawImage(video, 0, 0);
var pixel = ctxout.getImageData(0, 0, 1, 1).data;
return pixel.every((val, i) => Math.abs(val - refData[i]) <= threshold);
return ctxout.getImageData(offsetX, offsetY, 1, 1).data;
},
/*
* Returns a promise that resolves when the pixel matches. Use |threshold|
* for fuzzy matching the color on each channel, in the range [0,255].
* Returns true if px lies within the per-channel |threshold| of the
* referenced color for all channels. px is on the form of an array of color
* channels, [R,G,B,A]. Each channel is in the range [0, 255].
*/
waitForPixel: function (video, refColor, threshold, infoString) {
isPixel: function (px, refColor, threshold) {
threshold = threshold || 0; // Default to 0 (exact match) if not passed in.
return px.every((ch, i) => Math.abs(ch - refColor.data[i]) <= threshold);
},
/*
* Returns true if px lies further away than |threshold| of the
* referenced color for any channel. px is on the form of an array of color
* channels, [R,G,B,A]. Each channel is in the range [0, 255].
*/
isPixelNot: function (px, refColor, threshold) {
if (threshold === undefined) {
// Default to 127 (should be sufficiently far away) if not passed in.
threshold = 127;
}
return px.some((ch, i) => Math.abs(ch - refColor.data[i]) > threshold);
},
/*
* Returns a promise that resolves when the provided function |test|
* returns true.
*/
waitForPixel: function (video, offsetX, offsetY, test, timeout) {
return new Promise(resolve => {
info("Testing " + video.id + " against [" + refColor.data.join(',') + "]");
const startTime = video.currentTime;
CaptureStreamTestHelper2D.prototype.clear.call(this, this.cout);
video.ontimeupdate = () => {
if (this.testPixel(video, refColor.data, threshold)) {
ok(true, video.id + " " + infoString);
video.ontimeupdate = null;
resolve();
var ontimeupdate = () => {
const pixelMatch = test(this.getPixel(video, offsetX, offsetY));
if (!pixelMatch &&
(!timeout || video.currentTime < startTime + (timeout / 1000.0))) {
// No match yet and,
// No timeout (waiting indefinitely) or |timeout| has not passed yet.
return;
}
video.removeEventListener("timeupdate", ontimeupdate);
resolve(pixelMatch);
};
video.addEventListener("timeupdate", ontimeupdate);
});
},
/*
* Returns a promise that resolves after |timeout| ms of playback or when a
* pixel of |video| becomes the color |refData|. The test is failed if the
* Returns a promise that resolves when the top left pixel of |video| matches
* on all channels. Use |threshold| for fuzzy matching the color on each
* channel, in the range [0,255].
*/
/*
 * Returns a promise that resolves when the top left pixel of |video| matches
 * |refColor| on all channels (fuzzy per-channel match within |threshold|,
 * range [0,255]). Logs |infoString| and records a passing ok() on success.
 */
waitForPixelColor: function (video, refColor, threshold, infoString) {
  info("Waiting for video " + video.id + " to match [" +
       refColor.data.join(',') + "] - " + refColor.name +
       " (" + infoString + ")");

  // Delegate the polling to waitForPixel with a color-match predicate.
  return this.waitForPixel(video, 0, 0,
                           px => this.isPixel(px, refColor, threshold))
    .then(() => ok(true, video.id + " " + infoString));
},
/*
* Returns a promise that resolves after |timeout| ms of playback or when the
* top left pixel of |video| becomes |refColor|. The test is failed if the
* timeout is not reached.
*/
waitForPixelToTimeout: function (video, refColor, threshold, timeout, infoString) {
return new Promise(resolve => {
info("Waiting for " + video.id + " to time out after " + timeout +
"ms against [" + refColor.data.join(',') + "] - " + refColor.name);
CaptureStreamTestHelper2D.prototype.clear.call(this, this.cout);
var startTime = video.currentTime;
video.ontimeupdate = () => {
if (this.testPixel(video, refColor.data, threshold)) {
ok(false, video.id + " " + infoString);
video.ontimeupdate = null;
resolve();
} else if (video.currentTime > startTime + (timeout / 1000.0)) {
ok(true, video.id + " " + infoString);
video.ontimeupdate = null;
resolve();
}
};
});
/*
 * Returns a promise that resolves after |timeout| ms of playback or when the
 * top left pixel of |video| matches |refColor|. The ok() assertion passes
 * only if the timeout was reached (i.e. the color was NOT seen).
 */
waitForPixelColorTimeout: function (video, refColor, threshold, timeout, infoString) {
  info("Waiting for " + video.id + " to time out after " + timeout +
       "ms against [" + refColor.data.join(',') + "] - " + refColor.name);

  // waitForPixel resolves with whether the pixel matched before the timeout;
  // a match here means the test failed.
  return this.waitForPixel(video, 0, 0,
                           px => this.isPixel(px, refColor, threshold),
                           timeout)
    .then(result => ok(!result, video.id + " " + infoString));
},
/* Create an element of type |type| with id |id| and append it to the body. */

View File

@ -23,15 +23,21 @@ function checkDrawColorInitialRed() {
vmanual.srcObject = c.captureStream(0);
vrate.srcObject = c.captureStream(10);
ok(h.testPixel(vauto, [0, 0, 0, 0], 0), "vauto hould not be drawn to before stable state");
ok(h.testPixel(vrate, [0, 0, 0, 0], 0), "vrate Should not be drawn to before stable state");
ok(h.testPixel(vmanual, [0, 0, 0, 0], 0), "vmanual Should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vauto), h.blackTransparent, 0),
"vauto should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vrate), h.blackTransparent, 0),
"vrate should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent, 0),
"vmanual should not be drawn to before stable state");
return Promise.resolve()
.then(() => h.waitForPixel(vauto, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vrate, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vmanual, h.red, 0, "should become red when we get" +
" to stable state (first frame)"));
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red when we get" +
" to stable state (first frame)"));
}
function checkDrawColorGreen() {
@ -40,11 +46,15 @@ function checkDrawColorGreen() {
var drawing = h.startDrawing(() => h.drawColor(c, h.green));
return Promise.resolve()
.then(() => h.waitForPixel(vauto, h.green, 0, "should become green automatically"))
.then(() => h.waitForPixel(vrate, h.green, 0, "should become green automatically"))
.then(() => h.waitForPixel(vmanual, h.red, 0, "should still be red"))
.then(() => h.waitForPixelColor(vauto, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vrate, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should still be red"))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixel(vmanual, h.green, 0, "should become green after requstFrame()"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after requstFrame()"))
.catch(err => ok(false, "checkDrawColorGreen failed: ", err))
.then(() => drawing.stop());
}
@ -54,10 +64,12 @@ function checkRequestFrameOrderGuarantee() {
"call results in the expected frame seen in the stream.");
return Promise.resolve()
.then(() => h.waitForPixel(vmanual, h.green, 0, "should still be green"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should still be green"))
.then(() => h.drawColor(c, h.red)) // 1. Draw canvas red
.then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
.then(() => h.waitForPixel(vmanual, h.red, 0, "should become red after call order test"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red after call order test"))
}
function checkDrawImageNotCleanRed() {
@ -74,11 +86,14 @@ function checkDrawImageNotCleanRed() {
})
.then(() => drawing = h.startDrawing(() => ctx.drawImage(notCleanRed, 0, 0, c.width, c.height)))
.then(() => h.testNotClean(c))
.then(() => h.waitForPixelToTimeout(vauto, h.red, 0, 1000, "should not become red"))
.then(() => h.waitForPixelToTimeout(vrate, h.red, 0, 0, "should not become red"))
.then(() => h.waitForPixel(vmanual, h.green, 0, "should still be green"))
.then(() => h.waitForPixelColorTimeout(vauto, h.red, 0, 1000,
"should not become red"))
.then(() => h.isPixelNot(h.getPixel(vrate), h.red, 250,
"should not have become red"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0, "should still be green"))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixelToTimeout(vmanual, h.red, 0, 1000, "should not become red"))
.then(() => h.waitForPixelColorTimeout(vmanual, h.red, 0, 1000,
"should not become red"))
.catch(err => ok(false, "checkDrawImageNotCleanRed failed: ", err))
.then(() => drawing.stop());
}

View File

@ -54,14 +54,21 @@ function checkClearColorInitialRed() {
vmanual.srcObject = c.captureStream(0);
vrate.srcObject = c.captureStream(10);
ok(h.testPixel(vauto, [0, 0, 0, 0], 0), "Should not be drawn to before stable state");
ok(h.testPixel(vrate, [0, 0, 0, 0], 0), "Should not be drawn to before stable state");
ok(h.testPixel(vmanual, [0, 0, 0, 0], 0), "Should not be drawn to before stable state");
ok(h.isPixel(h.getPixel(vauto), h.blackTransparent, 0,
"vauto should not be drawn to before stable state"));
ok(h.isPixel(h.getPixel(vrate), h.blackTransparent, 0,
"vrate should not be drawn to before stable state"));
ok(h.isPixel(h.getPixel(vmanual), h.blackTransparent, 0,
"vmanual should not be drawn to before stable state"));
return Promise.resolve()
.then(() => h.waitForPixel(vauto, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vrate, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vmanual, h.red, 0, "should become red when we get to stable state (first frame)"))
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red when we get to stable " +
"state (first frame)"))
}
function checkDrawColorGreen() {
@ -69,11 +76,15 @@ function checkDrawColorGreen() {
var drawing = h.startDrawing(h.drawColor.bind(h, c, h.green));
checkGLError('after DrawColor');
return Promise.resolve()
.then(() => h.waitForPixel(vauto, h.green, 0, "should become green automatically"))
.then(() => h.waitForPixel(vrate, h.green, 0, "should become green automatically"))
.then(() => h.waitForPixel(vmanual, h.red, 0, "should still be red"))
.then(() => h.waitForPixelColor(vauto, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vrate, h.green, 0,
"should become green automatically"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should still be red"))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixel(vmanual, h.green, 0, "should become green after requstFrame()"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after requstFrame()"))
.then(() => drawing.stop());
}
@ -81,11 +92,15 @@ function checkClearColorRed() {
info("Checking that clearing to red works.");
var drawing = h.startDrawing(h.clearColor.bind(h, c, h.red));
return Promise.resolve()
.then(() => h.waitForPixel(vauto, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vrate, h.red, 0, "should become red automatically"))
.then(() => h.waitForPixel(vmanual, h.green, 0, "should still be green"))
.then(() => h.waitForPixelColor(vauto, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vrate, h.red, 0,
"should become red automatically"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should still be green"))
.then(() => h.requestFrame(vmanual))
.then(() => h.waitForPixel(vmanual, h.red, 0, "should become red after requestFrame()"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0,
"should become red after requestFrame()"))
.then(() => drawing.stop());
}
@ -93,10 +108,11 @@ function checkRequestFrameOrderGuarantee() {
info("Checking that requestFrame() immediately after a draw " +
"call results in the expected frame seen in the stream.");
return Promise.resolve()
.then(() => h.waitForPixel(vmanual, h.red, 0, "should still be red"))
.then(() => h.waitForPixelColor(vmanual, h.red, 0, "should still be red"))
.then(() => h.drawColor(c, h.green)) // 1. Draw canvas green
.then(() => h.requestFrame(vmanual)) // 2. Immediately request a frame
.then(() => h.waitForPixel(vmanual, h.green, 0, "should become green after call order test"))
.then(() => h.waitForPixelColor(vmanual, h.green, 0,
"should become green after call order test"))
}
function finish() {

View File

@ -1910,28 +1910,6 @@ ContentParent::OnChannelError()
PContentParent::OnChannelError();
}
// Called when a synchronous IPC transaction begins. In the parent process,
// unless warnings are disabled via sDisableUnsafeCPOWWarnings, logs an
// "unsafe CPOW usage" script error to the console (annotated with the calling
// chrome JS location when a JSContext is available), or falls back to an
// NS_WARNING otherwise.
//
// Fix: do_CreateInstance can return null; the original dereferenced |error|
// unconditionally, risking a null-deref when the script-error component
// cannot be instantiated.
void
ContentParent::OnBeginSyncTransaction() {
  if (XRE_IsParentProcess()) {
    nsCOMPtr<nsIConsoleService> console(do_GetService(NS_CONSOLESERVICE_CONTRACTID));
    JSContext *cx = nsContentUtils::GetCurrentJSContext();
    if (!sDisableUnsafeCPOWWarnings) {
      if (console && cx) {
        nsAutoString filename;
        uint32_t lineno = 0, column = 0;
        nsJSUtils::GetCallingLocation(cx, filename, &lineno, &column);
        nsCOMPtr<nsIScriptError> error(do_CreateInstance(NS_SCRIPTERROR_CONTRACTID));
        if (error) {
          error->Init(NS_LITERAL_STRING("unsafe CPOW usage"), filename,
                      EmptyString(), lineno, column,
                      nsIScriptError::warningFlag, "chrome javascript");
          console->LogMessage(error);
        } else {
          // Component creation failed; still surface the event somewhere.
          NS_WARNING("Unsafe synchronous IPC message");
        }
      } else {
        NS_WARNING("Unsafe synchronous IPC message");
      }
    }
  }
}
void
ContentParent::OnChannelConnected(int32_t pid)
{

View File

@ -327,8 +327,6 @@ public:
virtual void OnChannelError() override;
virtual void OnBeginSyncTransaction() override;
virtual PCrashReporterParent*
AllocPCrashReporterParent(const NativeThreadId& tid,
const uint32_t& processType) override;

View File

@ -1415,25 +1415,22 @@ MediaManager::EnumerateRawDevices(uint64_t aWindowId,
RefPtr<PledgeSourceSet> p = new PledgeSourceSet();
uint32_t id = mOutstandingPledges.Append(*p);
// Check if the preference for using audio/video loopback devices is
// enabled. This is currently used for automated media tests only.
//
// If present (and we're doing non-exotic cameras and microphones) use them
// instead of our built-in fake devices, except if fake tracks are requested
// (a feature of the built-in ones only).
nsAdoptingCString audioLoopDev, videoLoopDev;
if (!aFakeTracks) {
if (aVideoType == dom::MediaSourceEnum::Camera) {
audioLoopDev = Preferences::GetCString("media.audio_loopback_dev");
if (!aFake) {
// Fake stream not requested. The entire device stack is available.
// Loop in loopback devices if they are set, and their respective type is
// requested. This is currently used for automated media tests only.
if (aVideoType == MediaSourceEnum::Camera) {
videoLoopDev = Preferences::GetCString("media.video_loopback_dev");
if (aFake && !audioLoopDev.IsEmpty() && !videoLoopDev.IsEmpty()) {
aFake = false;
}
} else {
aFake = false;
}
if (aAudioType == MediaSourceEnum::Microphone) {
audioLoopDev = Preferences::GetCString("media.audio_loopback_dev");
}
}
if (!aFake) {
// Fake tracks only make sense when we have a fake stream.
aFakeTracks = false;
}
MediaManager::PostTask(FROM_HERE, NewTaskFrom([id, aWindowId, audioLoopDev,

View File

@ -42,13 +42,16 @@ FINAL_LIBRARY = 'xul'
# These includes are from Android JB, for use of MediaCodec.
LOCAL_INCLUDES += ['/ipc/chromium/src']
CXXFLAGS += [
'-I%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'frameworks/native/opengl/include',
'frameworks/native/include',
'frameworks/av/include/media',
if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk' and CONFIG['ANDROID_VERSION'] > '15':
LOCAL_INCLUDES += [
'%' + '%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'frameworks/av/include/media',
'frameworks/native/include',
'frameworks/native/opengl/include',
]
]
]
include('/ipc/chromium/chromium-config.mozbuild')

View File

@ -97,20 +97,26 @@ LOCAL_INCLUDES += [
'/ipc/chromium/src',
]
CXXFLAGS += [
'-I%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'dalvik/libnativehelper/include/nativehelper',
'frameworks/av/include/media',
'frameworks/base/include',
'frameworks/base/include/binder',
'frameworks/base/include/utils',
'frameworks/base/include/media',
'frameworks/base/include/media/stagefright/openmax',
'frameworks/base/media/libstagefright/include',
'frameworks/native/opengl/include',
'frameworks/native/include',
'hardware/libhardware/include/',
if CONFIG['ANDROID_VERSION'] == '15':
LOCAL_INCLUDES += [
'%' + '%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'dalvik/libnativehelper/include/nativehelper',
'frameworks/base/include',
'frameworks/base/include/binder',
'frameworks/base/include/media',
'frameworks/base/include/media/stagefright/openmax',
'frameworks/base/include/utils',
'frameworks/base/media/libstagefright/include',
'hardware/libhardware/include',
]
]
else:
LOCAL_INCLUDES += [
'%' + '%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'frameworks/av/include/media',
'frameworks/native/include',
'frameworks/native/opengl/include',
]
]
if CONFIG['ANDROID_VERSION'] > '15':

View File

@ -33,7 +33,7 @@ if CONFIG['GNU_CC'] or CONFIG['CLANG_CL']:
FINAL_LIBRARY = 'xul'
CXXFLAGS += [
'-I%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
LOCAL_INCLUDES += [
'%' + '%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'frameworks/native/opengl/include',]
]

View File

@ -40,12 +40,14 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa':
EXPORTS += ['OSXRunLoopSingleton.h']
if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk':
CXXFLAGS += [
'-I%s/%s' % (CONFIG['ANDROID_SOURCE'], d) for d in [
'frameworks/wilhelm/include',
'system/media/wilhelm/include',
if CONFIG['ANDROID_VERSION'] >= '17':
LOCAL_INCLUDES += [
'%' + '%s/frameworks/wilhelm/include' % CONFIG['ANDROID_SOURCE'],
]
else:
LOCAL_INCLUDES += [
'%' + '%s/system/media/wilhelm/include' % CONFIG['ANDROID_SOURCE'],
]
]
if CONFIG['_MSC_VER']:
DEFINES['__PRETTY_FUNCTION__'] = '__FUNCSIG__'

View File

@ -56,7 +56,7 @@ function startTest() {
SimpleTest.finish();
};
document.getElementById("content").appendChild(video);
helper.waitForPixel(video, helper.red, 128, "Should become red")
helper.waitForPixelColor(video, helper.red, 128, "Should become red")
.then(SimpleTest.finish);
};

View File

@ -20,7 +20,6 @@ function identityPcTest(remoteOptions) {
}], [remoteOptions || {
audio: true,
video: true,
fake: true,
peerIdentity: id1
}]);
test.pcLocal.setIdentityProvider('test1.example.com', 'idp.js');

View File

@ -20,8 +20,7 @@ function theTest() {
// side is isolated anyway.
identityPcTest({
audio: true,
video: true,
fake: true
video: true
});
}
runNetworkTest(theTest);

View File

@ -150,6 +150,7 @@ skip-if = toolkit == 'gonk' || buildapp == 'mulet' || (android_version == '18' &
[test_peerConnection_throwInCallbacks.html]
skip-if = toolkit == 'gonk' || buildapp == 'mulet' # b2g(Bug 960442, video support for WebRTC is disabled on b2g)
[test_peerConnection_toJSON.html]
[test_peerConnection_trackDisabling.html]
[test_peerConnection_twoAudioStreams.html]
skip-if = toolkit == 'gonk' || buildapp == 'mulet' || (android_version == '18' && debug) # b2g (Bug 1059867), android(Bug 1189784, timeouts on 4.3 emulator)

View File

@ -46,27 +46,24 @@ runTest(() =>
.then(() => mustSucceed("unknown plain deviceId on video",
() => navigator.mediaDevices.getUserMedia({
video: { deviceId: "unknown9qHr8B0JIbcHlbl9xR+jMbZZ8WyoPfpCXPfc=" },
fake: true,
})))
.then(() => mustSucceed("unknown plain deviceId on audio",
() => navigator.mediaDevices.getUserMedia({
audio: { deviceId: "unknown9qHr8B0JIbcHlbl9xR+jMbZZ8WyoPfpCXPfc=" },
fake: true,
})))
.then(() => mustFailWith("unknown exact deviceId on video",
"OverconstrainedError", "deviceId",
() => navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: "unknown9qHr8B0JIbcHlbl9xR+jMbZZ8WyoPfpCXPfc=" } },
fake: true,
})))
.then(() => mustFailWith("unknown exact deviceId on audio",
"OverconstrainedError", "deviceId",
() => navigator.mediaDevices.getUserMedia({
audio: { deviceId: { exact: "unknown9qHr8B0JIbcHlbl9xR+jMbZZ8WyoPfpCXPfc=" } },
fake: true,
})))
// Check the special case of no devices found (these prefs override fake).
.then(() => pushPrefs(["media.audio_loopback_dev", "none"],
// Check the special case of no devices found.
.then(() => pushPrefs(["media.navigator.streams.fake", false],
["media.audio_loopback_dev", "none"],
["media.video_loopback_dev", "none"]))
.then(() => navigator.mediaDevices.enumerateDevices())
.then(devices => ok(devices.length === 0, "No devices found")));

View File

@ -25,8 +25,11 @@
var testVideo = createMediaElement('video', 'testVideo');
return Promise.resolve()
.then(() => getUserMedia({ video: { mediaSource: "browser",
scrollWithPage: true } }))
.then(() => getUserMedia({
video: { mediaSource: "browser",
scrollWithPage: true },
fake: false
}))
.then(stream => {
var playback = new LocalMediaStreamPlayback(testVideo, stream);
return playback.playMediaWithDeprecatedStreamStop(false);
@ -38,7 +41,8 @@
viewportOffsetY: 0,
viewportWidth: 100,
viewportHeight: 100
}
},
fake: false
}))
.then(stream => {
var playback = new LocalMediaStreamPlayback(testVideo, stream);

View File

@ -16,16 +16,13 @@ createHTML({ title: "Test getUserMedia constraints", bug: "882145" });
var tests = [
// Each test here tests a different constraint or codepath.
{ message: "unknown required constraint on video ignored",
constraints: { video: { somethingUnknown: { exact: 0 } },
fake: true },
constraints: { video: { somethingUnknown: { exact: 0 } } },
error: null },
{ message: "unknown required constraint on audio ignored",
constraints: { audio: { somethingUnknown: { exact: 0 } },
fake: true },
constraints: { audio: { somethingUnknown: { exact: 0 } } },
error: null },
{ message: "audio overconstrained by facingMode ignored",
constraints: { audio: { facingMode: { exact: 'left' } },
fake: true },
constraints: { audio: { facingMode: { exact: 'left' } } },
error: null },
{ message: "full screensharing requires permission",
constraints: { video: { mediaSource: 'screen' } },
@ -51,8 +48,7 @@ var tests = [
constraints: { },
error: "NotSupportedError" },
{ message: "Success-path: optional video facingMode + audio ignoring facingMode",
constraints: { fake: true,
audio: { mediaSource: 'microphone',
constraints: { audio: { mediaSource: 'microphone',
facingMode: 'left',
foo: 0,
advanced: [{ facingMode: 'environment' },
@ -66,7 +62,7 @@ var tests = [
{ bar: 0 }] } },
error: null },
{ message: "legacy facingMode ignored",
constraints: { video: { mandatory: { facingMode: 'left' } }, fake: true },
constraints: { video: { mandatory: { facingMode: 'left' } } },
error: null },
];

View File

@ -13,7 +13,7 @@ createHTML({
});
function theTest() {
function testPeerIdentityConstraint(withConstraint) {
var config = { audio: true, video: true, fake: true };
var config = { audio: true, video: true };
if (withConstraint) {
config.peerIdentity = 'user@example.com';
}

View File

@ -65,7 +65,7 @@ runNetworkTest(function() {
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
// not testing legacy gUM here
navigator.mediaDevices.getUserMedia({ fake: true, video: true, audio: true })
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(stream => pc1.addStream(v1.mozSrcObject = stream))
.then(() => pcall(pc1, pc1.createOffer))
.then(offer => pcall(pc1, pc1.setLocalDescription, offer))

View File

@ -39,7 +39,8 @@ runNetworkTest(() => {
ok(!!vremote, "Should have remote video element for pcRemote");
},
function WAIT_FOR_REMOTE_GREEN() {
return h.waitForPixel(vremote, h.green, 128, "pcRemote's remote should become green");
return h.waitForPixelColor(vremote, h.green, 128,
"pcRemote's remote should become green");
},
function DRAW_LOCAL_RED() {
// After requesting a frame it will be captured at the time of next render.
@ -49,7 +50,8 @@ runNetworkTest(() => {
h.drawColor(canvas, h.red);
},
function WAIT_FOR_REMOTE_RED() {
return h.waitForPixel(vremote, h.red, 128, "pcRemote's remote should become red");
return h.waitForPixelColor(vremote, h.red, 128,
"pcRemote's remote should become red");
}
]);
test.run();

View File

@ -89,7 +89,8 @@ runNetworkTest(() => {
ok(!!vremote, "Should have remote video element for pcRemote");
},
function WAIT_FOR_REMOTE_GREEN() {
return h.waitForPixel(vremote, h.green, 128, "pcRemote's remote should become green");
return h.waitForPixelColor(vremote, h.green, 128,
"pcRemote's remote should become green");
},
function REQUEST_FRAME(test) {
// After requesting a frame it will be captured at the time of next render.
@ -101,7 +102,8 @@ runNetworkTest(() => {
h.drawColor(canvas, h.red);
},
function WAIT_FOR_REMOTE_RED() {
return h.waitForPixel(vremote, h.red, 128, "pcRemote's remote should become red");
return h.waitForPixelColor(vremote, h.red, 128,
"pcRemote's remote should become red");
}
]);
test.run();

View File

@ -36,7 +36,7 @@
is(v2.currentTime, 0, "v2.currentTime is zero at outset");
navigator.mediaDevices.getUserMedia({ fake: true, video: true, audio: true })
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
.then(stream => pc1.addStream(v1.srcObject = stream))
.then(() => pc1.createOffer({})) // check that createOffer accepts arg.
.then(offer => pc1.setLocalDescription(offer))

View File

@ -30,7 +30,7 @@
var newTrack;
var audiotrack;
return navigator.mediaDevices.getUserMedia({video:true, audio:true, fake:true})
return navigator.mediaDevices.getUserMedia({video:true, audio:true})
.then(newStream => {
window.grip = newStream;
newTrack = newStream.getVideoTracks()[0];
@ -112,7 +112,8 @@
var sourceNode = test.audioCtx.createOscillator();
sourceNode.type = 'sine';
// We need a frequency not too close to the fake audio track (1kHz).
// We need a frequency not too close to the fake audio track
// (440Hz for loopback devices, 1kHz for fake tracks).
sourceNode.frequency.value = 2000;
sourceNode.start();
@ -157,7 +158,7 @@
is(e.name, "InvalidParameterError",
"addTrack existing track should fail");
}
return navigator.mediaDevices.getUserMedia({video:true, fake: true})
return navigator.mediaDevices.getUserMedia({video:true})
.then(differentStream => {
var track = differentStream.getVideoTracks()[0];
try {

View File

@ -21,7 +21,7 @@
var oldstream = test.pcLocal._pc.getLocalStreams()[0];
var oldtrack = oldstream.getVideoTracks()[0];
var sender = test.pcLocal._pc.getSenders()[0];
return navigator.mediaDevices.getUserMedia({video:true, fake:true})
return navigator.mediaDevices.getUserMedia({video:true})
.then(newstream => {
var newtrack = newstream.getVideoTracks()[0];
return test.pcLocal.senderReplaceTrack(0, newtrack, newstream.id);

View File

@ -48,7 +48,7 @@ runNetworkTest(function () {
pc2 = new RTCPeerConnection();
// Test success callbacks (happy path)
navigator.mozGetUserMedia({video:true, fake: true}, function(video1) {
navigator.mozGetUserMedia({video:true}, function(video1) {
pc1.addStream(video1);
pc1.createOffer(function(offer) {
pc1.setLocalDescription(offer, function() {

View File

@ -0,0 +1,95 @@
<!DOCTYPE HTML>
<html>
<head>
<script type="application/javascript" src="pc.js"></script>
<script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
</head>
<body>
<pre id="test">
<script type="application/javascript;version=1.8">
createHTML({
bug: "1219711",
title: "Disabling locally should be reflected remotely",
visible: true
});
runNetworkTest(() => {
var test = new PeerConnectionTest();
// Always use fake tracks since we depend on video to be somewhat green and
// audio to have a large 1000Hz component.
test.setMediaConstraints([{audio: true, video: true, fake: true}], []);
test.chain.append([
function CHECK_ASSUMPTIONS() {
is(test.pcLocal.mediaElements.length, 1,
"pcLocal should only have one media element");
is(test.pcRemote.mediaElements.length, 1,
"pcRemote should only have one media element");
is(test.pcLocal.streams.length, 1,
"pcLocal should only have one stream (the local one)");
is(test.pcRemote.streams.length, 1,
"pcRemote should only have one stream (the remote one)");
},
function CHECK_VIDEO() {
var h = new CaptureStreamTestHelper2D();
var localVideo = test.pcLocal.mediaElements[0];
var remoteVideo = test.pcRemote.mediaElements[0];
// We check a pixel somewhere away from the top left corner since
// MediaEngineDefault puts semi-transparent time indicators there.
const offsetX = 50;
const offsetY = 50;
const threshold = 128;
return Promise.resolve()
.then(() => info("Checking local video enabled"))
.then(() => h.waitForPixel(localVideo, offsetX, offsetY,
px => h.isPixelNot(px, h.black, 128)))
.then(() => info("Checking remote video enabled"))
.then(() => h.waitForPixel(remoteVideo, offsetX, offsetY,
px => h.isPixelNot(px, h.black, 128)))
.then(() => test.pcLocal.streams[0].getVideoTracks()[0].enabled = false)
.then(() => info("Checking local video disabled"))
.then(() => h.waitForPixel(localVideo, offsetX, offsetY,
px => h.isPixel(px, h.blackTransparent, 128)))
.then(() => info("Checking remote video disabled"))
.then(() => h.waitForPixel(remoteVideo, offsetX, offsetY,
px => h.isPixel(px, h.black, 128)))
},
function CHECK_AUDIO() {
var ac = new AudioContext();
var localAnalyser = new AudioStreamAnalyser(ac, test.pcLocal.streams[0]);
var remoteAnalyser = new AudioStreamAnalyser(ac, test.pcRemote.streams[0]);
var checkAudio = (analyser, fun) => {
analyser.enableDebugCanvas();
return analyser.waitForAnalysisSuccess(fun)
.then(() => analyser.disableDebugCanvas());
};
var freq1k = localAnalyser.binIndexForFrequency(1000);
var checkAudioEnabled = analyser =>
checkAudio(analyser, array => array[freq1k] > 200);
var checkAudioDisabled = analyser =>
checkAudio(analyser, array => array[freq1k] < 50);
return Promise.resolve()
.then(() => info("Checking local audio enabled"))
.then(() => checkAudioEnabled(localAnalyser))
.then(() => info("Checking remote audio enabled"))
.then(() => checkAudioEnabled(remoteAnalyser))
.then(() => test.pcLocal.streams[0].getAudioTracks()[0].enabled = false)
.then(() => info("Checking local audio disabled"))
.then(() => checkAudioDisabled(localAnalyser))
.then(() => info("Checking remote audio disabled"))
.then(() => checkAudioDisabled(remoteAnalyser))
}
]);
test.run();
});
</script>
</pre>
</body>
</html>

View File

@ -96,7 +96,6 @@ public:
}
mReverb = new WebCore::Reverb(mBuffer, mBufferLength,
WEBAUDIO_BLOCK_SIZE,
MaxFFTSize, 2, mUseBackgroundThreads,
mNormalize, mSampleRate);
}
@ -153,7 +152,7 @@ public:
}
aOutput->AllocateChannels(2);
mReverb->process(&input, aOutput, WEBAUDIO_BLOCK_SIZE);
mReverb->process(&input, aOutput);
}
virtual bool IsActive() const override

View File

@ -1,347 +0,0 @@
/*
* Copyright (C) 2012 Intel Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "DirectConvolver.h"
#include "mozilla/PodOperations.h"
using namespace mozilla;
namespace WebCore {
DirectConvolver::DirectConvolver(size_t inputBlockSize)
: m_inputBlockSize(inputBlockSize)
{
m_buffer.SetLength(inputBlockSize * 2);
PodZero(m_buffer.Elements(), inputBlockSize * 2);
}
void DirectConvolver::process(const nsTArray<float>* convolutionKernel, const float* sourceP, float* destP, size_t framesToProcess)
{
MOZ_ASSERT(framesToProcess == m_inputBlockSize);
if (framesToProcess != m_inputBlockSize)
return;
// Only support kernelSize <= m_inputBlockSize
size_t kernelSize = convolutionKernel->Length();
MOZ_ASSERT(kernelSize <= m_inputBlockSize);
if (kernelSize > m_inputBlockSize)
return;
const float* kernelP = convolutionKernel->Elements();
// Sanity check
bool isCopyGood = kernelP && sourceP && destP && m_buffer.Elements();
MOZ_ASSERT(isCopyGood);
if (!isCopyGood)
return;
float* inputP = m_buffer.Elements() + m_inputBlockSize;
// Copy samples to 2nd half of input buffer.
memcpy(inputP, sourceP, sizeof(float) * framesToProcess);
// FIXME: The macro can be further optimized to avoid pipeline stalls. One possibility is to maintain 4 separate sums and change the macro to CONVOLVE_FOUR_SAMPLES.
#define CONVOLVE_ONE_SAMPLE \
sum += inputP[i - j] * kernelP[j]; \
j++;
size_t i = 0;
while (i < framesToProcess) {
size_t j = 0;
float sum = 0;
// FIXME: SSE optimization may be applied here.
if (kernelSize == 32) {
CONVOLVE_ONE_SAMPLE // 1
CONVOLVE_ONE_SAMPLE // 2
CONVOLVE_ONE_SAMPLE // 3
CONVOLVE_ONE_SAMPLE // 4
CONVOLVE_ONE_SAMPLE // 5
CONVOLVE_ONE_SAMPLE // 6
CONVOLVE_ONE_SAMPLE // 7
CONVOLVE_ONE_SAMPLE // 8
CONVOLVE_ONE_SAMPLE // 9
CONVOLVE_ONE_SAMPLE // 10
CONVOLVE_ONE_SAMPLE // 11
CONVOLVE_ONE_SAMPLE // 12
CONVOLVE_ONE_SAMPLE // 13
CONVOLVE_ONE_SAMPLE // 14
CONVOLVE_ONE_SAMPLE // 15
CONVOLVE_ONE_SAMPLE // 16
CONVOLVE_ONE_SAMPLE // 17
CONVOLVE_ONE_SAMPLE // 18
CONVOLVE_ONE_SAMPLE // 19
CONVOLVE_ONE_SAMPLE // 20
CONVOLVE_ONE_SAMPLE // 21
CONVOLVE_ONE_SAMPLE // 22
CONVOLVE_ONE_SAMPLE // 23
CONVOLVE_ONE_SAMPLE // 24
CONVOLVE_ONE_SAMPLE // 25
CONVOLVE_ONE_SAMPLE // 26
CONVOLVE_ONE_SAMPLE // 27
CONVOLVE_ONE_SAMPLE // 28
CONVOLVE_ONE_SAMPLE // 29
CONVOLVE_ONE_SAMPLE // 30
CONVOLVE_ONE_SAMPLE // 31
CONVOLVE_ONE_SAMPLE // 32
} else if (kernelSize == 64) {
CONVOLVE_ONE_SAMPLE // 1
CONVOLVE_ONE_SAMPLE // 2
CONVOLVE_ONE_SAMPLE // 3
CONVOLVE_ONE_SAMPLE // 4
CONVOLVE_ONE_SAMPLE // 5
CONVOLVE_ONE_SAMPLE // 6
CONVOLVE_ONE_SAMPLE // 7
CONVOLVE_ONE_SAMPLE // 8
CONVOLVE_ONE_SAMPLE // 9
CONVOLVE_ONE_SAMPLE // 10
CONVOLVE_ONE_SAMPLE // 11
CONVOLVE_ONE_SAMPLE // 12
CONVOLVE_ONE_SAMPLE // 13
CONVOLVE_ONE_SAMPLE // 14
CONVOLVE_ONE_SAMPLE // 15
CONVOLVE_ONE_SAMPLE // 16
CONVOLVE_ONE_SAMPLE // 17
CONVOLVE_ONE_SAMPLE // 18
CONVOLVE_ONE_SAMPLE // 19
CONVOLVE_ONE_SAMPLE // 20
CONVOLVE_ONE_SAMPLE // 21
CONVOLVE_ONE_SAMPLE // 22
CONVOLVE_ONE_SAMPLE // 23
CONVOLVE_ONE_SAMPLE // 24
CONVOLVE_ONE_SAMPLE // 25
CONVOLVE_ONE_SAMPLE // 26
CONVOLVE_ONE_SAMPLE // 27
CONVOLVE_ONE_SAMPLE // 28
CONVOLVE_ONE_SAMPLE // 29
CONVOLVE_ONE_SAMPLE // 30
CONVOLVE_ONE_SAMPLE // 31
CONVOLVE_ONE_SAMPLE // 32
CONVOLVE_ONE_SAMPLE // 33
CONVOLVE_ONE_SAMPLE // 34
CONVOLVE_ONE_SAMPLE // 35
CONVOLVE_ONE_SAMPLE // 36
CONVOLVE_ONE_SAMPLE // 37
CONVOLVE_ONE_SAMPLE // 38
CONVOLVE_ONE_SAMPLE // 39
CONVOLVE_ONE_SAMPLE // 40
CONVOLVE_ONE_SAMPLE // 41
CONVOLVE_ONE_SAMPLE // 42
CONVOLVE_ONE_SAMPLE // 43
CONVOLVE_ONE_SAMPLE // 44
CONVOLVE_ONE_SAMPLE // 45
CONVOLVE_ONE_SAMPLE // 46
CONVOLVE_ONE_SAMPLE // 47
CONVOLVE_ONE_SAMPLE // 48
CONVOLVE_ONE_SAMPLE // 49
CONVOLVE_ONE_SAMPLE // 50
CONVOLVE_ONE_SAMPLE // 51
CONVOLVE_ONE_SAMPLE // 52
CONVOLVE_ONE_SAMPLE // 53
CONVOLVE_ONE_SAMPLE // 54
CONVOLVE_ONE_SAMPLE // 55
CONVOLVE_ONE_SAMPLE // 56
CONVOLVE_ONE_SAMPLE // 57
CONVOLVE_ONE_SAMPLE // 58
CONVOLVE_ONE_SAMPLE // 59
CONVOLVE_ONE_SAMPLE // 60
CONVOLVE_ONE_SAMPLE // 61
CONVOLVE_ONE_SAMPLE // 62
CONVOLVE_ONE_SAMPLE // 63
CONVOLVE_ONE_SAMPLE // 64
} else if (kernelSize == 128) {
CONVOLVE_ONE_SAMPLE // 1
CONVOLVE_ONE_SAMPLE // 2
CONVOLVE_ONE_SAMPLE // 3
CONVOLVE_ONE_SAMPLE // 4
CONVOLVE_ONE_SAMPLE // 5
CONVOLVE_ONE_SAMPLE // 6
CONVOLVE_ONE_SAMPLE // 7
CONVOLVE_ONE_SAMPLE // 8
CONVOLVE_ONE_SAMPLE // 9
CONVOLVE_ONE_SAMPLE // 10
CONVOLVE_ONE_SAMPLE // 11
CONVOLVE_ONE_SAMPLE // 12
CONVOLVE_ONE_SAMPLE // 13
CONVOLVE_ONE_SAMPLE // 14
CONVOLVE_ONE_SAMPLE // 15
CONVOLVE_ONE_SAMPLE // 16
CONVOLVE_ONE_SAMPLE // 17
CONVOLVE_ONE_SAMPLE // 18
CONVOLVE_ONE_SAMPLE // 19
CONVOLVE_ONE_SAMPLE // 20
CONVOLVE_ONE_SAMPLE // 21
CONVOLVE_ONE_SAMPLE // 22
CONVOLVE_ONE_SAMPLE // 23
CONVOLVE_ONE_SAMPLE // 24
CONVOLVE_ONE_SAMPLE // 25
CONVOLVE_ONE_SAMPLE // 26
CONVOLVE_ONE_SAMPLE // 27
CONVOLVE_ONE_SAMPLE // 28
CONVOLVE_ONE_SAMPLE // 29
CONVOLVE_ONE_SAMPLE // 30
CONVOLVE_ONE_SAMPLE // 31
CONVOLVE_ONE_SAMPLE // 32
CONVOLVE_ONE_SAMPLE // 33
CONVOLVE_ONE_SAMPLE // 34
CONVOLVE_ONE_SAMPLE // 35
CONVOLVE_ONE_SAMPLE // 36
CONVOLVE_ONE_SAMPLE // 37
CONVOLVE_ONE_SAMPLE // 38
CONVOLVE_ONE_SAMPLE // 39
CONVOLVE_ONE_SAMPLE // 40
CONVOLVE_ONE_SAMPLE // 41
CONVOLVE_ONE_SAMPLE // 42
CONVOLVE_ONE_SAMPLE // 43
CONVOLVE_ONE_SAMPLE // 44
CONVOLVE_ONE_SAMPLE // 45
CONVOLVE_ONE_SAMPLE // 46
CONVOLVE_ONE_SAMPLE // 47
CONVOLVE_ONE_SAMPLE // 48
CONVOLVE_ONE_SAMPLE // 49
CONVOLVE_ONE_SAMPLE // 50
CONVOLVE_ONE_SAMPLE // 51
CONVOLVE_ONE_SAMPLE // 52
CONVOLVE_ONE_SAMPLE // 53
CONVOLVE_ONE_SAMPLE // 54
CONVOLVE_ONE_SAMPLE // 55
CONVOLVE_ONE_SAMPLE // 56
CONVOLVE_ONE_SAMPLE // 57
CONVOLVE_ONE_SAMPLE // 58
CONVOLVE_ONE_SAMPLE // 59
CONVOLVE_ONE_SAMPLE // 60
CONVOLVE_ONE_SAMPLE // 61
CONVOLVE_ONE_SAMPLE // 62
CONVOLVE_ONE_SAMPLE // 63
CONVOLVE_ONE_SAMPLE // 64
CONVOLVE_ONE_SAMPLE // 65
CONVOLVE_ONE_SAMPLE // 66
CONVOLVE_ONE_SAMPLE // 67
CONVOLVE_ONE_SAMPLE // 68
CONVOLVE_ONE_SAMPLE // 69
CONVOLVE_ONE_SAMPLE // 70
CONVOLVE_ONE_SAMPLE // 71
CONVOLVE_ONE_SAMPLE // 72
CONVOLVE_ONE_SAMPLE // 73
CONVOLVE_ONE_SAMPLE // 74
CONVOLVE_ONE_SAMPLE // 75
CONVOLVE_ONE_SAMPLE // 76
CONVOLVE_ONE_SAMPLE // 77
CONVOLVE_ONE_SAMPLE // 78
CONVOLVE_ONE_SAMPLE // 79
CONVOLVE_ONE_SAMPLE // 80
CONVOLVE_ONE_SAMPLE // 81
CONVOLVE_ONE_SAMPLE // 82
CONVOLVE_ONE_SAMPLE // 83
CONVOLVE_ONE_SAMPLE // 84
CONVOLVE_ONE_SAMPLE // 85
CONVOLVE_ONE_SAMPLE // 86
CONVOLVE_ONE_SAMPLE // 87
CONVOLVE_ONE_SAMPLE // 88
CONVOLVE_ONE_SAMPLE // 89
CONVOLVE_ONE_SAMPLE // 90
CONVOLVE_ONE_SAMPLE // 91
CONVOLVE_ONE_SAMPLE // 92
CONVOLVE_ONE_SAMPLE // 93
CONVOLVE_ONE_SAMPLE // 94
CONVOLVE_ONE_SAMPLE // 95
CONVOLVE_ONE_SAMPLE // 96
CONVOLVE_ONE_SAMPLE // 97
CONVOLVE_ONE_SAMPLE // 98
CONVOLVE_ONE_SAMPLE // 99
CONVOLVE_ONE_SAMPLE // 100
CONVOLVE_ONE_SAMPLE // 101
CONVOLVE_ONE_SAMPLE // 102
CONVOLVE_ONE_SAMPLE // 103
CONVOLVE_ONE_SAMPLE // 104
CONVOLVE_ONE_SAMPLE // 105
CONVOLVE_ONE_SAMPLE // 106
CONVOLVE_ONE_SAMPLE // 107
CONVOLVE_ONE_SAMPLE // 108
CONVOLVE_ONE_SAMPLE // 109
CONVOLVE_ONE_SAMPLE // 110
CONVOLVE_ONE_SAMPLE // 111
CONVOLVE_ONE_SAMPLE // 112
CONVOLVE_ONE_SAMPLE // 113
CONVOLVE_ONE_SAMPLE // 114
CONVOLVE_ONE_SAMPLE // 115
CONVOLVE_ONE_SAMPLE // 116
CONVOLVE_ONE_SAMPLE // 117
CONVOLVE_ONE_SAMPLE // 118
CONVOLVE_ONE_SAMPLE // 119
CONVOLVE_ONE_SAMPLE // 120
CONVOLVE_ONE_SAMPLE // 121
CONVOLVE_ONE_SAMPLE // 122
CONVOLVE_ONE_SAMPLE // 123
CONVOLVE_ONE_SAMPLE // 124
CONVOLVE_ONE_SAMPLE // 125
CONVOLVE_ONE_SAMPLE // 126
CONVOLVE_ONE_SAMPLE // 127
CONVOLVE_ONE_SAMPLE // 128
} else {
while (j < kernelSize) {
// Non-optimized using actual while loop.
CONVOLVE_ONE_SAMPLE
}
}
destP[i++] = sum;
}
// Copy 2nd half of input buffer to 1st half.
memcpy(m_buffer.Elements(), inputP, sizeof(float) * framesToProcess);
}
void DirectConvolver::reset()
{
PodZero(m_buffer.Elements(), m_buffer.Length());
}
} // namespace WebCore

View File

@ -1,61 +0,0 @@
/*
* Copyright (C) 2012 Intel Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef DirectConvolver_h
#define DirectConvolver_h
#include "nsTArray.h"
#include "mozilla/MemoryReporting.h"
namespace WebCore {
class DirectConvolver {
public:
explicit DirectConvolver(size_t inputBlockSize);
void process(const nsTArray<float>* convolutionKernel, const float* sourceP, float* destP, size_t framesToProcess);
void reset();
size_t sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
{
size_t amount = aMallocSizeOf(this);
amount += m_buffer.ShallowSizeOfExcludingThis(aMallocSizeOf);
return amount;
}
private:
size_t m_inputBlockSize;
nsTArray<float> m_buffer;
};
} // namespace WebCore
#endif // DirectConvolver_h

View File

@ -33,10 +33,11 @@ using namespace mozilla;
namespace WebCore {
FFTConvolver::FFTConvolver(size_t fftSize)
FFTConvolver::FFTConvolver(size_t fftSize, size_t renderPhase)
: m_frame(fftSize)
, m_readWriteIndex(0)
, m_readWriteIndex(renderPhase % (fftSize / 2))
{
MOZ_ASSERT(fftSize >= 2 * WEBAUDIO_BLOCK_SIZE);
m_inputBuffer.SetLength(fftSize);
PodZero(m_inputBuffer.Elements(), fftSize);
m_outputBuffer.SetLength(fftSize);
@ -60,67 +61,47 @@ size_t FFTConvolver::sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) co
return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
}
void FFTConvolver::process(FFTBlock* fftKernel, const float* sourceP, float* destP, size_t framesToProcess)
const float* FFTConvolver::process(FFTBlock* fftKernel, const float* sourceP)
{
size_t halfSize = fftSize() / 2;
// framesToProcess must be an exact multiple of halfSize,
// or halfSize is a multiple of framesToProcess when halfSize > framesToProcess.
bool isGood = !(halfSize % framesToProcess && framesToProcess % halfSize);
MOZ_ASSERT(isGood);
if (!isGood)
return;
// WEBAUDIO_BLOCK_SIZE must be an exact multiple of halfSize,
// halfSize must be a multiple of WEBAUDIO_BLOCK_SIZE
// and > WEBAUDIO_BLOCK_SIZE.
MOZ_ASSERT(halfSize % WEBAUDIO_BLOCK_SIZE == 0 &&
WEBAUDIO_BLOCK_SIZE <= halfSize);
size_t numberOfDivisions = halfSize <= framesToProcess ? (framesToProcess / halfSize) : 1;
size_t divisionSize = numberOfDivisions == 1 ? framesToProcess : halfSize;
// Copy samples to input buffer (note contraint above!)
float* inputP = m_inputBuffer.Elements();
for (size_t i = 0; i < numberOfDivisions; ++i, sourceP += divisionSize, destP += divisionSize) {
// Copy samples to input buffer (note contraint above!)
float* inputP = m_inputBuffer.Elements();
MOZ_ASSERT(sourceP && inputP && m_readWriteIndex + WEBAUDIO_BLOCK_SIZE <= m_inputBuffer.Length());
// Sanity check
bool isCopyGood1 = sourceP && inputP && m_readWriteIndex + divisionSize <= m_inputBuffer.Length();
MOZ_ASSERT(isCopyGood1);
if (!isCopyGood1)
return;
memcpy(inputP + m_readWriteIndex, sourceP, sizeof(float) * WEBAUDIO_BLOCK_SIZE);
memcpy(inputP + m_readWriteIndex, sourceP, sizeof(float) * divisionSize);
float* outputP = m_outputBuffer.Elements();
m_readWriteIndex += WEBAUDIO_BLOCK_SIZE;
// Copy samples from output buffer
float* outputP = m_outputBuffer.Elements();
// Check if it's time to perform the next FFT
if (m_readWriteIndex == halfSize) {
// The input buffer is now filled (get frequency-domain version)
m_frame.PerformFFT(m_inputBuffer.Elements());
m_frame.Multiply(*fftKernel);
m_frame.GetInverseWithoutScaling(m_outputBuffer.Elements());
// Sanity check
bool isCopyGood2 = destP && outputP && m_readWriteIndex + divisionSize <= m_outputBuffer.Length();
MOZ_ASSERT(isCopyGood2);
if (!isCopyGood2)
return;
// Overlap-add 1st half from previous time
AudioBufferAddWithScale(m_lastOverlapBuffer.Elements(), 1.0f,
m_outputBuffer.Elements(), halfSize);
memcpy(destP, outputP + m_readWriteIndex, sizeof(float) * divisionSize);
m_readWriteIndex += divisionSize;
// Finally, save 2nd half of result
MOZ_ASSERT(m_outputBuffer.Length() == 2 * halfSize && m_lastOverlapBuffer.Length() == halfSize);
// Check if it's time to perform the next FFT
if (m_readWriteIndex == halfSize) {
// The input buffer is now filled (get frequency-domain version)
m_frame.PerformFFT(m_inputBuffer.Elements());
m_frame.Multiply(*fftKernel);
m_frame.GetInverseWithoutScaling(m_outputBuffer.Elements());
memcpy(m_lastOverlapBuffer.Elements(), m_outputBuffer.Elements() + halfSize, sizeof(float) * halfSize);
// Overlap-add 1st half from previous time
AudioBufferAddWithScale(m_lastOverlapBuffer.Elements(), 1.0f,
m_outputBuffer.Elements(), halfSize);
// Finally, save 2nd half of result
bool isCopyGood3 = m_outputBuffer.Length() == 2 * halfSize && m_lastOverlapBuffer.Length() == halfSize;
MOZ_ASSERT(isCopyGood3);
if (!isCopyGood3)
return;
memcpy(m_lastOverlapBuffer.Elements(), m_outputBuffer.Elements() + halfSize, sizeof(float) * halfSize);
// Reset index back to start for next time
m_readWriteIndex = 0;
}
// Reset index back to start for next time
m_readWriteIndex = 0;
}
return outputP + m_readWriteIndex;
}
void FFTConvolver::reset()
@ -129,4 +110,10 @@ void FFTConvolver::reset()
m_readWriteIndex = 0;
}
size_t FFTConvolver::latencyFrames() const
{
return std::max<size_t>(fftSize()/2, WEBAUDIO_BLOCK_SIZE) -
WEBAUDIO_BLOCK_SIZE;
}
} // namespace WebCore

View File

@ -40,24 +40,29 @@ using mozilla::FFTBlock;
class FFTConvolver {
public:
// fftSize must be a power of two
explicit FFTConvolver(size_t fftSize);
// |fftSize| must be a power of two.
//
// |renderPhase| is the initial offset in the initially zero input buffer.
// It is coordinated with the other stages, so they don't all do their
// FFTs at the same time.
explicit FFTConvolver(size_t fftSize, size_t renderPhase = 0);
// Process WEBAUDIO_BLOCK_SIZE elements of array |sourceP| and return a
// pointer to an output array of the same size.
//
// |fftKernel| must be pre-scaled for FFTBlock::GetInverseWithoutScaling().
//
// For now, with multiple calls to Process(), framesToProcess MUST add up EXACTLY to fftSize / 2
//
// FIXME: Later, we can do more sophisticated buffering to relax this requirement...
//
// The input to output latency is equal to fftSize / 2
//
// Processing in-place is allowed...
void process(FFTBlock* fftKernel, const float* sourceP, float* destP, size_t framesToProcess);
const float* process(FFTBlock* fftKernel, const float* sourceP);
void reset();
size_t fftSize() const { return m_frame.FFTSize(); }
// The input to output latency is up to fftSize / 2, but alignment of the
// FFTs with the blocks reduces this by one block.
size_t latencyFrames() const;
size_t sizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
size_t sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;

View File

@ -76,19 +76,19 @@ size_t HRTFElevation::fftSizeForSampleRate(float sampleRate)
// This is the size if we were to use all raw response samples.
unsigned resampledLength =
floorf(ResponseFrameSize * sampleRate / rawSampleRate);
// Keep things semi-sane, with max FFT size of 1024 and minimum of 4.
// "size |= 3" ensures a minimum of 4 (with the size++ below) and sets the
// 2 least significant bits for rounding up to the next power of 2 below.
// Keep things semi-sane, with max FFT size of 1024.
unsigned size = min(resampledLength, 1023U);
size |= 3;
// Ensure a minimum of 2 * WEBAUDIO_BLOCK_SIZE (with the size++ below) for
// FFTConvolver and set the 8 least significant bits for rounding up to
// the next power of 2 below.
size |= 2 * WEBAUDIO_BLOCK_SIZE - 1;
// Round up to the next power of 2, making the FFT size no more than twice
// the impulse response length. This doubles size for values that are
// already powers of 2. This works by filling in 7 bits to right of the
// already powers of 2. This works by filling in alls bit to right of the
// most significant bit. The most significant bit is no greater than
// 1 << 9, and the least significant 2 bits were already set above.
// 1 << 9, and the least significant 8 bits were already set above, so
// there is at most one bit to add.
size |= (size >> 1);
size |= (size >> 2);
size |= (size >> 4);
size++;
MOZ_ASSERT((size & (size - 1)) == 0);

View File

@ -59,11 +59,6 @@ HRTFPanner::HRTFPanner(float sampleRate, already_AddRefed<HRTFDatabaseLoader> da
{
MOZ_ASSERT(m_databaseLoader);
MOZ_COUNT_CTOR(HRTFPanner);
m_tempL1.SetLength(RenderingQuantum);
m_tempR1.SetLength(RenderingQuantum);
m_tempL2.SetLength(RenderingQuantum);
m_tempR2.SetLength(RenderingQuantum);
}
HRTFPanner::~HRTFPanner()
@ -81,10 +76,6 @@ size_t HRTFPanner::sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) cons
amount += m_convolverL2.sizeOfExcludingThis(aMallocSizeOf);
amount += m_convolverR2.sizeOfExcludingThis(aMallocSizeOf);
amount += m_delayLine.SizeOfExcludingThis(aMallocSizeOf);
amount += m_tempL1.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += m_tempL2.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += m_tempR1.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += m_tempR2.ShallowSizeOfExcludingThis(aMallocSizeOf);
return amount;
}
@ -256,23 +247,26 @@ void HRTFPanner::pan(double desiredAzimuth, double elevation, const AudioBlock*
bool needsCrossfading = m_crossfadeIncr;
// Have the convolvers render directly to the final destination if we're not cross-fading.
float* convolutionDestinationL1 = needsCrossfading ? m_tempL1.Elements() : destinationL;
float* convolutionDestinationR1 = needsCrossfading ? m_tempR1.Elements() : destinationR;
float* convolutionDestinationL2 = needsCrossfading ? m_tempL2.Elements() : destinationL;
float* convolutionDestinationR2 = needsCrossfading ? m_tempR2.Elements() : destinationR;
const float* convolutionDestinationL1;
const float* convolutionDestinationR1;
const float* convolutionDestinationL2;
const float* convolutionDestinationR2;
// Now do the convolutions.
// Note that we avoid doing convolutions on both sets of convolvers if we're not currently cross-fading.
if (m_crossfadeSelection == CrossfadeSelection1 || needsCrossfading) {
m_convolverL1.process(kernelL1->fftFrame(), destinationL, convolutionDestinationL1, WEBAUDIO_BLOCK_SIZE);
m_convolverR1.process(kernelR1->fftFrame(), destinationR, convolutionDestinationR1, WEBAUDIO_BLOCK_SIZE);
convolutionDestinationL1 =
m_convolverL1.process(kernelL1->fftFrame(), destinationL);
convolutionDestinationR1 =
m_convolverR1.process(kernelR1->fftFrame(), destinationR);
}
if (m_crossfadeSelection == CrossfadeSelection2 || needsCrossfading) {
m_convolverL2.process(kernelL2->fftFrame(), destinationL, convolutionDestinationL2, WEBAUDIO_BLOCK_SIZE);
m_convolverR2.process(kernelR2->fftFrame(), destinationR, convolutionDestinationR2, WEBAUDIO_BLOCK_SIZE);
convolutionDestinationL2 =
m_convolverL2.process(kernelL2->fftFrame(), destinationL);
convolutionDestinationR2 =
m_convolverR2.process(kernelR2->fftFrame(), destinationR);
}
if (needsCrossfading) {
@ -298,6 +292,18 @@ void HRTFPanner::pan(double desiredAzimuth, double elevation, const AudioBlock*
m_crossfadeX = 0;
m_crossfadeIncr = 0;
}
} else {
const float* sourceL;
const float* sourceR;
if (m_crossfadeSelection == CrossfadeSelection1) {
sourceL = convolutionDestinationL1;
sourceR = convolutionDestinationR1;
} else {
sourceL = convolutionDestinationL2;
sourceR = convolutionDestinationR2;
}
PodCopy(destinationL, sourceL, WEBAUDIO_BLOCK_SIZE);
PodCopy(destinationR, sourceR, WEBAUDIO_BLOCK_SIZE);
}
}
@ -307,10 +313,12 @@ int HRTFPanner::maxTailFrames() const
// response, there is additional tail time from the approximations in the
// implementation. Because HRTFPanner is implemented with a DelayKernel
// and a FFTConvolver, the tailTime of the HRTFPanner is the sum of the
// tailTime of the DelayKernel and the tailTime of the FFTConvolver.
// The FFTConvolver has a tail time of fftSize(), including latency of
// fftSize()/2.
return m_delayLine.MaxDelayTicks() + fftSize();
// tailTime of the DelayKernel and the tailTime of the FFTConvolver. The
// FFTs of the convolver are fftSize(), half of which is latency, but this
// is aligned with blocks and so is reduced by the one block which is
// processed immediately.
return m_delayLine.MaxDelayTicks() +
m_convolverL1.fftSize()/2 + m_convolverL1.latencyFrames();
}
} // namespace WebCore

View File

@ -77,7 +77,7 @@ static float calculateNormalizationScale(ThreadSharedFloatArrayBufferList* respo
return scale;
}
Reverb::Reverb(ThreadSharedFloatArrayBufferList* impulseResponse, size_t impulseResponseBufferLength, size_t renderSliceSize, size_t maxFFTSize, size_t numberOfChannels, bool useBackgroundThreads, bool normalize, float sampleRate)
Reverb::Reverb(ThreadSharedFloatArrayBufferList* impulseResponse, size_t impulseResponseBufferLength, size_t maxFFTSize, size_t numberOfChannels, bool useBackgroundThreads, bool normalize, float sampleRate)
{
float scale = 1;
@ -101,7 +101,7 @@ Reverb::Reverb(ThreadSharedFloatArrayBufferList* impulseResponse, size_t impulse
}
}
initialize(irChannels, impulseResponseBufferLength, renderSliceSize,
initialize(irChannels, impulseResponseBufferLength,
maxFFTSize, numberOfChannels, useBackgroundThreads);
}
@ -121,7 +121,7 @@ size_t Reverb::sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
void Reverb::initialize(const nsTArray<const float*>& impulseResponseBuffer,
size_t impulseResponseBufferLength, size_t renderSliceSize,
size_t impulseResponseBufferLength,
size_t maxFFTSize, size_t numberOfChannels, bool useBackgroundThreads)
{
m_impulseResponseLength = impulseResponseBufferLength;
@ -135,10 +135,10 @@ void Reverb::initialize(const nsTArray<const float*>& impulseResponseBuffer,
const float* channel = impulseResponseBuffer[i];
size_t length = impulseResponseBufferLength;
nsAutoPtr<ReverbConvolver> convolver(new ReverbConvolver(channel, length, renderSliceSize, maxFFTSize, convolverRenderPhase, useBackgroundThreads));
nsAutoPtr<ReverbConvolver> convolver(new ReverbConvolver(channel, length, maxFFTSize, convolverRenderPhase, useBackgroundThreads));
m_convolvers.AppendElement(convolver.forget());
convolverRenderPhase += renderSliceSize;
convolverRenderPhase += WEBAUDIO_BLOCK_SIZE;
}
// For "True" stereo processing we allocate a temporary buffer to avoid repeatedly allocating it in the process() method.
@ -149,12 +149,12 @@ void Reverb::initialize(const nsTArray<const float*>& impulseResponseBuffer,
}
}
void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus, size_t framesToProcess)
void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus)
{
// Do a fairly comprehensive sanity check.
// If these conditions are satisfied, all of the source and destination pointers will be valid for the various matrixing cases.
bool isSafeToProcess = sourceBus && destinationBus && sourceBus->ChannelCount() > 0 && destinationBus->mChannelData.Length() > 0
&& framesToProcess <= MaxFrameSize && framesToProcess <= size_t(sourceBus->GetDuration()) && framesToProcess <= size_t(destinationBus->GetDuration());
&& WEBAUDIO_BLOCK_SIZE <= MaxFrameSize && WEBAUDIO_BLOCK_SIZE <= size_t(sourceBus->GetDuration()) && WEBAUDIO_BLOCK_SIZE <= size_t(destinationBus->GetDuration());
MOZ_ASSERT(isSafeToProcess);
if (!isSafeToProcess)
@ -175,28 +175,28 @@ void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus, si
// 2 -> 2 -> 2
const float* sourceBusR = static_cast<const float*>(sourceBus->mChannelData[1]);
float* destinationChannelR = static_cast<float*>(const_cast<void*>(destinationBus->mChannelData[1]));
m_convolvers[0]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelL, destinationBus->GetDuration(), framesToProcess);
m_convolvers[1]->process(sourceBusR, sourceBus->GetDuration(), destinationChannelR, destinationBus->GetDuration(), framesToProcess);
m_convolvers[0]->process(sourceBusL, destinationChannelL);
m_convolvers[1]->process(sourceBusR, destinationChannelR);
} else if (numInputChannels == 1 && numOutputChannels == 2 && numReverbChannels == 2) {
// 1 -> 2 -> 2
for (int i = 0; i < 2; ++i) {
float* destinationChannel = static_cast<float*>(const_cast<void*>(destinationBus->mChannelData[i]));
m_convolvers[i]->process(sourceBusL, sourceBus->GetDuration(), destinationChannel, destinationBus->GetDuration(), framesToProcess);
m_convolvers[i]->process(sourceBusL, destinationChannel);
}
} else if (numInputChannels == 1 && numReverbChannels == 1 && numOutputChannels == 2) {
// 1 -> 1 -> 2
m_convolvers[0]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelL, destinationBus->GetDuration(), framesToProcess);
m_convolvers[0]->process(sourceBusL, destinationChannelL);
// simply copy L -> R
float* destinationChannelR = static_cast<float*>(const_cast<void*>(destinationBus->mChannelData[1]));
bool isCopySafe = destinationChannelL && destinationChannelR && size_t(destinationBus->GetDuration()) >= framesToProcess;
bool isCopySafe = destinationChannelL && destinationChannelR && size_t(destinationBus->GetDuration()) >= WEBAUDIO_BLOCK_SIZE;
MOZ_ASSERT(isCopySafe);
if (!isCopySafe)
return;
PodCopy(destinationChannelR, destinationChannelL, framesToProcess);
PodCopy(destinationChannelR, destinationChannelL, WEBAUDIO_BLOCK_SIZE);
} else if (numInputChannels == 1 && numReverbChannels == 1 && numOutputChannels == 1) {
// 1 -> 1 -> 1
m_convolvers[0]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelL, destinationBus->GetDuration(), framesToProcess);
m_convolvers[0]->process(sourceBusL, destinationChannelL);
} else if (numInputChannels == 2 && numReverbChannels == 4 && numOutputChannels == 2) {
// 2 -> 4 -> 2 ("True" stereo)
const float* sourceBusR = static_cast<const float*>(sourceBus->mChannelData[1]);
@ -206,12 +206,12 @@ void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus, si
float* tempChannelR = static_cast<float*>(const_cast<void*>(m_tempBuffer.mChannelData[1]));
// Process left virtual source
m_convolvers[0]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelL, destinationBus->GetDuration(), framesToProcess);
m_convolvers[1]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelR, destinationBus->GetDuration(), framesToProcess);
m_convolvers[0]->process(sourceBusL, destinationChannelL);
m_convolvers[1]->process(sourceBusL, destinationChannelR);
// Process right virtual source
m_convolvers[2]->process(sourceBusR, sourceBus->GetDuration(), tempChannelL, m_tempBuffer.GetDuration(), framesToProcess);
m_convolvers[3]->process(sourceBusR, sourceBus->GetDuration(), tempChannelR, m_tempBuffer.GetDuration(), framesToProcess);
m_convolvers[2]->process(sourceBusR, tempChannelL);
m_convolvers[3]->process(sourceBusR, tempChannelR);
AudioBufferAddWithScale(tempChannelL, 1.0f, destinationChannelL, sourceBus->GetDuration());
AudioBufferAddWithScale(tempChannelR, 1.0f, destinationChannelR, sourceBus->GetDuration());
@ -224,12 +224,12 @@ void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus, si
float* tempChannelR = static_cast<float*>(const_cast<void*>(m_tempBuffer.mChannelData[1]));
// Process left virtual source
m_convolvers[0]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelL, destinationBus->GetDuration(), framesToProcess);
m_convolvers[1]->process(sourceBusL, sourceBus->GetDuration(), destinationChannelR, destinationBus->GetDuration(), framesToProcess);
m_convolvers[0]->process(sourceBusL, destinationChannelL);
m_convolvers[1]->process(sourceBusL, destinationChannelR);
// Process right virtual source
m_convolvers[2]->process(sourceBusL, sourceBus->GetDuration(), tempChannelL, m_tempBuffer.GetDuration(), framesToProcess);
m_convolvers[3]->process(sourceBusL, sourceBus->GetDuration(), tempChannelR, m_tempBuffer.GetDuration(), framesToProcess);
m_convolvers[2]->process(sourceBusL, tempChannelL);
m_convolvers[3]->process(sourceBusL, tempChannelR);
AudioBufferAddWithScale(tempChannelL, 1.0f, destinationChannelL, sourceBus->GetDuration());
AudioBufferAddWithScale(tempChannelR, 1.0f, destinationChannelR, sourceBus->GetDuration());
@ -240,15 +240,4 @@ void Reverb::process(const AudioBlock* sourceBus, AudioBlock* destinationBus, si
}
}
void Reverb::reset()
{
for (size_t i = 0; i < m_convolvers.Length(); ++i)
m_convolvers[i]->reset();
}
size_t Reverb::latencyFrames() const
{
return !m_convolvers.IsEmpty() ? m_convolvers[0]->latencyFrames() : 0;
}
} // namespace WebCore

View File

@ -48,18 +48,22 @@ public:
enum { MaxFrameSize = 256 };
// renderSliceSize is a rendering hint, so the FFTs can be optimized to not all occur at the same time (very bad when rendering on a real-time thread).
Reverb(mozilla::ThreadSharedFloatArrayBufferList* impulseResponseBuffer, size_t impulseResponseBufferLength, size_t renderSliceSize, size_t maxFFTSize, size_t numberOfChannels, bool useBackgroundThreads, bool normalize, float sampleRate);
Reverb(mozilla::ThreadSharedFloatArrayBufferList* impulseResponseBuffer,
size_t impulseResponseBufferLength, size_t maxFFTSize,
size_t numberOfChannels, bool useBackgroundThreads, bool normalize,
float sampleRate);
void process(const mozilla::AudioBlock* sourceBus, mozilla::AudioBlock* destinationBus, size_t framesToProcess);
void reset();
void process(const mozilla::AudioBlock* sourceBus,
mozilla::AudioBlock* destinationBus);
size_t impulseResponseLength() const { return m_impulseResponseLength; }
size_t latencyFrames() const;
size_t sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
private:
void initialize(const nsTArray<const float*>& impulseResponseBuffer, size_t impulseResponseBufferLength, size_t renderSliceSize, size_t maxFFTSize, size_t numberOfChannels, bool useBackgroundThreads);
void initialize(const nsTArray<const float*>& impulseResponseBuffer,
size_t impulseResponseBufferLength, size_t maxFFTSize,
size_t numberOfChannels, bool useBackgroundThreads);
size_t m_impulseResponseLength;

View File

@ -76,7 +76,7 @@ void ReverbAccumulationBuffer::updateReadIndex(int* readIndex, size_t numberOfFr
*readIndex = (*readIndex + numberOfFrames) % m_buffer.Length();
}
int ReverbAccumulationBuffer::accumulate(float* source, size_t numberOfFrames, int* readIndex, size_t delayFrames)
int ReverbAccumulationBuffer::accumulate(const float* source, size_t numberOfFrames, int* readIndex, size_t delayFrames)
{
size_t bufferLength = m_buffer.Length();

View File

@ -50,7 +50,7 @@ public:
// We need to pass in and update readIndex here, since each ReverbConvolverStage may be running in
// a different thread than the realtime thread calling ReadAndClear() and maintaining m_readIndex
// Returns the writeIndex where the accumulation took place
int accumulate(float* source, size_t numberOfFrames, int* readIndex, size_t delayFrames);
int accumulate(const float* source, size_t numberOfFrames, int* readIndex, size_t delayFrames);
size_t readIndex() const { return m_readIndex; }
void updateReadIndex(int* readIndex, size_t numberOfFrames) const;

View File

@ -49,29 +49,32 @@ const int InputBufferSize = 8 * 16384;
// This was found to be a good value on Mac OS X, and may work well on other platforms as well, assuming
// the very rough scheduling latencies are similar on these time-scales. Of course, this code may need to be
// tuned for individual platforms if this assumption is found to be incorrect.
const size_t RealtimeFrameLimit = 8192 + 4096; // ~278msec @ 44.1KHz
const size_t RealtimeFrameLimit = 8192 + 4096 // ~278msec @ 44.1KHz
- WEBAUDIO_BLOCK_SIZE;
// First stage will have size MinFFTSize - successive stages will double in
// size each time until we hit the maximum size.
const size_t MinFFTSize = 256;
// If we are using background threads then don't exceed this FFT size for the
// stages which run in the real-time thread. This avoids having only one or
// two large stages (size 16384 or so) at the end which take a lot of time
// every several processing slices. This way we amortize the cost over more
// processing slices.
const size_t MaxRealtimeFFTSize = 4096;
const size_t MinFFTSize = 128;
const size_t MaxRealtimeFFTSize = 2048;
ReverbConvolver::ReverbConvolver(const float* impulseResponseData, size_t impulseResponseLength, size_t renderSliceSize, size_t maxFFTSize, size_t convolverRenderPhase, bool useBackgroundThreads)
ReverbConvolver::ReverbConvolver(const float* impulseResponseData,
size_t impulseResponseLength,
size_t maxFFTSize,
size_t convolverRenderPhase,
bool useBackgroundThreads)
: m_impulseResponseLength(impulseResponseLength)
, m_accumulationBuffer(impulseResponseLength + renderSliceSize)
, m_accumulationBuffer(impulseResponseLength + WEBAUDIO_BLOCK_SIZE)
, m_inputBuffer(InputBufferSize)
, m_minFFTSize(MinFFTSize) // First stage will have this size - successive stages will double in size each time
, m_maxFFTSize(maxFFTSize) // until we hit m_maxFFTSize
, m_backgroundThread("ConvolverWorker")
, m_backgroundThreadCondition(&m_backgroundThreadLock)
, m_useBackgroundThreads(useBackgroundThreads)
, m_wantsToExit(false)
, m_moreInputBuffered(false)
{
// If we are using background threads then don't exceed this FFT size for the
// stages which run in the real-time thread. This avoids having only one or two
// large stages (size 16384 or so) at the end which take a lot of time every several
// processing slices. This way we amortize the cost over more processing slices.
m_maxRealtimeFFTSize = MaxRealtimeFFTSize;
// For the moment, a good way to know if we have real-time constraint is to check if we're using background threads.
// Otherwise, assume we're being run from a command-line tool.
bool hasRealtimeConstraint = useBackgroundThreads;
@ -79,12 +82,13 @@ ReverbConvolver::ReverbConvolver(const float* impulseResponseData, size_t impuls
const float* response = impulseResponseData;
size_t totalResponseLength = impulseResponseLength;
// The total latency is zero because the direct-convolution is used in the leading portion.
// The total latency is zero because the first FFT stage is small enough
// to return output in the first block.
size_t reverbTotalLatency = 0;
size_t stageOffset = 0;
int i = 0;
size_t fftSize = m_minFFTSize;
size_t stagePhase = 0;
size_t fftSize = MinFFTSize;
while (stageOffset < totalResponseLength) {
size_t stageSize = fftSize / 2;
@ -94,11 +98,13 @@ ReverbConvolver::ReverbConvolver(const float* impulseResponseData, size_t impuls
stageSize = totalResponseLength - stageOffset;
// This "staggers" the time when each FFT happens so they don't all happen at the same time
int renderPhase = convolverRenderPhase + i * renderSliceSize;
int renderPhase = convolverRenderPhase + stagePhase;
bool useDirectConvolver = !stageOffset;
nsAutoPtr<ReverbConvolverStage> stage(new ReverbConvolverStage(response, totalResponseLength, reverbTotalLatency, stageOffset, stageSize, fftSize, renderPhase, renderSliceSize, &m_accumulationBuffer, useDirectConvolver));
nsAutoPtr<ReverbConvolverStage> stage
(new ReverbConvolverStage(response, totalResponseLength,
reverbTotalLatency, stageOffset, stageSize,
fftSize, renderPhase,
&m_accumulationBuffer));
bool isBackgroundStage = false;
@ -108,18 +114,35 @@ ReverbConvolver::ReverbConvolver(const float* impulseResponseData, size_t impuls
} else
m_stages.AppendElement(stage.forget());
// Figure out next FFT size
fftSize *= 2;
stageOffset += stageSize;
++i;
if (!useDirectConvolver) {
// Figure out next FFT size
fftSize *= 2;
if (hasRealtimeConstraint && !isBackgroundStage
&& fftSize > MaxRealtimeFFTSize) {
fftSize = MaxRealtimeFFTSize;
// Custom phase positions for all but the first of the realtime
// stages of largest size. These spread out the work of the
// larger realtime stages. None of the FFTs of size 1024, 2048 or
// 4096 are performed when processing the same block. The first
// MaxRealtimeFFTSize = 4096 stage, at the end of the doubling,
// performs its FFT at block 7. The FFTs of size 2048 are
// performed in blocks 3 + 8 * n and size 1024 at 1 + 4 * n.
const uint32_t phaseLookup[] = { 14, 0, 10, 4 };
stagePhase = WEBAUDIO_BLOCK_SIZE *
phaseLookup[m_stages.Length() % ArrayLength(phaseLookup)];
} else if (fftSize > maxFFTSize) {
fftSize = maxFFTSize;
// A prime offset spreads out FFTs in a way that all
// available phase positions will be used if there are sufficient
// stages.
stagePhase += 5 * WEBAUDIO_BLOCK_SIZE;
} else if (stageSize > WEBAUDIO_BLOCK_SIZE) {
// As the stages are doubling in size, the next FFT will occur
// mid-way between FFTs for this stage.
stagePhase = stageSize - WEBAUDIO_BLOCK_SIZE;
}
if (hasRealtimeConstraint && !isBackgroundStage && fftSize > m_maxRealtimeFFTSize)
fftSize = m_maxRealtimeFFTSize;
if (fftSize > m_maxFFTSize)
fftSize = m_maxFFTSize;
}
// Start up background thread
@ -199,25 +222,16 @@ void ReverbConvolver::backgroundThreadEntry()
int readIndex;
while ((readIndex = m_backgroundStages[0]->inputReadIndex()) != writeIndex) { // FIXME: do better to detect buffer overrun...
// The ReverbConvolverStages need to process in amounts which evenly divide half the FFT size
const int SliceSize = MinFFTSize / 2;
// Accumulate contributions from each stage
for (size_t i = 0; i < m_backgroundStages.Length(); ++i)
m_backgroundStages[i]->processInBackground(this, SliceSize);
m_backgroundStages[i]->processInBackground(this);
}
}
}
void ReverbConvolver::process(const float* sourceChannelData, size_t sourceChannelLength,
float* destinationChannelData, size_t destinationChannelLength,
size_t framesToProcess)
void ReverbConvolver::process(const float* sourceChannelData,
float* destinationChannelData)
{
bool isSafe = sourceChannelData && destinationChannelData && sourceChannelLength >= framesToProcess && destinationChannelLength >= framesToProcess;
MOZ_ASSERT(isSafe);
if (!isSafe)
return;
const float* source = sourceChannelData;
float* destination = destinationChannelData;
bool isDataSafe = source && destination;
@ -226,14 +240,14 @@ void ReverbConvolver::process(const float* sourceChannelData, size_t sourceChann
return;
// Feed input buffer (read by all threads)
m_inputBuffer.write(source, framesToProcess);
m_inputBuffer.write(source, WEBAUDIO_BLOCK_SIZE);
// Accumulate contributions from each stage
for (size_t i = 0; i < m_stages.Length(); ++i)
m_stages[i]->process(source, framesToProcess);
m_stages[i]->process(source);
// Finally read from accumulation buffer
m_accumulationBuffer.readAndClear(destination, framesToProcess);
m_accumulationBuffer.readAndClear(destination, WEBAUDIO_BLOCK_SIZE);
// Now that we've buffered more input, wake up our background thread.
@ -249,21 +263,4 @@ void ReverbConvolver::process(const float* sourceChannelData, size_t sourceChann
}
}
void ReverbConvolver::reset()
{
for (size_t i = 0; i < m_stages.Length(); ++i)
m_stages[i]->reset();
for (size_t i = 0; i < m_backgroundStages.Length(); ++i)
m_backgroundStages[i]->reset();
m_accumulationBuffer.reset();
m_inputBuffer.reset();
}
size_t ReverbConvolver::latencyFrames() const
{
return 0;
}
} // namespace WebCore

View File

@ -50,13 +50,13 @@ public:
// For certain tweaky de-convolving applications the phase errors add up quickly and lead to non-sensical results with
// larger FFT sizes and single-precision floats. In these cases 2048 is a good size.
// If not doing multi-threaded convolution, then should not go > 8192.
ReverbConvolver(const float* impulseResponseData, size_t impulseResponseLength, size_t renderSliceSize, size_t maxFFTSize, size_t convolverRenderPhase, bool useBackgroundThreads);
ReverbConvolver(const float* impulseResponseData,
size_t impulseResponseLength, size_t maxFFTSize,
size_t convolverRenderPhase, bool useBackgroundThreads);
~ReverbConvolver();
void process(const float* sourceChannelData, size_t sourceChannelLength,
float* destinationChannelData, size_t destinationChannelLength,
size_t framesToProcess);
void reset();
void process(const float* sourceChannelData,
float* destinationChannelData);
size_t impulseResponseLength() const { return m_impulseResponseLength; }
@ -65,8 +65,6 @@ public:
bool useBackgroundThreads() const { return m_useBackgroundThreads; }
void backgroundThreadEntry();
size_t latencyFrames() const;
size_t sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
private:
nsTArray<nsAutoPtr<ReverbConvolverStage> > m_stages;
@ -78,13 +76,6 @@ private:
// One or more background threads read from this input buffer which is fed from the realtime thread.
ReverbInputBuffer m_inputBuffer;
// First stage will be of size m_minFFTSize. Each next stage will be twice as big until we hit m_maxFFTSize.
size_t m_minFFTSize;
size_t m_maxFFTSize;
// But don't exceed this size in the real-time thread (if we're doing background processing).
size_t m_maxRealtimeFFTSize;
// Background thread and synchronization
base::Thread m_backgroundThread;
Lock m_backgroundThreadLock;

View File

@ -37,54 +37,32 @@ using namespace mozilla;
namespace WebCore {
ReverbConvolverStage::ReverbConvolverStage(const float* impulseResponse, size_t, size_t reverbTotalLatency, size_t stageOffset, size_t stageLength,
size_t fftSize, size_t renderPhase, size_t renderSliceSize, ReverbAccumulationBuffer* accumulationBuffer, bool directMode)
ReverbConvolverStage::ReverbConvolverStage(const float* impulseResponse, size_t,
size_t reverbTotalLatency,
size_t stageOffset,
size_t stageLength,
size_t fftSize, size_t renderPhase,
ReverbAccumulationBuffer* accumulationBuffer)
: m_accumulationBuffer(accumulationBuffer)
, m_accumulationReadIndex(0)
, m_inputReadIndex(0)
, m_directMode(directMode)
{
MOZ_ASSERT(impulseResponse);
MOZ_ASSERT(accumulationBuffer);
if (!m_directMode) {
m_fftKernel = new FFTBlock(fftSize);
m_fftKernel->PadAndMakeScaledDFT(impulseResponse + stageOffset, stageLength);
m_fftConvolver = new FFTConvolver(fftSize);
} else {
m_directKernel.SetLength(fftSize / 2);
PodCopy(m_directKernel.Elements(), impulseResponse + stageOffset, fftSize / 2);
m_directConvolver = new DirectConvolver(renderSliceSize);
}
m_temporaryBuffer.SetLength(renderSliceSize);
PodZero(m_temporaryBuffer.Elements(), m_temporaryBuffer.Length());
m_fftKernel = new FFTBlock(fftSize);
m_fftKernel->PadAndMakeScaledDFT(impulseResponse + stageOffset, stageLength);
m_fftConvolver = new FFTConvolver(fftSize, renderPhase);
// The convolution stage at offset stageOffset needs to have a corresponding delay to cancel out the offset.
size_t totalDelay = stageOffset + reverbTotalLatency;
// But, the FFT convolution itself incurs fftSize / 2 latency, so subtract this out...
size_t halfSize = fftSize / 2;
if (!m_directMode) {
MOZ_ASSERT(totalDelay >= halfSize);
if (totalDelay >= halfSize)
totalDelay -= halfSize;
}
// But, the FFT convolution itself incurs latency, so subtract this out...
size_t fftLatency = m_fftConvolver->latencyFrames();
MOZ_ASSERT(totalDelay >= fftLatency);
totalDelay -= fftLatency;
// We divide up the total delay, into pre and post delay sections so that we can schedule at exactly the moment when the FFT will happen.
// This is coordinated with the other stages, so they don't all do their FFTs at the same time...
int maxPreDelayLength = std::min(halfSize, totalDelay);
m_preDelayLength = totalDelay > 0 ? renderPhase % maxPreDelayLength : 0;
if (m_preDelayLength > totalDelay)
m_preDelayLength = 0;
m_postDelayLength = totalDelay - m_preDelayLength;
m_preReadWriteIndex = 0;
m_framesProcessed = 0; // total frames processed so far
size_t delayBufferSize = m_preDelayLength < fftSize ? fftSize : m_preDelayLength;
delayBufferSize = delayBufferSize < renderSliceSize ? renderSliceSize : delayBufferSize;
m_preDelayBuffer.SetLength(delayBufferSize);
PodZero(m_preDelayBuffer.Elements(), m_preDelayBuffer.Length());
m_postDelayLength = totalDelay;
}
size_t ReverbConvolverStage::sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
@ -99,101 +77,31 @@ size_t ReverbConvolverStage::sizeOfIncludingThis(mozilla::MallocSizeOf aMallocSi
amount += m_fftConvolver->sizeOfIncludingThis(aMallocSizeOf);
}
amount += m_preDelayBuffer.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += m_temporaryBuffer.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += m_directKernel.ShallowSizeOfExcludingThis(aMallocSizeOf);
if (m_directConvolver) {
amount += m_directConvolver->sizeOfIncludingThis(aMallocSizeOf);
}
return amount;
}
void ReverbConvolverStage::processInBackground(ReverbConvolver* convolver, size_t framesToProcess)
void ReverbConvolverStage::processInBackground(ReverbConvolver* convolver)
{
ReverbInputBuffer* inputBuffer = convolver->inputBuffer();
float* source = inputBuffer->directReadFrom(&m_inputReadIndex, framesToProcess);
process(source, framesToProcess);
float* source = inputBuffer->directReadFrom(&m_inputReadIndex,
WEBAUDIO_BLOCK_SIZE);
process(source);
}
void ReverbConvolverStage::process(const float* source, size_t framesToProcess)
void ReverbConvolverStage::process(const float* source)
{
MOZ_ASSERT(source);
if (!source)
return;
// Deal with pre-delay stream : note special handling of zero delay.
// Now, run the convolution (into the delay buffer).
// An expensive FFT will happen every fftSize / 2 frames.
const float* output = m_fftConvolver->process(m_fftKernel, source);
const float* preDelayedSource;
float* preDelayedDestination;
float* temporaryBuffer;
bool isTemporaryBufferSafe = false;
if (m_preDelayLength > 0) {
// Handles both the read case (call to process() ) and the write case (memcpy() )
bool isPreDelaySafe = m_preReadWriteIndex + framesToProcess <= m_preDelayBuffer.Length();
MOZ_ASSERT(isPreDelaySafe);
if (!isPreDelaySafe)
return;
isTemporaryBufferSafe = framesToProcess <= m_temporaryBuffer.Length();
preDelayedDestination = m_preDelayBuffer.Elements() + m_preReadWriteIndex;
preDelayedSource = preDelayedDestination;
temporaryBuffer = m_temporaryBuffer.Elements();
} else {
// Zero delay
preDelayedDestination = 0;
preDelayedSource = source;
temporaryBuffer = m_preDelayBuffer.Elements();
isTemporaryBufferSafe = framesToProcess <= m_preDelayBuffer.Length();
}
MOZ_ASSERT(isTemporaryBufferSafe);
if (!isTemporaryBufferSafe)
return;
if (m_framesProcessed < m_preDelayLength) {
// For the first m_preDelayLength frames don't process the convolver, instead simply buffer in the pre-delay.
// But while buffering the pre-delay, we still need to update our index.
m_accumulationBuffer->updateReadIndex(&m_accumulationReadIndex, framesToProcess);
} else {
// Now, run the convolution (into the delay buffer).
// An expensive FFT will happen every fftSize / 2 frames.
// We process in-place here...
if (!m_directMode)
m_fftConvolver->process(m_fftKernel, preDelayedSource, temporaryBuffer, framesToProcess);
else
m_directConvolver->process(&m_directKernel, preDelayedSource, temporaryBuffer, framesToProcess);
// Now accumulate into reverb's accumulation buffer.
m_accumulationBuffer->accumulate(temporaryBuffer, framesToProcess, &m_accumulationReadIndex, m_postDelayLength);
}
// Finally copy input to pre-delay.
if (m_preDelayLength > 0) {
memcpy(preDelayedDestination, source, sizeof(float) * framesToProcess);
m_preReadWriteIndex += framesToProcess;
MOZ_ASSERT(m_preReadWriteIndex <= m_preDelayLength);
if (m_preReadWriteIndex >= m_preDelayLength)
m_preReadWriteIndex = 0;
}
m_framesProcessed += framesToProcess;
}
void ReverbConvolverStage::reset()
{
if (!m_directMode)
m_fftConvolver->reset();
else
m_directConvolver->reset();
PodZero(m_preDelayBuffer.Elements(), m_preDelayBuffer.Length());
m_accumulationReadIndex = 0;
m_inputReadIndex = 0;
m_framesProcessed = 0;
// Now accumulate into reverb's accumulation buffer.
m_accumulationBuffer->accumulate(output, WEBAUDIO_BLOCK_SIZE,
&m_accumulationReadIndex,
m_postDelayLength);
}
} // namespace WebCore

View File

@ -29,7 +29,6 @@
#ifndef ReverbConvolverStage_h
#define ReverbConvolverStage_h
#include "DirectConvolver.h"
#include "FFTConvolver.h"
#include "nsTArray.h"
@ -49,14 +48,12 @@ class ReverbConvolverStage {
public:
// renderPhase is useful to know so that we can manipulate the pre versus post delay so that stages will perform
// their heavy work (FFT processing) on different slices to balance the load in a real-time thread.
ReverbConvolverStage(const float* impulseResponse, size_t responseLength, size_t reverbTotalLatency, size_t stageOffset, size_t stageLength, size_t fftSize, size_t renderPhase, size_t renderSliceSize, ReverbAccumulationBuffer*, bool directMode = false);
ReverbConvolverStage(const float* impulseResponse, size_t responseLength, size_t reverbTotalLatency, size_t stageOffset, size_t stageLength, size_t fftSize, size_t renderPhase, ReverbAccumulationBuffer*);
// WARNING: framesToProcess must be such that it evenly divides the delay buffer size (stage_offset).
void process(const float* source, size_t framesToProcess);
// |source| must point to an array of WEBAUDIO_BLOCK_SIZE elements.
void process(const float* source);
void processInBackground(ReverbConvolver* convolver, size_t framesToProcess);
void reset();
void processInBackground(ReverbConvolver* convolver);
// Useful for background processing
int inputReadIndex() const { return m_inputReadIndex; }
@ -67,22 +64,13 @@ private:
nsAutoPtr<FFTBlock> m_fftKernel;
nsAutoPtr<FFTConvolver> m_fftConvolver;
nsTArray<float> m_preDelayBuffer;
ReverbAccumulationBuffer* m_accumulationBuffer;
int m_accumulationReadIndex;
int m_inputReadIndex;
size_t m_preDelayLength;
size_t m_postDelayLength;
size_t m_preReadWriteIndex;
size_t m_framesProcessed;
nsTArray<float> m_temporaryBuffer;
bool m_directMode;
nsTArray<float> m_directKernel;
nsAutoPtr<DirectConvolver> m_directConvolver;
};
} // namespace WebCore

View File

@ -6,7 +6,6 @@
UNIFIED_SOURCES += [
'Biquad.cpp',
'DirectConvolver.cpp',
'DynamicsCompressor.cpp',
'DynamicsCompressorKernel.cpp',
'FFTConvolver.cpp',

View File

@ -12,4 +12,4 @@ skip-if = (!e10s || os != "win")
[browser_tabswitchbetweenplugins.js]
skip-if = (!e10s || os != "win")
[browser_pluginscroll.js]
skip-if = (!e10s || os != "win")
skip-if = (true || !e10s || os != "win") # Bug 1213631

View File

@ -68,9 +68,12 @@ Push.prototype = {
debug("askPermission");
return this.createPromise((resolve, reject) => {
function permissionDenied() {
reject("PermissionDeniedError");
}
let permissionDenied = () => {
reject(new this._window.DOMException(
"User denied permission to use the Push API",
"PermissionDeniedError"
));
};
let permission = Ci.nsIPermissionManager.UNKNOWN_ACTION;
try {
@ -190,7 +193,10 @@ PushEndpointCallback.prototype = {
onPushEndpoint: function(ok, endpoint, keyLen, key) {
let {pushManager} = this;
if (!Components.isSuccessCode(ok)) {
this.reject("AbortError");
this.reject(new pushManager._window.DOMException(
"Error retrieving push subscription",
"AbortError"
));
return;
}

View File

@ -426,12 +426,13 @@ public:
do_CreateInstance("@mozilla.org/push/PushClient;1");
if (!client) {
callback->OnUnsubscribe(NS_ERROR_FAILURE, false);
return NS_OK;
}
nsCOMPtr<nsIPrincipal> principal = mProxy->GetWorkerPrivate()->GetPrincipal();
if (NS_WARN_IF(NS_FAILED(client->Unsubscribe(mScope, principal, callback)))) {
callback->OnUnsubscribe(NS_ERROR_FAILURE, false);
return NS_ERROR_FAILURE;
return NS_OK;
}
return NS_OK;
}
@ -521,7 +522,7 @@ public:
promise->MaybeResolve(sub);
}
} else {
promise->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
promise->MaybeReject(NS_ERROR_DOM_PUSH_ABORT_ERR);
}
mProxy->CleanUp(aCx);
@ -647,7 +648,7 @@ public:
if (NS_WARN_IF(NS_FAILED(rv))) {
callback->OnPushEndpoint(NS_ERROR_FAILURE, EmptyString(), 0, nullptr);
return rv;
return NS_OK;
}
return NS_OK;
@ -677,7 +678,7 @@ WorkerPushManager::PerformSubscriptionAction(SubscriptionAction aAction, ErrorRe
RefPtr<PromiseWorkerProxy> proxy = PromiseWorkerProxy::Create(worker, p);
if (!proxy) {
p->MaybeReject(NS_ERROR_DOM_ABORT_ERR);
p->MaybeReject(NS_ERROR_DOM_PUSH_ABORT_ERR);
return p.forget();
}

View File

@ -49,7 +49,8 @@ http://creativecommons.org/licenses/publicdomain/
yield registration.pushManager.subscribe();
ok(false, "subscribe() should fail because no permission for push");
} catch (error) {
ok(true, "subscribe() could not register for push notification");
ok(error instanceof DOMException, "Wrong exception type");
is(error.name, "PermissionDeniedError", "Wrong exception name");
}
});

View File

@ -21,7 +21,7 @@ typedef SVGGraphicsElement SVGSwitchElementBase;
class SVGSwitchElement final : public SVGSwitchElementBase
{
friend class nsSVGSwitchFrame;
friend class ::nsSVGSwitchFrame;
protected:
friend nsresult (::NS_NewSVGSwitchElement(nsIContent **aResult,
already_AddRefed<mozilla::dom::NodeInfo>&& aNodeInfo));

View File

@ -66,9 +66,9 @@ UNIFIED_SOURCES += [
]
if CONFIG['ANDROID_VERSION'] >= '17':
CXXFLAGS += ['-I%s/frameworks/av/media/mtp' % CONFIG['ANDROID_SOURCE']]
LOCAL_INCLUDES += ['%' + '%s/frameworks/av/media/mtp' % CONFIG['ANDROID_SOURCE']]
else:
CXXFLAGS += ['-I%s/frameworks/base/media/mtp' % CONFIG['ANDROID_SOURCE']]
LOCAL_INCLUDES += ['%' + '%s/frameworks/base/media/mtp' % CONFIG['ANDROID_SOURCE']]
if CONFIG['ENABLE_TESTS']:
XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini']

View File

@ -129,7 +129,7 @@ if CONFIG['GNU_CXX']:
]
if CONFIG['MOZ_DIRECTX_SDK_PATH'] and not CONFIG['MOZ_HAS_WINSDK_WITH_D3D']:
CXXFLAGS += ['-I\'%s/include/\'' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
LOCAL_INCLUDES += ['%' + '%s/include/' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True
DEFINES['_HAS_EXCEPTIONS'] = 0

View File

@ -277,7 +277,7 @@ if CONFIG['GNU_CXX']:
]
if CONFIG['MOZ_DIRECTX_SDK_PATH'] and not CONFIG['MOZ_HAS_WINSDK_WITH_D3D']:
CXXFLAGS += ['-I\'%s/include/\'' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
LOCAL_INCLUDES += ['%' + '%s/include/' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True
DEFINES['_HAS_EXCEPTIONS'] = 0

View File

@ -33,7 +33,7 @@ if CONFIG['GNU_CXX']:
]
if CONFIG['MOZ_DIRECTX_SDK_PATH'] and not CONFIG['MOZ_HAS_WINSDK_WITH_D3D']:
CXXFLAGS += ['-I\'%s/include/\'' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
LOCAL_INCLUDES += ['%' + '%s/include/' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True
DEFINES['_HAS_EXCEPTIONS'] = 0

View File

@ -39,7 +39,7 @@ if CONFIG['GNU_CXX']:
]
if CONFIG['MOZ_DIRECTX_SDK_PATH'] and not CONFIG['MOZ_HAS_WINSDK_WITH_D3D']:
CXXFLAGS += ['-I\'%s/include/\'' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
LOCAL_INCLUDES += ['%' + '%s/include/' % CONFIG['MOZ_DIRECTX_SDK_PATH']]
DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True
DEFINES['_HAS_EXCEPTIONS'] = 0

View File

@ -9,8 +9,7 @@
#include "GLDefs.h"
#include "mozilla/gfx/Types.h"
#include "nsPoint.h"
class nsIntRegion;
#include "nsRegionFwd.h"
namespace mozilla {

View File

@ -86,7 +86,7 @@ if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk':
UNIFIED_SOURCES += ['SharedSurfaceGralloc.cpp']
EXPORTS += ['SharedSurfaceGralloc.h']
LOCAL_INCLUDES += ['/widget/gonk']
CXXFLAGS += ['-I%s/%s' % (CONFIG['ANDROID_SOURCE'], 'hardware/libhardware/include')]
LOCAL_INCLUDES += ['%' + '%s/%s' % (CONFIG['ANDROID_SOURCE'], 'hardware/libhardware/include')]
if gl_provider == 'CGL':
# These files include Mac headers that are unfriendly to unified builds

View File

@ -106,7 +106,6 @@
*/
class nsIWidget;
class nsIntRegion;
namespace mozilla {
namespace gfx {

View File

@ -8,9 +8,8 @@
#include <stdint.h> // for uint64_t
#include "nsRect.h" // for mozilla::gfx::IntRect
#include "nsRegionFwd.h" // for nsIntRegion
#include "nsTArray.h" // for nsTArray
class nsIntRegion;
namespace mozilla {
namespace layers {

View File

@ -161,8 +161,14 @@ SetDisplayPortMargins(nsIPresShell* aPresShell,
return;
}
bool hadDisplayPort = nsLayoutUtils::GetDisplayPort(aContent);
ScreenMargin margins = aMetrics.GetDisplayPortMargins();
nsLayoutUtils::SetDisplayPortMargins(aContent, aPresShell, margins, 0);
if (!hadDisplayPort) {
nsLayoutUtils::SetZeroMarginDisplayPortOnAsyncScrollableAncestors(
aContent->GetPrimaryFrame(), nsLayoutUtils::RepaintMode::Repaint);
}
CSSRect baseCSS = aMetrics.CalculateCompositedRectInCssPixels();
nsRect base(0, 0,
baseCSS.width * nsPresContext::AppUnitsPerCSSPixel(),
@ -237,6 +243,7 @@ APZCCallbackHelper::UpdateRootFrame(FrameMetrics& aMetrics)
// adjusts the display port margins, so do it before we set those.
ScrollFrame(content, aMetrics);
MOZ_ASSERT(nsLayoutUtils::GetDisplayPort(content));
SetDisplayPortMargins(shell, content, aMetrics);
}
@ -304,6 +311,8 @@ APZCCallbackHelper::InitializeRootDisplayport(nsIPresShell* aPresShell)
// nsRootBoxFrame::BuildDisplayList.
nsLayoutUtils::SetDisplayPortMargins(content, aPresShell, ScreenMargin(), 0,
nsLayoutUtils::RepaintMode::DoNotRepaint);
nsLayoutUtils::SetZeroMarginDisplayPortOnAsyncScrollableAncestors(
content->GetPrimaryFrame(), nsLayoutUtils::RepaintMode::DoNotRepaint);
}
}
@ -551,15 +560,6 @@ APZCCallbackHelper::FireSingleTapEvent(const LayoutDevicePoint& aPoint,
DispatchSynthesizedMouseEvent(eMouseUp, time, aPoint, aModifiers, aWidget);
}
static nsIScrollableFrame*
GetScrollableAncestorFrame(nsIFrame* aTarget)
{
uint32_t flags = nsLayoutUtils::SCROLLABLE_ALWAYS_MATCH_ROOT
| nsLayoutUtils::SCROLLABLE_ONLY_ASYNC_SCROLLABLE
| nsLayoutUtils::SCROLLABLE_FIXEDPOS_FINDS_ROOT;
return nsLayoutUtils::GetNearestScrollableFrame(aTarget, flags);
}
static dom::Element*
GetDisplayportElementFor(nsIScrollableFrame* aScrollableFrame)
{
@ -610,7 +610,7 @@ PrepareForSetTargetAPZCNotification(nsIWidget* aWidget,
nsIFrame* target =
nsLayoutUtils::GetFrameForPoint(aRootFrame, point, nsLayoutUtils::IGNORE_ROOT_SCROLL_FRAME);
nsIScrollableFrame* scrollAncestor = target
? GetScrollableAncestorFrame(target)
? nsLayoutUtils::GetAsyncScrollableAncestorFrame(target)
: aRootFrame->PresContext()->PresShell()->GetRootScrollFrameAsScrollable();
// Assuming that if there's no scrollAncestor, there's already a displayPort.
@ -651,8 +651,17 @@ PrepareForSetTargetAPZCNotification(nsIWidget* aWidget,
}
APZCCH_LOG("%p didn't have a displayport, so setting one...\n", dpElement.get());
return nsLayoutUtils::CalculateAndSetDisplayPortMargins(
bool activated = nsLayoutUtils::CalculateAndSetDisplayPortMargins(
scrollAncestor, nsLayoutUtils::RepaintMode::Repaint);
if (!activated) {
return false;
}
nsIFrame* frame = do_QueryFrame(scrollAncestor);
nsLayoutUtils::SetZeroMarginDisplayPortOnAsyncScrollableAncestors(frame,
nsLayoutUtils::RepaintMode::Repaint);
return true;
}
static void

View File

@ -17,8 +17,6 @@
#include "nsPrintfCString.h" // for nsPrintfCString
#include "nsString.h" // for nsAutoCString
class nsIntRegion;
#define BIAS_TIME_MS 1.0
namespace mozilla {

Some files were not shown because too many files have changed in this diff Show More