Merge m-i to m-c, a=merge
commit accaa766dd
@ -26,7 +26,7 @@ function getDefaultSettings() {
let chan = NetUtil.newChannel({
uri: NetUtil.newURI(settingsFile),
loadUsingSystemPrincipal: true});
let stream = chan.open();
let stream = chan.open2();
// Obtain a converter to read from a UTF-8 encoded input stream.
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
.createInstance(Ci.nsIScriptableUnicodeConverter);
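The hunk above shows the pattern repeated throughout this merge: channels are created with NetUtil.newChannel({ ..., loadUsingSystemPrincipal: true }) and opened with open2(), which enforces the security checks described by the channel's loadInfo, replacing the older open(). A minimal sketch of the resulting read path in chrome-privileged JS; NetUtil, Cc, Ci, and settingsFile are assumed to be in scope, and the JSON.parse step is illustrative rather than the exact body of getDefaultSettings():

let chan = NetUtil.newChannel({
  uri: NetUtil.newURI(settingsFile),   // nsIURI for the file to read
  loadUsingSystemPrincipal: true       // loadInfo carries the system principal
});
let stream = chan.open2();             // open2() applies the loadInfo checks
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
                  .createInstance(Ci.nsIScriptableUnicodeConverter);
converter.charset = "UTF-8";
let raw = NetUtil.readInputStreamToString(stream, stream.available());
let settings = JSON.parse(converter.ConvertToUnicode(raw));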
@ -28,6 +28,7 @@ PostMessageEvent::PostMessageEvent(nsGlobalWindow* aSource,
|
||||
const nsAString& aCallerOrigin,
|
||||
nsGlobalWindow* aTargetWindow,
|
||||
nsIPrincipal* aProvidedPrincipal,
|
||||
nsIDocument* aSourceDocument,
|
||||
bool aTrustedCaller)
|
||||
: StructuredCloneHolder(CloningSupported, TransferringSupported,
|
||||
SameProcessSameThread),
|
||||
@ -35,6 +36,7 @@ PostMessageEvent::PostMessageEvent(nsGlobalWindow* aSource,
|
||||
mCallerOrigin(aCallerOrigin),
|
||||
mTargetWindow(aTargetWindow),
|
||||
mProvidedPrincipal(aProvidedPrincipal),
|
||||
mSourceDocument(aSourceDocument),
|
||||
mTrustedCaller(aTrustedCaller)
|
||||
{
|
||||
MOZ_COUNT_CTOR(PostMessageEvent);
|
||||
@ -57,6 +59,12 @@ PostMessageEvent::Run()
|
||||
jsapi.Init();
|
||||
JSContext* cx = jsapi.cx();
|
||||
|
||||
// The document is just used for the principal mismatch error message below.
|
||||
// Use a stack variable so mSourceDocument is not held onto after this method
|
||||
// finishes, regardless of the method outcome.
|
||||
nsCOMPtr<nsIDocument> sourceDocument;
|
||||
sourceDocument.swap(mSourceDocument);
|
||||
|
||||
// If we bailed before this point we're going to leak mMessage, but
|
||||
// that's probably better than crashing.
|
||||
|
||||
@ -92,6 +100,20 @@ PostMessageEvent::Run()
// now. Long-term, we want HTML5 to address this so that we can
// be compliant while being safer.
if (!targetPrin->Equals(mProvidedPrincipal)) {
nsAutoString providedOrigin, targetOrigin;
nsresult rv = nsContentUtils::GetUTFOrigin(targetPrin, targetOrigin);
NS_ENSURE_SUCCESS(rv, rv);
rv = nsContentUtils::GetUTFOrigin(mProvidedPrincipal, providedOrigin);
NS_ENSURE_SUCCESS(rv, rv);

const char16_t* params[] = { providedOrigin.get(), targetOrigin.get() };

nsContentUtils::ReportToConsole(nsIScriptError::errorFlag,
NS_LITERAL_CSTRING("DOM Window"), sourceDocument,
nsContentUtils::eDOM_PROPERTIES,
"TargetPrincipalDoesNotMatch",
params, ArrayLength(params));

return NS_OK;
}
}
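For reference, the report added above is triggered by the origin check in window.postMessage(). A content-JS illustration with made-up origins: when the recipient window's origin does not match the targetOrigin argument, the message is dropped and TargetPrincipalDoesNotMatch is logged with both origins; a matching origin (or "*") delivers normally.

// Assume the iframe hosts a document from https://example.org.
var frame = document.querySelector("iframe");
// Mismatched target origin: not delivered, a console error is reported instead.
frame.contentWindow.postMessage("hello", "https://other.example");
// Matching target origin (or "*"): delivered to the frame's message handler.
frame.contentWindow.postMessage("hello", "https://example.org");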
@ -34,6 +34,7 @@ public:
|
||||
const nsAString& aCallerOrigin,
|
||||
nsGlobalWindow* aTargetWindow,
|
||||
nsIPrincipal* aProvidedPrincipal,
|
||||
nsIDocument* aSourceDocument,
|
||||
bool aTrustedCaller);
|
||||
|
||||
private:
|
||||
@ -43,6 +44,7 @@ private:
|
||||
nsString mCallerOrigin;
|
||||
RefPtr<nsGlobalWindow> mTargetWindow;
|
||||
nsCOMPtr<nsIPrincipal> mProvidedPrincipal;
|
||||
nsCOMPtr<nsIDocument> mSourceDocument;
|
||||
bool mTrustedCaller;
|
||||
};
|
||||
|
||||
|
@ -106,6 +106,7 @@
|
||||
#include "HTMLImageElement.h"
|
||||
#include "mozilla/css/ImageLoader.h"
|
||||
#include "mozilla/layers/APZCTreeManager.h" // for layers::ZoomToRectBehavior
|
||||
#include "mozilla/dom/Promise.h"
|
||||
|
||||
#ifdef XP_WIN
|
||||
#undef GetClassName
|
||||
@ -811,7 +812,7 @@ nsDOMWindowUtils::SendWheelEvent(float aX,
|
||||
|
||||
wheelEvent.refPoint = nsContentUtils::ToWidgetPoint(CSSPoint(aX, aY), offset, presContext);
|
||||
|
||||
widget->DispatchAPZAwareEvent(&wheelEvent);
|
||||
widget->DispatchInputEvent(&wheelEvent);
|
||||
|
||||
if (widget->AsyncPanZoomEnabled()) {
|
||||
// Computing overflow deltas is not compatible with APZ, so if APZ is
|
||||
@ -2150,26 +2151,29 @@ nsDOMWindowUtils::GetLayerManagerRemote(bool* retval)
|
||||
}
|
||||
|
||||
NS_IMETHODIMP
|
||||
nsDOMWindowUtils::GetSupportsHardwareH264Decoding(nsAString& aRetval)
|
||||
nsDOMWindowUtils::GetSupportsHardwareH264Decoding(JS::MutableHandle<JS::Value> aPromise)
|
||||
{
|
||||
nsCOMPtr<nsPIDOMWindow> window = do_QueryReferent(mWindow);
|
||||
NS_ENSURE_STATE(window);
|
||||
nsCOMPtr<nsIGlobalObject> parentObject = do_QueryInterface(window);
|
||||
NS_ENSURE_STATE(parentObject);
|
||||
#ifdef MOZ_FMP4
|
||||
nsCOMPtr<nsIWidget> widget = GetWidget();
|
||||
if (!widget)
|
||||
return NS_ERROR_FAILURE;
|
||||
|
||||
NS_ENSURE_STATE(widget);
|
||||
LayerManager *mgr = widget->GetLayerManager();
|
||||
if (!mgr)
|
||||
return NS_ERROR_FAILURE;
|
||||
|
||||
nsCString failureReason;
|
||||
if (MP4Decoder::IsVideoAccelerated(mgr->GetCompositorBackendType(), failureReason)) {
|
||||
aRetval.AssignLiteral("Yes");
|
||||
} else {
|
||||
aRetval.AssignLiteral("No; ");
|
||||
AppendUTF8toUTF16(failureReason, aRetval);
|
||||
}
|
||||
NS_ENSURE_STATE(mgr);
|
||||
RefPtr<Promise> promise =
|
||||
MP4Decoder::IsVideoAccelerated(mgr->GetCompositorBackendType(), parentObject);
|
||||
NS_ENSURE_STATE(promise);
|
||||
aPromise.setObject(*promise->GetWrapper());
|
||||
#else
|
||||
aRetval.AssignLiteral("No; Compiled without MP4 support.");
|
||||
ErrorResult rv;
|
||||
RefPtr<Promise> promise = Promise::Create(parentObject, rv);
|
||||
if (rv.Failed()) {
|
||||
return rv.StealNSResult();
|
||||
}
|
||||
promise->MaybeResolve(NS_LITERAL_STRING("No; Compiled without MP4 support."));
|
||||
aPromise.setObject(*promise->GetWrapper());
|
||||
#endif
|
||||
return NS_OK;
|
||||
}
|
||||
|
@ -7969,6 +7969,9 @@ nsGlobalWindow::PostMessageMozOuter(JSContext* aCx, JS::Handle<JS::Value> aMessa
|
||||
origin,
|
||||
this,
|
||||
providedPrincipal,
|
||||
callerInnerWin
|
||||
? callerInnerWin->GetDoc()
|
||||
: nullptr,
|
||||
nsContentUtils::IsCallerChrome());
|
||||
|
||||
JS::Rooted<JS::Value> message(aCx, aMessage);
|
||||
|
@ -4,7 +4,7 @@
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
Components.utils.import("resource://testing-common/httpd.js");
|
||||
Components.utils.import("resource://gre/modules/Services.jsm");
|
||||
Components.utils.import("resource://gre/modules/NetUtil.jsm");
|
||||
|
||||
const nsIDocumentEncoder = Components.interfaces.nsIDocumentEncoder;
|
||||
const replacementChar = Components.interfaces.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER;
|
||||
@ -17,23 +17,21 @@ function loadContentFile(aFile, aCharset) {
|
||||
var file = do_get_file(aFile);
|
||||
var ios = Components.classes['@mozilla.org/network/io-service;1']
|
||||
.getService(Components.interfaces.nsIIOService);
|
||||
var chann = ios.newChannelFromURI2(ios.newFileURI(file),
|
||||
null, // aLoadingNode
|
||||
Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
null, // aTriggeringPrincipal
|
||||
Components.interfaces.nsILoadInfo.SEC_NORMAL,
|
||||
Components.interfaces.nsIContentPolicy.TYPE_OTHER);
|
||||
var chann = NetUtil.newChannel({
|
||||
uri: ios.newFileURI(file),
|
||||
loadUsingSystemPrincipal: true
|
||||
});
|
||||
chann.contentCharset = aCharset;
|
||||
|
||||
/*var inputStream = Components.classes["@mozilla.org/scriptableinputstream;1"]
|
||||
.createInstance(Components.interfaces.nsIScriptableInputStream);
|
||||
inputStream.init(chann.open());
|
||||
inputStream.init(chann.open2());
|
||||
return inputStream.read(file.fileSize);
|
||||
*/
|
||||
|
||||
var inputStream = Components.classes["@mozilla.org/intl/converter-input-stream;1"]
|
||||
.createInstance(Components.interfaces.nsIConverterInputStream);
|
||||
inputStream.init(chann.open(), aCharset, 1024, replacementChar);
|
||||
inputStream.init(chann.open2(), aCharset, 1024, replacementChar);
|
||||
var str = {}, content = '';
|
||||
while (inputStream.readString(4096, str) != 0) {
|
||||
content += str.value;
|
||||
|
@ -1036,10 +1036,7 @@ class CGHeaders(CGWrapper):
|
||||
headerSet.add("mozilla/dom/Nullable.h")
|
||||
unrolled = t.unroll()
|
||||
if unrolled.isUnion():
|
||||
if len(config.filenamesPerUnion[unrolled.name]) > 1:
|
||||
headerSet.add("mozilla/dom/UnionTypes.h")
|
||||
else:
|
||||
headerSet.add(self.getDeclarationFilename(unrolled))
|
||||
headerSet.add(self.getUnionDeclarationFilename(config, unrolled))
|
||||
bindingHeaders.add("mozilla/dom/UnionConversions.h")
|
||||
elif unrolled.isDate():
|
||||
if dictionary or jsImplementedDescriptors:
|
||||
@ -1195,6 +1192,20 @@ class CGHeaders(CGWrapper):
|
||||
basename = os.path.basename(decl.filename())
|
||||
return basename.replace('.webidl', 'Binding.h')
|
||||
|
||||
@staticmethod
|
||||
def getUnionDeclarationFilename(config, unionType):
|
||||
assert unionType.isUnion()
|
||||
assert unionType.unroll() == unionType
|
||||
# If a union is "defined" in multiple files, it goes in UnionTypes.h.
|
||||
if len(config.filenamesPerUnion[unionType.name]) > 1:
|
||||
return "mozilla/dom/UnionTypes.h"
|
||||
# If a union is defined by a built-in typedef, it also goes in
|
||||
# UnionTypes.h.
|
||||
assert len(config.filenamesPerUnion[unionType.name]) == 1
|
||||
if "<unknown>" in config.filenamesPerUnion[unionType.name]:
|
||||
return "mozilla/dom/UnionTypes.h"
|
||||
return CGHeaders.getDeclarationFilename(unionType)
|
||||
|
||||
|
||||
def SortedDictValues(d):
|
||||
"""
|
||||
@ -1290,10 +1301,7 @@ def UnionTypes(unionTypes, config):
|
||||
# And add headers for the type we're parametrized over
|
||||
addHeadersForType(f.inner)
|
||||
|
||||
if len(config.filenamesPerUnion[t.name]) > 1:
|
||||
implheaders.add("mozilla/dom/UnionTypes.h")
|
||||
else:
|
||||
implheaders.add(CGHeaders.getDeclarationFilename(t))
|
||||
implheaders.add(CGHeaders.getUnionDeclarationFilename(config, t))
|
||||
for f in t.flatMemberTypes:
|
||||
assert not f.nullable()
|
||||
addHeadersForType(f)
|
||||
@ -1356,8 +1364,9 @@ def UnionConversions(unionTypes, config):
|
||||
# And the internal type of the MozMap
|
||||
addHeadersForType(f.inner, providers)
|
||||
|
||||
if len(config.filenamesPerUnion[t.name]) == 1:
|
||||
headers.add(CGHeaders.getDeclarationFilename(t))
|
||||
# We plan to include UnionTypes.h no matter what, so it's
|
||||
# OK if we throw it into the set here.
|
||||
headers.add(CGHeaders.getUnionDeclarationFilename(config, t))
|
||||
|
||||
for f in t.flatMemberTypes:
|
||||
addHeadersForType(f, providers)
|
||||
|
@ -150,7 +150,15 @@ class Configuration:
|
||||
if t.isUnion():
|
||||
filenamesForUnion = self.filenamesPerUnion[t.name]
|
||||
if t.filename() not in filenamesForUnion:
|
||||
if len(filenamesForUnion) == 0:
|
||||
# We have to be a bit careful: some of our built-in
|
||||
# typedefs are for unions, and those unions end up with
|
||||
# "<unknown>" as the filename. If that happens, we don't
|
||||
# want to try associating this union with one particular
|
||||
# filename, since there isn't one to associate it with,
|
||||
# really.
|
||||
if t.filename() == "<unknown>":
|
||||
uniqueFilenameForUnion = None
|
||||
elif len(filenamesForUnion) == 0:
|
||||
# This is the first file that we found a union with this
|
||||
# name in, record the union as part of the file.
|
||||
uniqueFilenameForUnion = t.filename()
|
||||
|
@ -159,6 +159,7 @@ CreateException(JSContext* aCx, nsresult aRv, const nsACString& aMessage)
|
||||
case NS_ERROR_MODULE_DOM_FILEHANDLE:
|
||||
case NS_ERROR_MODULE_DOM_BLUETOOTH:
|
||||
case NS_ERROR_MODULE_DOM_ANIM:
|
||||
case NS_ERROR_MODULE_DOM_PUSH:
|
||||
if (aMessage.IsEmpty()) {
|
||||
return DOMException::Create(aRv);
|
||||
}
|
||||
|
@ -862,12 +862,12 @@ CheckDBusReply(DBusMessage* aMsg, void* aServiceClass, bool aConnect)
|
||||
nsAutoString replyError;
|
||||
UnpackVoidMessage(aMsg, nullptr, v, replyError);
|
||||
|
||||
nsAutoPtr<BluetoothServiceClass> serviceClass(
|
||||
static_cast<BluetoothServiceClass*>(aServiceClass));
|
||||
BluetoothServiceClass serviceClass =
|
||||
static_cast<BluetoothServiceClass>(NS_PTR_TO_INT32(aServiceClass));
|
||||
|
||||
if (!replyError.IsEmpty()) {
|
||||
NS_DispatchToMainThread(
|
||||
new ReplyErrorToProfileManager(*serviceClass, aConnect, replyError));
|
||||
new ReplyErrorToProfileManager(serviceClass, aConnect, replyError));
|
||||
}
|
||||
}
|
||||
|
||||
@ -2592,7 +2592,7 @@ class SendAsyncDBusMessageTask : public Task
|
||||
{
|
||||
public:
|
||||
SendAsyncDBusMessageTask(DBusReplyCallback aCallback,
|
||||
BluetoothServiceClass* aServiceClass,
|
||||
BluetoothServiceClass aServiceClass,
|
||||
const nsACString& aObjectPath,
|
||||
const char* aInterface,
|
||||
const nsACString& aMessage)
|
||||
@ -2602,7 +2602,6 @@ public:
|
||||
, mInterface(aInterface)
|
||||
, mMessage(aMessage)
|
||||
{
|
||||
MOZ_ASSERT(mServiceClass);
|
||||
MOZ_ASSERT(!mObjectPath.IsEmpty());
|
||||
MOZ_ASSERT(!mInterface.IsEmpty());
|
||||
MOZ_ASSERT(!mMessage.IsEmpty());
|
||||
@ -2613,18 +2612,18 @@ public:
|
||||
MOZ_ASSERT(!NS_IsMainThread()); // I/O thread
|
||||
MOZ_ASSERT(sDBusConnection);
|
||||
|
||||
static_assert(sizeof(BluetoothServiceClass) <= sizeof(intptr_t),
|
||||
"BluetoothServiceClass cannot be passed via intptr_t");
|
||||
bool success = sDBusConnection->SendWithReply(
|
||||
mCallback, static_cast<void*>(mServiceClass), -1,
|
||||
mCallback, NS_INT32_TO_PTR(mServiceClass), -1,
|
||||
BLUEZ_DBUS_BASE_IFC, mObjectPath.get(), mInterface.get(),
|
||||
mMessage.get(), DBUS_TYPE_INVALID);
|
||||
NS_ENSURE_TRUE_VOID(success);
|
||||
|
||||
mServiceClass.forget();
|
||||
}
|
||||
|
||||
private:
|
||||
DBusReplyCallback mCallback;
|
||||
nsAutoPtr<BluetoothServiceClass> mServiceClass;
|
||||
BluetoothServiceClass mServiceClass;
|
||||
const nsCString mObjectPath;
|
||||
const nsCString mInterface;
|
||||
const nsCString mMessage;
|
||||
@ -2642,18 +2641,18 @@ BluetoothDBusService::SendAsyncDBusMessage(const nsAString& aObjectPath,
|
||||
MOZ_ASSERT(!aObjectPath.IsEmpty());
|
||||
MOZ_ASSERT(aInterface);
|
||||
|
||||
nsAutoPtr<BluetoothServiceClass> serviceClass(new BluetoothServiceClass());
|
||||
BluetoothServiceClass serviceClass;
|
||||
if (!strcmp(aInterface, DBUS_SINK_IFACE)) {
|
||||
*serviceClass = BluetoothServiceClass::A2DP;
|
||||
serviceClass = BluetoothServiceClass::A2DP;
|
||||
} else if (!strcmp(aInterface, DBUS_INPUT_IFACE)) {
|
||||
*serviceClass = BluetoothServiceClass::HID;
|
||||
serviceClass = BluetoothServiceClass::HID;
|
||||
} else {
|
||||
MOZ_ASSERT(false);
|
||||
return NS_ERROR_FAILURE;
|
||||
}
|
||||
|
||||
Task* task = new SendAsyncDBusMessageTask(aCallback,
|
||||
serviceClass.forget(),
|
||||
serviceClass,
|
||||
NS_ConvertUTF16toUTF8(aObjectPath),
|
||||
aInterface,
|
||||
NS_ConvertUTF16toUTF8(aMessage));
|
||||
|
@ -128,7 +128,7 @@ ContactDB.prototype = {
|
||||
uri: NetUtil.newURI(contactsFile),
|
||||
loadUsingSystemPrincipal: true});
|
||||
|
||||
let stream = chan.open();
|
||||
let stream = chan.open2();
|
||||
// Obtain a converter to read from a UTF-8 encoded input stream.
|
||||
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
|
||||
.createInstance(Ci.nsIScriptableUnicodeConverter);
|
||||
|
@ -49,7 +49,7 @@ interface nsIJSRAIIHelper;
|
||||
interface nsIContentPermissionRequest;
|
||||
interface nsIObserver;
|
||||
|
||||
[scriptable, uuid(ca6a458c-82e7-4979-886e-6d214eac6f0b)]
|
||||
[scriptable, uuid(46b44e33-13c2-4eb3-bf80-76a4e0857ccc)]
|
||||
interface nsIDOMWindowUtils : nsISupports {
|
||||
|
||||
/**
|
||||
@ -1334,11 +1334,12 @@ interface nsIDOMWindowUtils : nsISupports {
readonly attribute boolean layerManagerRemote;

/**
* True if we can initialize a hardware-backed h264 decoder for a simple
* test video, does not mean that all h264 video decoding will be done
* Returns a Promise that will be resolved with a string once the capabilities
* of the h264 decoder have been determined.
* Success does not mean that all h264 video decoding will be done
* in hardware.
*/
readonly attribute AString supportsHardwareH264Decoding;
readonly attribute jsval supportsHardwareH264Decoding;

/**
* Record (and return) frame-intervals for frames which were presented
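Since the attribute now holds a promise rather than an AString, callers have to wait for it to resolve. A hedged sketch of a mochitest consumer; obtaining nsIDOMWindowUtils through SpecialPowers.getDOMWindowUtils is an assumption of this example, not part of the patch, and the exact wrapper behaviour of the returned promise may differ:

var utils = SpecialPowers.getDOMWindowUtils(window);
utils.supportsHardwareH264Decoding.then(function(result) {
  // result is a string such as "Yes" or "No; <failure reason>".
  info("Hardware H264 decoding: " + result);
});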
@ -1795,35 +1795,37 @@ TabChild::RecvMouseEvent(const nsString& aType,
|
||||
}
|
||||
|
||||
bool
|
||||
TabChild::RecvRealMouseMoveEvent(const WidgetMouseEvent& event,
|
||||
TabChild::RecvRealMouseMoveEvent(const WidgetMouseEvent& aEvent,
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const uint64_t& aInputBlockId)
|
||||
{
|
||||
return RecvRealMouseButtonEvent(event, aGuid, aInputBlockId);
|
||||
return RecvRealMouseButtonEvent(aEvent, aGuid, aInputBlockId);
|
||||
}
|
||||
|
||||
bool
|
||||
TabChild::RecvSynthMouseMoveEvent(const WidgetMouseEvent& event,
|
||||
TabChild::RecvSynthMouseMoveEvent(const WidgetMouseEvent& aEvent,
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const uint64_t& aInputBlockId)
|
||||
{
|
||||
return RecvRealMouseButtonEvent(event, aGuid, aInputBlockId);
|
||||
return RecvRealMouseButtonEvent(aEvent, aGuid, aInputBlockId);
|
||||
}
|
||||
|
||||
bool
|
||||
TabChild::RecvRealMouseButtonEvent(const WidgetMouseEvent& event,
|
||||
TabChild::RecvRealMouseButtonEvent(const WidgetMouseEvent& aEvent,
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const uint64_t& aInputBlockId)
|
||||
{
|
||||
nsEventStatus unused;
|
||||
InputAPZContext context(aGuid, aInputBlockId, unused);
|
||||
|
||||
WidgetMouseEvent localEvent(event);
|
||||
WidgetMouseEvent localEvent(aEvent);
|
||||
localEvent.widget = mPuppetWidget;
|
||||
APZCCallbackHelper::ApplyCallbackTransform(localEvent, aGuid,
|
||||
mPuppetWidget->GetDefaultScale());
|
||||
APZCCallbackHelper::DispatchWidgetEvent(localEvent);
|
||||
|
||||
if (event.mFlags.mHandledByAPZ) {
|
||||
mAPZEventState->ProcessMouseEvent(event, aGuid, aInputBlockId);
|
||||
if (aEvent.mFlags.mHandledByAPZ) {
|
||||
mAPZEventState->ProcessMouseEvent(aEvent, aGuid, aInputBlockId);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -1839,16 +1841,18 @@ TabChild::RecvMouseWheelEvent(const WidgetWheelEvent& aEvent,
|
||||
mPuppetWidget, document, aEvent, aGuid, aInputBlockId);
|
||||
}
|
||||
|
||||
WidgetWheelEvent event(aEvent);
|
||||
event.widget = mPuppetWidget;
|
||||
APZCCallbackHelper::DispatchWidgetEvent(event);
|
||||
WidgetWheelEvent localEvent(aEvent);
|
||||
localEvent.widget = mPuppetWidget;
|
||||
APZCCallbackHelper::ApplyCallbackTransform(localEvent, aGuid,
|
||||
mPuppetWidget->GetDefaultScale());
|
||||
APZCCallbackHelper::DispatchWidgetEvent(localEvent);
|
||||
|
||||
if (event.mCanTriggerSwipe) {
|
||||
SendRespondStartSwipeEvent(aInputBlockId, event.TriggersSwipe());
|
||||
if (localEvent.mCanTriggerSwipe) {
|
||||
SendRespondStartSwipeEvent(aInputBlockId, localEvent.TriggersSwipe());
|
||||
}
|
||||
|
||||
if (aEvent.mFlags.mHandledByAPZ) {
|
||||
mAPZEventState->ProcessWheelEvent(event, aGuid, aInputBlockId);
|
||||
mAPZEventState->ProcessWheelEvent(localEvent, aGuid, aInputBlockId);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -1450,7 +1450,7 @@ bool TabParent::RecvDispatchWheelEvent(const mozilla::WidgetWheelEvent& aEvent)
|
||||
localEvent.widget = widget;
|
||||
localEvent.refPoint -= GetChildProcessOffset();
|
||||
|
||||
widget->DispatchAPZAwareEvent(&localEvent);
|
||||
widget->DispatchInputEvent(&localEvent);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -190,3 +190,5 @@ InterceptionRejectedResponseWithURL=Failed to load '%1$S'. A ServiceWorker passe
|
||||
InterceptedNonResponseWithURL=Failed to load '%1$S'. A ServiceWorker passed a promise to FetchEvent.respondWith() that resolved with non-Response value '%2$S'.
|
||||
ExecCommandCutCopyDeniedNotInputDriven=document.execCommand('cut'/'copy') was denied because it was not called from inside a short running user-generated event handler.
|
||||
PatternAttributeCompileFailure=Unable to check <input pattern='%S'> because the pattern is not a valid regexp: %S
|
||||
# LOCALIZATION NOTE: Do not translate "postMessage" or DOMWindow. %S values are origins, like https://domain.com:port
|
||||
TargetPrincipalDoesNotMatch=Failed to execute 'postMessage' on 'DOMWindow': The target origin provided ('%S') does not match the recipient window's origin ('%S').
|
||||
|
@ -8,7 +8,7 @@
|
||||
|
||||
#include <mozilla/PodOperations.h>
|
||||
#include <mozilla/Assertions.h>
|
||||
#include <nsAutoPtr.h>
|
||||
#include <mozilla/UniquePtr.h>
|
||||
#include <AudioSampleFormat.h>
|
||||
|
||||
// Enable this to warn when `Output` has been called but not enough data was
|
||||
@ -62,8 +62,8 @@ public:
|
||||
// the exact right size in order to not waste space.
|
||||
uint32_t newLength = AvailableSamples() + inputSamples;
|
||||
uint32_t toCopy = AvailableSamples();
|
||||
nsAutoPtr<InputType> oldStorage = mStorage;
|
||||
mStorage = new InputType[newLength];
|
||||
UniquePtr<InputType[]> oldStorage = mozilla::Move(mStorage);
|
||||
mStorage = mozilla::MakeUnique<InputType[]>(newLength);
|
||||
// Copy the old data at the beginning of the new storage.
|
||||
if (WriteIndex() >= ReadIndex()) {
|
||||
PodCopy(mStorage.get(),
|
||||
@ -186,7 +186,7 @@ private:
|
||||
uint64_t mReadIndex;
|
||||
uint64_t mWriteIndex;
|
||||
// Storage for the samples
|
||||
nsAutoPtr<InputType> mStorage;
|
||||
mozilla::UniquePtr<InputType[]> mStorage;
|
||||
// Length of the buffer, in samples
|
||||
uint32_t mLength;
|
||||
};
|
||||
|
@ -182,23 +182,46 @@ MP4Decoder::IsEnabled()
|
||||
return Preferences::GetBool("media.mp4.enabled");
|
||||
}
|
||||
|
||||
// sTestH264ExtraData represents the content of the avcC atom found in
|
||||
// an AVC1 h264 video. It contains the H264 SPS and PPS NAL.
|
||||
// The structure of the avcC atom is as follows:
|
||||
// write(0x1); // version, always 1
|
||||
// write(sps[0].data[1]); // profile
|
||||
// write(sps[0].data[2]); // compatibility
|
||||
// write(sps[0].data[3]); // level
|
||||
// write(0xFC | 3); // reserved (6 bits), NALU length size - 1 (2 bits)
|
||||
// write(0xE0 | 1); // reserved (3 bits), num of SPS (5 bits)
|
||||
// write_word(sps[0].size); // 2 bytes for length of SPS
|
||||
// for(size_t i=0 ; i < sps[0].size ; ++i)
|
||||
// write(sps[0].data[i]); // data of SPS
|
||||
// write(&b, pps.size()); // num of PPS
|
||||
// for(size_t i=0 ; i < pps.size() ; ++i) {
|
||||
// write_word(pps[i].size); // 2 bytes for length of PPS
|
||||
// for(size_t j=0 ; j < pps[i].size ; ++j)
|
||||
// write(pps[i].data[j]); // data of PPS
|
||||
// }
|
||||
// }
|
||||
// here we have an h264 Baseline, 640x360
|
||||
// We use a 640x360 extradata, as some video frameworks (Apple VT) will never
// attempt to use hardware decoding for small videos.
|
||||
static const uint8_t sTestH264ExtraData[] = {
|
||||
0x01, 0x64, 0x00, 0x0a, 0xff, 0xe1, 0x00, 0x17, 0x67, 0x64,
|
||||
0x00, 0x0a, 0xac, 0xd9, 0x44, 0x26, 0x84, 0x00, 0x00, 0x03,
|
||||
0x00, 0x04, 0x00, 0x00, 0x03, 0x00, 0xc8, 0x3c, 0x48, 0x96,
|
||||
0x58, 0x01, 0x00, 0x06, 0x68, 0xeb, 0xe3, 0xcb, 0x22, 0xc0
|
||||
0x01, 0x42, 0xc0, 0x1e, 0xff, 0xe1, 0x00, 0x17, 0x67, 0x42,
|
||||
0xc0, 0x1e, 0xbb, 0x40, 0x50, 0x17, 0xfc, 0xb8, 0x08, 0x80,
|
||||
0x00, 0x00, 0x32, 0x00, 0x00, 0x0b, 0xb5, 0x07, 0x8b, 0x17,
|
||||
0x50, 0x01, 0x00, 0x04, 0x68, 0xce, 0x32, 0xc8
|
||||
};
|
||||
|
||||
static already_AddRefed<MediaDataDecoder>
|
||||
CreateTestH264Decoder(layers::LayersBackend aBackend,
|
||||
VideoInfo& aConfig)
|
||||
VideoInfo& aConfig,
|
||||
FlushableTaskQueue* aTaskQueue)
|
||||
{
|
||||
aConfig.mMimeType = "video/avc";
|
||||
aConfig.mId = 1;
|
||||
aConfig.mDuration = 40000;
|
||||
aConfig.mMediaTime = 0;
|
||||
aConfig.mDisplay = nsIntSize(64, 64);
|
||||
aConfig.mImage = nsIntRect(0, 0, 64, 64);
|
||||
aConfig.mDisplay = nsIntSize(640, 360);
|
||||
aConfig.mImage = nsIntRect(0, 0, 640, 360);
|
||||
aConfig.mExtraData = new MediaByteBuffer();
|
||||
aConfig.mExtraData->AppendElements(sTestH264ExtraData,
|
||||
MOZ_ARRAY_LENGTH(sTestH264ExtraData));
|
||||
@ -207,23 +230,63 @@ CreateTestH264Decoder(layers::LayersBackend aBackend,
|
||||
|
||||
RefPtr<PDMFactory> platform = new PDMFactory();
|
||||
RefPtr<MediaDataDecoder> decoder(
|
||||
platform->CreateDecoder(aConfig, nullptr, nullptr, aBackend, nullptr));
|
||||
platform->CreateDecoder(aConfig, aTaskQueue, nullptr, aBackend, nullptr));
|
||||
|
||||
return decoder.forget();
|
||||
}
|
||||
|
||||
/* static */ bool
|
||||
MP4Decoder::IsVideoAccelerated(layers::LayersBackend aBackend, nsACString& aFailureReason)
|
||||
/* static */ already_AddRefed<dom::Promise>
|
||||
MP4Decoder::IsVideoAccelerated(layers::LayersBackend aBackend, nsIGlobalObject* aParent)
|
||||
{
|
||||
VideoInfo config;
|
||||
RefPtr<MediaDataDecoder> decoder(CreateTestH264Decoder(aBackend, config));
|
||||
if (!decoder) {
|
||||
aFailureReason.AssignLiteral("Failed to create H264 decoder");
|
||||
return false;
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
|
||||
ErrorResult rv;
|
||||
RefPtr<dom::Promise> promise;
|
||||
promise = dom::Promise::Create(aParent, rv);
|
||||
if (rv.Failed()) {
|
||||
rv.SuppressException();
|
||||
return nullptr;
|
||||
}
|
||||
bool result = decoder->IsHardwareAccelerated(aFailureReason);
|
||||
decoder->Shutdown();
|
||||
return result;
|
||||
|
||||
RefPtr<FlushableTaskQueue> taskQueue =
|
||||
new FlushableTaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER));
|
||||
VideoInfo config;
|
||||
RefPtr<MediaDataDecoder> decoder(CreateTestH264Decoder(aBackend, config, taskQueue));
|
||||
if (!decoder) {
|
||||
taskQueue->BeginShutdown();
|
||||
taskQueue->AwaitShutdownAndIdle();
|
||||
promise->MaybeResolve(NS_LITERAL_STRING("No; Failed to create H264 decoder"));
|
||||
return promise.forget();
|
||||
}
|
||||
|
||||
decoder->Init()
|
||||
->Then(AbstractThread::MainThread(), __func__,
|
||||
[promise, decoder, taskQueue] (TrackInfo::TrackType aTrack) {
|
||||
nsCString failureReason;
|
||||
bool ok = decoder->IsHardwareAccelerated(failureReason);
|
||||
nsAutoString result;
|
||||
if (ok) {
|
||||
result.AssignLiteral("Yes");
|
||||
} else {
|
||||
result.AssignLiteral("No");
|
||||
if (failureReason.Length()) {
|
||||
result.AppendLiteral("; ");
|
||||
AppendUTF8toUTF16(failureReason, result);
|
||||
}
|
||||
}
|
||||
decoder->Shutdown();
|
||||
taskQueue->BeginShutdown();
|
||||
taskQueue->AwaitShutdownAndIdle();
|
||||
promise->MaybeResolve(result);
|
||||
},
|
||||
[promise, decoder, taskQueue] (MediaDataDecoder::DecoderFailureReason aResult) {
|
||||
decoder->Shutdown();
|
||||
taskQueue->BeginShutdown();
|
||||
taskQueue->AwaitShutdownAndIdle();
|
||||
promise->MaybeResolve(NS_LITERAL_STRING("No; Failed to initialize H264 decoder"));
|
||||
});
|
||||
|
||||
return promise.forget();
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -8,6 +8,7 @@
|
||||
|
||||
#include "MediaDecoder.h"
|
||||
#include "MediaFormatReader.h"
|
||||
#include "mozilla/dom/Promise.h"
|
||||
|
||||
namespace mozilla {
|
||||
|
||||
@ -38,7 +39,8 @@ public:
|
||||
// Returns true if the MP4 backend is preffed on.
|
||||
static bool IsEnabled();
|
||||
|
||||
static bool IsVideoAccelerated(layers::LayersBackend aBackend, nsACString& aReason);
|
||||
static already_AddRefed<dom::Promise>
|
||||
IsVideoAccelerated(layers::LayersBackend aBackend, nsIGlobalObject* aParent);
|
||||
|
||||
void GetMozDebugReaderData(nsAString& aString) override;
|
||||
|
||||
|
dom/media/test/external/MANIFEST.in (vendored)
@ -1,4 +1,5 @@
|
||||
exclude MANIFEST.in
|
||||
include external-media-tests-requirements.txt
|
||||
recursive-include external_media_harness *
|
||||
recursive-include external_media_tests *
|
||||
|
||||
|
@ -640,10 +640,11 @@ nsPicoService::LoadEngine(PicoVoice* aVoice)
|
||||
}
|
||||
|
||||
if (!mPicoMemArea) {
|
||||
mPicoMemArea = new uint8_t[PICO_MEM_SIZE];
|
||||
mPicoMemArea = MakeUnique<uint8_t[]>(PICO_MEM_SIZE);
|
||||
}
|
||||
|
||||
status = sPicoApi.pico_initialize(mPicoMemArea, PICO_MEM_SIZE, &mPicoSystem);
|
||||
status = sPicoApi.pico_initialize(mPicoMemArea.get(),
|
||||
PICO_MEM_SIZE, &mPicoSystem);
|
||||
PICO_ENSURE_SUCCESS_VOID("pico_initialize", status);
|
||||
|
||||
status = sPicoApi.pico_loadResource(mPicoSystem, aVoice->mTaFile.get(), &mTaResource);
|
||||
|
@ -8,7 +8,6 @@
|
||||
#define nsPicoService_h
|
||||
|
||||
#include "mozilla/Mutex.h"
|
||||
#include "nsAutoPtr.h"
|
||||
#include "nsTArray.h"
|
||||
#include "nsIObserver.h"
|
||||
#include "nsIThread.h"
|
||||
@ -16,6 +15,7 @@
|
||||
#include "nsRefPtrHashtable.h"
|
||||
#include "mozilla/StaticPtr.h"
|
||||
#include "mozilla/Monitor.h"
|
||||
#include "mozilla/UniquePtr.h"
|
||||
|
||||
namespace mozilla {
|
||||
namespace dom {
|
||||
@ -82,7 +82,7 @@ private:
|
||||
|
||||
pico_Resource mTaResource;
|
||||
|
||||
nsAutoPtr<uint8_t> mPicoMemArea;
|
||||
mozilla::UniquePtr<uint8_t[]> mPicoMemArea;
|
||||
|
||||
static StaticRefPtr<nsPicoService> sSingleton;
|
||||
};
|
||||
|
@ -656,6 +656,8 @@ NotificationPermissionRequest::ResolvePromise()
|
||||
mCallback->Call(mPermission, error);
|
||||
rv = error.StealNSResult();
|
||||
}
|
||||
Telemetry::Accumulate(
|
||||
Telemetry::WEB_NOTIFICATION_REQUEST_PERMISSION_CALLBACK, !!mCallback);
|
||||
mPromise->MaybeResolve(mPermission);
|
||||
return rv;
|
||||
}
|
||||
|
@ -105,7 +105,7 @@ public:
|
||||
if (NS_SUCCEEDED(aStatus)) {
|
||||
mPromise->MaybeResolve(aSuccess);
|
||||
} else {
|
||||
mPromise->MaybeReject(NS_ERROR_DOM_NETWORK_ERR);
|
||||
mPromise->MaybeReject(NS_ERROR_DOM_PUSH_SERVICE_UNREACHABLE);
|
||||
}
|
||||
|
||||
return NS_OK;
|
||||
@ -403,7 +403,7 @@ public:
|
||||
if (NS_SUCCEEDED(mStatus)) {
|
||||
promise->MaybeResolve(mSuccess);
|
||||
} else {
|
||||
promise->MaybeReject(NS_ERROR_DOM_NETWORK_ERR);
|
||||
promise->MaybeReject(NS_ERROR_DOM_PUSH_SERVICE_UNREACHABLE);
|
||||
}
|
||||
|
||||
mProxy->CleanUp(aCx);
|
||||
@ -528,7 +528,7 @@ WorkerPushSubscription::Unsubscribe(ErrorResult &aRv)
|
||||
|
||||
RefPtr<PromiseWorkerProxy> proxy = PromiseWorkerProxy::Create(worker, p);
|
||||
if (!proxy) {
|
||||
p->MaybeReject(NS_ERROR_DOM_NETWORK_ERR);
|
||||
p->MaybeReject(NS_ERROR_DOM_PUSH_SERVICE_UNREACHABLE);
|
||||
return p.forget();
|
||||
}
|
||||
|
||||
@ -598,6 +598,8 @@ public:
|
||||
mRawP256dhKey, mAuthSecret);
|
||||
promise->MaybeResolve(sub);
|
||||
}
|
||||
} else if (NS_ERROR_GET_MODULE(mStatus) == NS_ERROR_MODULE_DOM_PUSH ) {
|
||||
promise->MaybeReject(mStatus);
|
||||
} else {
|
||||
promise->MaybeReject(NS_ERROR_DOM_PUSH_ABORT_ERR);
|
||||
}
|
||||
@ -776,7 +778,7 @@ public:
|
||||
callback->OnPushSubscriptionError(NS_OK);
|
||||
return NS_OK;
|
||||
}
|
||||
callback->OnPushSubscriptionError(NS_ERROR_FAILURE);
|
||||
callback->OnPushSubscriptionError(NS_ERROR_DOM_PUSH_DENIED_ERR);
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
|
@ -4,20 +4,18 @@
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
|
||||
<script>
|
||||
|
||||
|
||||
function waitOnPushMessage(pushSubscription)
|
||||
{
|
||||
var p = new Promise(function(res, rej) {
|
||||
navigator.serviceWorker.onmessage = function(e) {
|
||||
if (e.data.type == "finished") {
|
||||
function waitOnWorkerMessage(type) {
|
||||
return new Promise(function(res, rej) {
|
||||
function onMessage(e) {
|
||||
if (e.data.type == type) {
|
||||
navigator.serviceWorker.removeEventListener("message", onMessage);
|
||||
(e.data.okay == "yes" ? res : rej)(e.data);
|
||||
}
|
||||
};
|
||||
}
|
||||
navigator.serviceWorker.addEventListener("message", onMessage);
|
||||
});
|
||||
return p;
|
||||
}
|
||||
|
||||
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
|
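waitOnWorkerMessage(type) above generalizes the old waitOnPushMessage(): callers name the worker message they expect ("finished" for push delivery, "changed" for pushsubscriptionchange, see worker.js further down). A small sketch of how a test can race it against the request that triggers the push, assuming controlledFrame came from injectControlledFrame() in test_utils.js and payload is a placeholder:

function waitForPush(pushSubscription, payload) {
  return Promise.all([
    controlledFrame.waitOnWorkerMessage("finished"),
    fetch("http://mochi.test:8888/tests/dom/push/test/push-server.sjs", {
      method: "PUT",
      headers: {
        "X-Push-Method": "POST",
        "X-Push-Server": pushSubscription.endpoint,
      },
      body: payload,
    }),
  ]).then(([message]) => message);
}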
@ -6,6 +6,7 @@ support-files =
|
||||
frame.html
|
||||
webpush.js
|
||||
lifetime_worker.js
|
||||
test_utils.js
|
||||
|
||||
[test_has_permissions.html]
|
||||
skip-if = os == "android" || toolkit == "gonk"
|
||||
@ -21,6 +22,8 @@ skip-if = os == "android" || toolkit == "gonk"
|
||||
skip-if = os == "android" || toolkit == "gonk"
|
||||
[test_multiple_register_different_scope.html]
|
||||
skip-if = os == "android" || toolkit == "gonk"
|
||||
[test_subscription_change.html]
|
||||
skip-if = os == "android" || toolkit == "gonk"
|
||||
[test_data.html]
|
||||
skip-if = os == "android" || toolkit == "gonk"
|
||||
# Disabled for too many intermittent failures (bug 1164432)
|
||||
|
@ -11,6 +11,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
<title>Test for Bug 1185544</title>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SpawnTask.js"></script>
|
||||
<script type="text/javascript" src="/tests/dom/push/test/test_utils.js"></script>
|
||||
<script type="text/javascript" src="/tests/dom/push/test/webpush.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
|
||||
@ -25,25 +26,10 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
<script class="testbody" type="text/javascript">
|
||||
|
||||
SimpleTest.registerCleanupFunction(() =>
|
||||
new Promise(resolve => SpecialPowers.popPermissions(resolve))
|
||||
);
|
||||
|
||||
var registration;
|
||||
add_task(function* start() {
|
||||
yield new Promise(resolve => {
|
||||
SpecialPowers.pushPermissions([
|
||||
{ type: "desktop-notification", allow: true, context: document },
|
||||
], resolve);
|
||||
});
|
||||
yield new Promise(resolve => {
|
||||
SpecialPowers.pushPrefEnv({"set": [
|
||||
["dom.push.enabled", true],
|
||||
["dom.serviceWorkers.exemptFromPerDomainMax", true],
|
||||
["dom.serviceWorkers.enabled", true],
|
||||
["dom.serviceWorkers.testing.enabled", true]
|
||||
]}, resolve);
|
||||
});
|
||||
yield setupPrefs();
|
||||
yield setPushPermission(true);
|
||||
|
||||
var url = "worker.js" + "?" + (Math.random());
|
||||
registration = yield navigator.serviceWorker.register(url, {scope: "."});
|
||||
@ -51,15 +37,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
var controlledFrame;
|
||||
add_task(function* createControlledIFrame() {
|
||||
yield new Promise(function(res, rej) {
|
||||
var iframe = document.createElement('iframe');
|
||||
iframe.id = "controlledFrame";
|
||||
iframe.src = "http://mochi.test:8888/tests/dom/push/test/frame.html";
|
||||
|
||||
iframe.onload = () => res();
|
||||
controlledFrame = iframe;
|
||||
document.body.appendChild(iframe);
|
||||
});
|
||||
controlledFrame = yield injectControlledFrame();
|
||||
});
|
||||
|
||||
var pushSubscription;
|
||||
@ -67,16 +45,6 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
pushSubscription = yield registration.pushManager.subscribe();
|
||||
});
|
||||
|
||||
function sendRequestToWorker(request) {
|
||||
return new Promise((resolve, reject) => {
|
||||
var channel = new MessageChannel();
|
||||
channel.port1.onmessage = e => {
|
||||
(e.data.error ? reject : resolve)(e.data);
|
||||
};
|
||||
registration.active.postMessage(request, [channel.port2]);
|
||||
});
|
||||
}
|
||||
|
||||
function base64UrlDecode(s) {
|
||||
s = s.replace(/-/g, '+').replace(/_/g, '/');
|
||||
|
||||
@ -138,7 +106,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
function waitForMessage(pushSubscription, message) {
|
||||
return Promise.all([
|
||||
controlledFrame.contentWindow.waitOnPushMessage(pushSubscription),
|
||||
controlledFrame.waitOnWorkerMessage("finished"),
|
||||
webpush(pushSubscription, message),
|
||||
]).then(([message]) => message);
|
||||
}
|
||||
@ -190,11 +158,10 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
reader.readAsText(message.data.blob);
|
||||
});
|
||||
is(text, "Hi! \ud83d\udc40", "Wrong blob data for message with emoji");
|
||||
is(text, "Hi! \ud83d\udc40", "Wrong blob data for message with emoji");
|
||||
|
||||
// Send a blank message.
|
||||
var [message] = yield Promise.all([
|
||||
controlledFrame.contentWindow.waitOnPushMessage(pushSubscription),
|
||||
controlledFrame.waitOnWorkerMessage("finished"),
|
||||
fetch("http://mochi.test:8888/tests/dom/push/test/push-server.sjs", {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
@ -207,8 +174,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
});
|
||||
|
||||
add_task(function* unsubscribe() {
|
||||
controlledFrame.parentNode.removeChild(controlledFrame);
|
||||
controlledFrame = null;
|
||||
controlledFrame.remove();
|
||||
yield pushSubscription.unsubscribe();
|
||||
});
|
||||
|
||||
|
@ -11,6 +11,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
<title>Test for Bug 1038811</title>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SpawnTask.js"></script>
|
||||
<script type="text/javascript" src="/tests/dom/push/test/test_utils.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
|
||||
</head>
|
||||
@ -30,21 +31,14 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
var registration;
|
||||
add_task(function* start() {
|
||||
SpecialPowers.addPermission("desktop-notification", false, document);
|
||||
yield new Promise(resolve => {
|
||||
SpecialPowers.pushPrefEnv({"set": [
|
||||
["dom.push.enabled", true],
|
||||
["dom.serviceWorkers.exemptFromPerDomainMax", true],
|
||||
["dom.serviceWorkers.enabled", true],
|
||||
["dom.serviceWorkers.testing.enabled", true]
|
||||
]}, resolve);
|
||||
});
|
||||
yield setupPrefs();
|
||||
yield setPushPermission(false);
|
||||
|
||||
var url = "worker.js" + "?" + Math.random();
|
||||
registration = yield navigator.serviceWorker.register(url, {scope: "."});
|
||||
});
|
||||
|
||||
add_task(function* setupPushNotification() {
|
||||
add_task(function* denySubscribe() {
|
||||
try {
|
||||
yield registration.pushManager.subscribe();
|
||||
ok(false, "subscribe() should fail because no permission for push");
|
||||
@ -54,6 +48,17 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
}
|
||||
});
|
||||
|
||||
add_task(function* denySubscribeInWorker() {
|
||||
// If permission is revoked, `getSubscription()` should return `null`, and
|
||||
// `subscribe()` should reject immediately. Calling these from the worker
|
||||
// should not deadlock the main thread (see bug 1228723).
|
||||
var errorInfo = yield sendRequestToWorker({
|
||||
type: "denySubscribe",
|
||||
});
|
||||
ok(errorInfo.isDOMException, "Wrong exception type");
|
||||
is(errorInfo.name, "PermissionDeniedError", "Wrong exception name");
|
||||
});
|
||||
|
||||
add_task(function* getEndpoint() {
|
||||
var pushSubscription = yield registration.pushManager.getSubscription();
|
||||
is(pushSubscription, null, "getSubscription() should return null because no permission for push");
|
||||
@ -75,12 +80,7 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
state: "prompt",
|
||||
}];
|
||||
for (var test of tests) {
|
||||
if (test.action == permissionManager.UNKNOWN_ACTION) {
|
||||
SpecialPowers.removePermission("desktop-notification", document);
|
||||
} else {
|
||||
SpecialPowers.addPermission("desktop-notification",
|
||||
test.action, document);
|
||||
}
|
||||
yield setPushPermission(test.action);
|
||||
var state = yield registration.pushManager.permissionState();
|
||||
is(state, test.state, JSON.stringify(test));
|
||||
}
|
||||
|
@ -10,6 +10,8 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
<head>
|
||||
<title>Test for Bug 1038811</title>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SpawnTask.js"></script>
|
||||
<script type="text/javascript" src="/tests/dom/push/test/test_utils.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
|
||||
</head>
|
||||
@ -27,112 +29,63 @@ http://creativecommons.org/licenses/publicdomain/
|
||||
// console.log(str + "\n");
|
||||
}
|
||||
|
||||
var controlledFrame;
|
||||
function createControlledIFrame(swr) {
|
||||
var p = new Promise(function(res, rej) {
|
||||
var iframe = document.createElement('iframe');
|
||||
iframe.id = "controlledFrame";
|
||||
iframe.src = "http://mochi.test:8888/tests/dom/push/test/frame.html";
|
||||
|
||||
iframe.onload = function() {
|
||||
res(swr)
|
||||
}
|
||||
controlledFrame = iframe;
|
||||
document.body.appendChild(iframe);
|
||||
});
|
||||
return p;
|
||||
}
|
||||
|
||||
function checkPermissionState(swr) {
|
||||
return swr.pushManager.permissionState().then(function(state) {
|
||||
ok(state === "granted", "permissionState() should resolve to granted.");
|
||||
return swr;
|
||||
}).catch(function(e) {
|
||||
ok(false, "permissionState() should resolve to granted.");
|
||||
return swr;
|
||||
});
|
||||
}
|
||||
|
||||
function sendPushToPushServer(pushEndpoint) {
|
||||
// Work around CORS for now.
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('GET', "http://mochi.test:8888/tests/dom/push/test/push-server.sjs", true);
|
||||
xhr.setRequestHeader("X-Push-Method", "PUT");
|
||||
xhr.setRequestHeader("X-Push-Server", pushEndpoint);
|
||||
xhr.onload = function(e) {
|
||||
debug("xhr : " + this.status);
|
||||
}
|
||||
xhr.onerror = function(e) {
|
||||
debug("xhr error: " + e);
|
||||
}
|
||||
xhr.send("version=24601");
|
||||
}
|
||||
|
||||
var registration;
|
||||
add_task(function* start() {
|
||||
yield setupPrefs();
|
||||
yield setPushPermission(true);
|
||||
|
||||
function start() {
|
||||
return navigator.serviceWorker.register("worker.js" + "?" + (Math.random()), {scope: "."})
|
||||
.then((swr) => registration = swr);
|
||||
}
|
||||
var url = "worker.js" + "?" + (Math.random());
|
||||
registration = yield navigator.serviceWorker.register(url, {scope: "."});
|
||||
});
|
||||
|
||||
function unregister() {
|
||||
return registration.unregister().then(function(result) {
|
||||
ok(result, "Unregister should return true.");
|
||||
}, function(e) {
|
||||
dump("Unregistering the SW failed with " + e + "\n");
|
||||
var controlledFrame;
|
||||
add_task(function* createControlledIFrame() {
|
||||
controlledFrame = yield injectControlledFrame();
|
||||
});
|
||||
|
||||
add_task(function* checkPermissionState() {
|
||||
var state = yield registration.pushManager.permissionState();
|
||||
is(state, "granted", "permissionState() should resolve to granted.");
|
||||
});
|
||||
|
||||
var pushSubscription;
|
||||
add_task(function* subscribe() {
|
||||
pushSubscription = yield registration.pushManager.subscribe();
|
||||
});
|
||||
|
||||
add_task(function* resubscribe() {
|
||||
var data = yield sendRequestToWorker({
|
||||
type: "resubscribe",
|
||||
endpoint: pushSubscription.endpoint,
|
||||
});
|
||||
}
|
||||
pushSubscription = yield registration.pushManager.getSubscription();
|
||||
is(data.endpoint, pushSubscription.endpoint,
|
||||
"Subscription endpoints should match after resubscribing in worker");
|
||||
});
|
||||
|
||||
function setupPushNotification(swr) {
|
||||
var p = new Promise(function(res, rej) {
|
||||
swr.pushManager.subscribe().then(
|
||||
function(pushSubscription) {
|
||||
ok(true, "successful registered for push notification");
|
||||
res(pushSubscription);
|
||||
}, function(error) {
|
||||
ok(false, "could not register for push notification");
|
||||
res(null);
|
||||
}
|
||||
);
|
||||
});
|
||||
return p;
|
||||
}
|
||||
add_task(function* waitForPushNotification() {
|
||||
yield Promise.all([
|
||||
controlledFrame.waitOnWorkerMessage("finished"),
|
||||
fetch("http://mochi.test:8888/tests/dom/push/test/push-server.sjs", {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
"X-Push-Method": "POST",
|
||||
"X-Push-Server": pushSubscription.endpoint,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
});
|
||||
|
||||
function unregisterPushNotification(pushSubscription) {
|
||||
controlledFrame.parentNode.removeChild(controlledFrame);
|
||||
controlledFrame = null;
|
||||
return pushSubscription.unsubscribe();
|
||||
}
|
||||
add_task(function* unsubscribe() {
|
||||
controlledFrame.remove();
|
||||
yield pushSubscription.unsubscribe();
|
||||
});
|
||||
|
||||
function waitForPushNotification(pushSubscription) {
|
||||
var p = controlledFrame.contentWindow.waitOnPushMessage();
|
||||
sendPushToPushServer(pushSubscription.endpoint);
|
||||
return p.then(function() {
|
||||
return pushSubscription;
|
||||
});
|
||||
}
|
||||
add_task(function* unregister() {
|
||||
var result = yield registration.unregister();
|
||||
ok(result, "Unregister should return true.");
|
||||
});
|
||||
|
||||
function runTest() {
|
||||
start()
|
||||
.then(createControlledIFrame)
|
||||
.then(checkPermissionState)
|
||||
.then(setupPushNotification)
|
||||
.then(waitForPushNotification)
|
||||
.then(unregisterPushNotification)
|
||||
.then(unregister)
|
||||
.catch(function(e) {
|
||||
ok(false, "Some test failed with error " + e);
|
||||
}).then(SimpleTest.finish);
|
||||
}
|
||||
|
||||
SpecialPowers.pushPrefEnv({"set": [
|
||||
["dom.push.enabled", true],
|
||||
["dom.serviceWorkers.exemptFromPerDomainMax", true],
|
||||
["dom.serviceWorkers.enabled", true],
|
||||
["dom.serviceWorkers.testing.enabled", true]
|
||||
]}, runTest);
|
||||
SpecialPowers.addPermission("desktop-notification", true, document);
|
||||
SimpleTest.waitForExplicitFinish();
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
dom/push/test/test_subscription_change.html (new file, 68 lines)
@ -0,0 +1,68 @@
|
||||
<!DOCTYPE HTML>
|
||||
<html>
|
||||
<!--
|
||||
Bug 1205109: Make `pushsubscriptionchange` extendable.
|
||||
|
||||
Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
-->
|
||||
<head>
|
||||
<title>Test for Bug 1205109</title>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
|
||||
<script type="text/javascript" src="/tests/SimpleTest/SpawnTask.js"></script>
|
||||
<script type="text/javascript" src="/tests/dom/push/test/test_utils.js"></script>
|
||||
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
|
||||
<meta http-equiv="Content-type" content="text/html;charset=UTF-8">
|
||||
</head>
|
||||
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1205109">Mozilla Bug 1205109</a>
|
||||
<p id="display"></p>
|
||||
<div id="content" style="display: none">
|
||||
|
||||
</div>
|
||||
<pre id="test">
|
||||
</pre>
|
||||
|
||||
<script class="testbody" type="text/javascript">
|
||||
|
||||
var registration;
|
||||
add_task(function* start() {
|
||||
yield setupPrefs();
|
||||
yield setPushPermission(true);
|
||||
|
||||
var url = "worker.js" + "?" + (Math.random());
|
||||
registration = yield navigator.serviceWorker.register(url, {scope: "."});
|
||||
});
|
||||
|
||||
var controlledFrame;
|
||||
add_task(function* createControlledIFrame() {
|
||||
controlledFrame = yield injectControlledFrame();
|
||||
});
|
||||
|
||||
add_task(function* togglePermission() {
|
||||
var subscription = yield registration.pushManager.subscribe();
|
||||
ok(subscription, "Should create a push subscription");
|
||||
|
||||
yield setPushPermission(false);
|
||||
var permissionState = yield registration.pushManager.permissionState();
|
||||
is(permissionState, "denied", "Should deny push permission");
|
||||
|
||||
var subscription = yield registration.pushManager.getSubscription();
|
||||
is(subscription, null, "Should not return subscription when permission is revoked");
|
||||
|
||||
var changePromise = controlledFrame.waitOnWorkerMessage("changed");
|
||||
yield setPushPermission(true);
|
||||
yield changePromise;
|
||||
|
||||
subscription = yield registration.pushManager.getSubscription();
|
||||
is(subscription, null, "Should drop subscription after reinstating permission");
|
||||
});
|
||||
|
||||
add_task(function* unsubscribe() {
|
||||
controlledFrame.remove();
|
||||
yield registration.unregister();
|
||||
});
|
||||
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
dom/push/test/test_utils.js (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
// Remove permissions and prefs when the test finishes.
|
||||
SimpleTest.registerCleanupFunction(() =>
|
||||
new Promise(resolve => {
|
||||
SpecialPowers.flushPermissions(_ => {
|
||||
SpecialPowers.flushPrefEnv(resolve);
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
function setPushPermission(allow) {
|
||||
return new Promise(resolve => {
|
||||
SpecialPowers.pushPermissions([
|
||||
{ type: "desktop-notification", allow, context: document },
|
||||
], resolve);
|
||||
});
|
||||
}
|
||||
|
||||
function setupPrefs() {
|
||||
return new Promise(resolve => {
|
||||
SpecialPowers.pushPrefEnv({"set": [
|
||||
["dom.push.enabled", true],
|
||||
["dom.serviceWorkers.exemptFromPerDomainMax", true],
|
||||
["dom.serviceWorkers.enabled", true],
|
||||
["dom.serviceWorkers.testing.enabled", true]
|
||||
]}, resolve);
|
||||
});
|
||||
}
|
||||
|
||||
function injectControlledFrame(target = document.body) {
|
||||
return new Promise(function(res, rej) {
|
||||
var iframe = document.createElement("iframe");
|
||||
iframe.src = "/tests/dom/push/test/frame.html";
|
||||
|
||||
var controlledFrame = {
|
||||
remove() {
|
||||
target.removeChild(iframe);
|
||||
iframe = null;
|
||||
},
|
||||
waitOnWorkerMessage(type) {
|
||||
return iframe ? iframe.contentWindow.waitOnWorkerMessage(type) :
|
||||
Promise.reject(new Error("Frame removed from document"));
|
||||
},
|
||||
};
|
||||
|
||||
iframe.onload = () => res(controlledFrame);
|
||||
target.appendChild(iframe);
|
||||
});
|
||||
}
|
||||
|
||||
function sendRequestToWorker(request) {
|
||||
return navigator.serviceWorker.ready.then(registration => {
|
||||
return new Promise((resolve, reject) => {
|
||||
var channel = new MessageChannel();
|
||||
channel.port1.onmessage = e => {
|
||||
(e.data.error ? reject : resolve)(e.data);
|
||||
};
|
||||
registration.active.postMessage(request, [channel.port2]);
|
||||
});
|
||||
});
|
||||
}
|
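Taken together, these helpers replace the per-test prefs, permission, and iframe boilerplate removed from the older tests above. A sketch of the skeleton a test built on them ends up with, assuming SimpleTest.js, SpawnTask.js, and test_utils.js are loaded:

var registration;
add_task(function* start() {
  yield setupPrefs();             // push + service worker prefs
  yield setPushPermission(true);  // desktop-notification permission
  var url = "worker.js?" + Math.random();
  registration = yield navigator.serviceWorker.register(url, {scope: "."});
});

var controlledFrame;
add_task(function* createControlledIFrame() {
  controlledFrame = yield injectControlledFrame();
});

add_task(function* cleanup() {
  controlledFrame.remove();
  yield registration.unregister();
});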
@ -1,8 +1,16 @@
|
||||
// Any copyright is dedicated to the Public Domain.
|
||||
// http://creativecommons.org/licenses/publicdomain/
|
||||
|
||||
// This worker is used for two types of tests. `handlePush` sends messages to
|
||||
// `frame.html`, which verifies that the worker can receive push messages.
|
||||
|
||||
// `handleMessage` receives messages from `test_push_manager_worker.html`
|
||||
// and `test_data.html`, and verifies that `PushManager` can be used from
|
||||
// the worker.
|
||||
|
||||
this.onpush = handlePush;
|
||||
this.onmessage = handleMessage;
|
||||
this.onpushsubscriptionchange = handlePushSubscriptionChange;
|
||||
|
||||
function getJSON(data) {
|
||||
var result = {
|
||||
@ -17,48 +25,105 @@ function getJSON(data) {
|
||||
return result;
|
||||
}
|
||||
|
||||
function handlePush(event) {
|
||||
function assert(value, message) {
|
||||
if (!value) {
|
||||
throw new Error(message);
|
||||
}
|
||||
}
|
||||
|
||||
event.waitUntil(self.clients.matchAll().then(function(result) {
|
||||
if (event instanceof PushEvent) {
|
||||
if (!('data' in event)) {
|
||||
result[0].postMessage({type: "finished", okay: "yes"});
|
||||
return;
|
||||
}
|
||||
var message = {
|
||||
type: "finished",
|
||||
okay: "yes",
|
||||
};
|
||||
if (event.data) {
|
||||
message.data = {
|
||||
text: event.data.text(),
|
||||
arrayBuffer: event.data.arrayBuffer(),
|
||||
json: getJSON(event.data),
|
||||
blob: event.data.blob(),
|
||||
};
|
||||
}
|
||||
result[0].postMessage(message);
|
||||
function broadcast(event, promise) {
|
||||
event.waitUntil(Promise.resolve(promise).then(message => {
|
||||
return self.clients.matchAll().then(clients => {
|
||||
clients.forEach(client => client.postMessage(message));
|
||||
});
|
||||
}));
|
||||
}
|
||||
|
||||
function reply(event, promise) {
|
||||
event.waitUntil(Promise.resolve(promise).then(result => {
|
||||
event.ports[0].postMessage(result);
|
||||
}).catch(error => {
|
||||
event.ports[0].postMessage({
|
||||
error: String(error),
|
||||
});
|
||||
}));
|
||||
}
|
||||
|
||||
function handlePush(event) {
|
||||
if (event instanceof PushEvent) {
|
||||
if (!('data' in event)) {
|
||||
broadcast(event, {type: "finished", okay: "yes"});
|
||||
return;
|
||||
}
|
||||
result[0].postMessage({type: "finished", okay: "no"});
|
||||
}));
|
||||
var message = {
|
||||
type: "finished",
|
||||
okay: "yes",
|
||||
};
|
||||
if (event.data) {
|
||||
message.data = {
|
||||
text: event.data.text(),
|
||||
arrayBuffer: event.data.arrayBuffer(),
|
||||
json: getJSON(event.data),
|
||||
blob: event.data.blob(),
|
||||
};
|
||||
}
|
||||
broadcast(event, message);
|
||||
return;
|
||||
}
|
||||
broadcast(event, {type: "finished", okay: "no"});
|
||||
}
|
||||
|
||||
function handleMessage(event) {
|
||||
if (event.data.type == "publicKey") {
|
||||
event.waitUntil(self.registration.pushManager.getSubscription().then(subscription => {
|
||||
event.ports[0].postMessage({
|
||||
reply(event, self.registration.pushManager.getSubscription().then(
|
||||
subscription => ({
|
||||
p256dh: subscription.getKey("p256dh"),
|
||||
auth: subscription.getKey("auth"),
|
||||
});
|
||||
}).catch(error => {
|
||||
event.ports[0].postMessage({
|
||||
error: String(error),
|
||||
});
|
||||
})
|
||||
));
|
||||
return;
|
||||
}
|
||||
if (event.data.type == "resubscribe") {
|
||||
reply(event, self.registration.pushManager.getSubscription().then(
|
||||
subscription => {
|
||||
assert(subscription.endpoint == event.data.endpoint,
|
||||
"Wrong push endpoint in worker");
|
||||
return subscription.unsubscribe();
|
||||
}
|
||||
).then(result => {
|
||||
assert(result, "Error unsubscribing in worker");
|
||||
return self.registration.pushManager.getSubscription();
|
||||
}).then(subscription => {
|
||||
assert(!subscription, "Subscription not removed in worker");
|
||||
return self.registration.pushManager.subscribe();
|
||||
}).then(subscription => {
|
||||
return {
|
||||
endpoint: subscription.endpoint,
|
||||
};
|
||||
}));
|
||||
return;
|
||||
}
|
||||
event.ports[0].postMessage({
|
||||
error: "Invalid message type: " + event.data.type,
|
||||
});
|
||||
if (event.data.type == "denySubscribe") {
|
||||
reply(event, self.registration.pushManager.getSubscription().then(
|
||||
subscription => {
|
||||
assert(!subscription,
|
||||
"Should not return worker subscription with revoked permission");
|
||||
return self.registration.pushManager.subscribe().then(_ => {
|
||||
assert(false, "Expected error subscribing with revoked permission");
|
||||
}, error => {
|
||||
return {
|
||||
isDOMException: error instanceof DOMException,
|
||||
name: error.name,
|
||||
};
|
||||
});
|
||||
}
|
||||
));
|
||||
return;
|
||||
}
|
||||
reply(event, Promise.reject(
|
||||
"Invalid message type: " + event.data.type));
|
||||
}
|
||||
|
||||
function handlePushSubscriptionChange(event) {
|
||||
broadcast(event, {type: "changed", okay: "yes"});
|
||||
}
|
||||
|
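reply() above is the worker-side half of sendRequestToWorker() in test_utils.js: the page posts {type: ...} together with a MessageChannel port, and the worker answers (or forwards an error string) on event.ports[0]. A minimal round trip with a hypothetical "ping" message type, shown only to illustrate the contract:

// Page side, in a test that loads test_utils.js:
add_task(function* pingWorker() {
  var data = yield sendRequestToWorker({ type: "ping" });
  is(data.pong, "yes", "worker replied over the MessageChannel port");
});

// Worker side, as an extra branch in handleMessage():
if (event.data.type == "ping") {
  reply(event, Promise.resolve({ pong: "yes" }));
  return;
}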
@ -83,7 +83,7 @@ SettingsDB.prototype = {
|
||||
let chan = NetUtil.newChannel({
|
||||
uri: NetUtil.newURI(settingsFile),
|
||||
loadUsingSystemPrincipal: true});
|
||||
let stream = chan.open();
|
||||
let stream = chan.open2();
|
||||
// Obtain a converter to read from a UTF-8 encoded input stream.
|
||||
let converter = Cc["@mozilla.org/intl/scriptableunicodeconverter"]
|
||||
.createInstance(Ci.nsIScriptableUnicodeConverter);
|
||||
|
@ -12,6 +12,8 @@ const nsIFilePicker = Components.interfaces.nsIFilePicker;
|
||||
const STDURL_CTRID = "@mozilla.org/network/standard-url;1";
|
||||
const nsIURI = Components.interfaces.nsIURI;
|
||||
|
||||
Components.utils.import("resource://gre/modules/NetUtil.jsm");
|
||||
|
||||
var gStop = false;
|
||||
|
||||
function loadFile(aUriSpec)
|
||||
@ -22,17 +24,13 @@ function loadFile(aUriSpec)
|
||||
if (!serv) {
|
||||
throw Components.results.ERR_FAILURE;
|
||||
}
|
||||
var chan = serv.newChannel2(aUriSpec,
|
||||
null,
|
||||
null,
|
||||
null, // aLoadingNode
|
||||
Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
null, // aTriggeringPrincipal
|
||||
Ci.nsILoadInfo.SEC_NORMAL,
|
||||
Ci.nsIContentPolicy.TYPE_OTHER);
|
||||
var chan = NetUtil.newChannel({
|
||||
uri: aUriSpec,
|
||||
loadUsingSystemPrincipal: true
|
||||
});
|
||||
var instream =
|
||||
Components.classes[SIS_CTRID].createInstance(nsISIS);
|
||||
instream.init(chan.open());
|
||||
instream.init(chan.open2());
|
||||
|
||||
return instream.read(instream.available());
|
||||
}
|
||||
|
@ -3,6 +3,8 @@
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
Components.utils.import("resource://gre/modules/NetUtil.jsm");
|
||||
|
||||
var parser = new DOMParser();
|
||||
var methodExpr = (new XPathEvaluator).createExpression("xsl:output/@method",
|
||||
{
|
||||
@ -307,21 +309,12 @@ runItem.prototype =
|
||||
|
||||
loadTextFile : function(url)
|
||||
{
|
||||
var serv = Components.classes[IOSERVICE_CTRID].
|
||||
getService(nsIIOService);
|
||||
if (!serv) {
|
||||
throw Components.results.ERR_FAILURE;
|
||||
}
|
||||
var chan = serv.newChannel2(url,
|
||||
null,
|
||||
null,
|
||||
null, // aLoadingNode
|
||||
Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
null, // aTriggeringPrincipal
|
||||
Ci.nsILoadInfo.SEC_NORMAL,
|
||||
Ci.nsIContentPolicy.TYPE_OTHER);
|
||||
var chan = NetUtil.newChannel({
|
||||
uri: url,
|
||||
loadUsingSystemPrincipal: true
|
||||
});
|
||||
var instream = doCreate(SIS_CTRID, nsISIS);
|
||||
instream.init(chan.open());
|
||||
instream.init(chan.open2());
|
||||
|
||||
return instream.read(instream.available());
|
||||
}
|
||||
|
@ -134,7 +134,17 @@ mozInlineSpellStatus::InitForEditorChange(
|
||||
getter_AddRefs(mAnchorRange));
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
if (aAction == EditAction::deleteSelection) {
|
||||
nsCOMPtr<nsINode> prevNode = do_QueryInterface(aPreviousNode);
|
||||
NS_ENSURE_STATE(prevNode);
|
||||
|
||||
bool deleted = aAction == EditAction::deleteSelection;
|
||||
if (aAction == EditAction::insertIMEText) {
|
||||
// IME may remove the previous node if it cancels composition when
|
||||
// there is no text around the composition.
|
||||
deleted = !prevNode->IsInComposedDoc();
|
||||
}
|
||||
|
||||
if (deleted) {
|
||||
// Deletes are easy, the range is just the current anchor. We set the range
|
||||
// to check to be empty, FinishInitOnEvent will fill in the range to be
|
||||
// the current word.
|
||||
@ -146,9 +156,6 @@ mozInlineSpellStatus::InitForEditorChange(
|
||||
mOp = eOpChange;
|
||||
|
||||
// range to check
|
||||
nsCOMPtr<nsINode> prevNode = do_QueryInterface(aPreviousNode);
|
||||
NS_ENSURE_STATE(prevNode);
|
||||
|
||||
mRange = new nsRange(prevNode);
|
||||
|
||||
// ...we need to put the start and end in the correct order
|
||||
|
@ -1087,10 +1087,9 @@ APZCTreeManager::ReceiveInputEvent(WidgetInputEvent& aEvent,
ScrollableLayerGuid* aOutTargetGuid,
uint64_t* aOutInputBlockId)
{
// This function will be removed once metro code is modified to use the
// InputData version of ReceiveInputEvent.
// In general it is preferable to use the version of ReceiveInputEvent
// that takes an InputData, as that is usable from off-main-thread.
// that takes an InputData, as that is usable from off-main-thread. On some
// platforms OMT input isn't possible, and there we can use this version.

MOZ_ASSERT(NS_IsMainThread());
APZThreadUtils::AssertOnControllerThread();

@ -117,10 +117,14 @@ function synthesizeNativeWheelAndWaitForWheelEvent(aElement, aX, aY, aDeltaX, aD
|
||||
// If the event targets content in a subdocument, |aElement| should be inside
|
||||
// the subdocument. See synthesizeNativeWheel for details on the other
|
||||
// parameters.
|
||||
var scrollActionId = 0;
|
||||
function synthesizeNativeWheelAndWaitForScrollEvent(aElement, aX, aY, aDeltaX, aDeltaY, aCallback) {
|
||||
scrollActionId++;
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] initiating scroll action " + scrollActionId + "\n");
|
||||
var targetWindow = aElement.ownerDocument.defaultView;
|
||||
var useCapture = true; // scroll events don't always bubble
|
||||
targetWindow.addEventListener("scroll", function scrollWaiter(e) {
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scroll action " + scrollActionId + ": received scroll event, target is " + e.target + " with id " + e.target.id + "\n");
|
||||
targetWindow.removeEventListener("scroll", scrollWaiter, useCapture);
|
||||
setTimeout(aCallback, 0);
|
||||
}, useCapture);
|
||||
|
@ -43,6 +43,10 @@ function scrollWheelOver(element, deltaY) {
|
||||
synthesizeNativeWheelAndWaitForScrollEvent(element, 10, 10, 0, deltaY, driveTest);
|
||||
}
|
||||
|
||||
function reportPositions(outer, inner) {
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] outer.scrollTop = " + outer.scrollTop + " and inner.scrollTop = " + inner.scrollTop + "\n");
|
||||
}
|
||||
|
||||
function* runTest() {
|
||||
var outer = document.getElementById('outer-frame');
|
||||
var inner = document.getElementById('inner-frame');
|
||||
@ -55,8 +59,10 @@ function* runTest() {
|
||||
window.addEventListener("wheel", wheelTargetRecorder);
|
||||
|
||||
// Scroll |outer| to the bottom.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scrolling outer to the bottom\n");
|
||||
while (outer.scrollTop < outer.scrollTopMax) {
|
||||
yield scrollWheelOver(outer, -10);
|
||||
reportPositions(outer, inner);
|
||||
}
|
||||
|
||||
// Verify that this has brought |inner| under the wheel.
|
||||
@ -64,7 +70,9 @@ function* runTest() {
|
||||
window.removeEventListener("wheel", wheelTargetRecorder);
|
||||
|
||||
// Immediately after, scroll it back up a bit.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scroll outer back up a bit\n");
|
||||
yield scrollWheelOver(outer, 10);
|
||||
reportPositions(outer, inner);
|
||||
|
||||
// Check that it was |outer| that scrolled back, and |inner| didn't
|
||||
// scroll at all, as all the above scrolls should be in the same
|
||||
@ -81,21 +89,27 @@ function* runTest() {
|
||||
|
||||
// Scroll up a bit more. It's still |outer| scrolling because
|
||||
// |inner| is still scrolled all the way to the top.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scrolling up\n");
|
||||
yield scrollWheelOver(outer, 10);
|
||||
reportPositions(outer, inner);
|
||||
|
||||
// Wait for the transaction timeout to elapse.
|
||||
// timeout * 5 is used to make it less likely that the timeout is less than
|
||||
// the system timestamp resolution
|
||||
dump("[WHEEL_TRANS_LOG] waiting for timeout\n");
|
||||
yield window.setTimeout(driveTest, timeout * 5);
|
||||
|
||||
// Now scroll down. The transaction having timed out, the event
|
||||
// should pick up a new target, and that should be |inner|.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scrolling down after waiting for timeout\n");
|
||||
yield scrollWheelOver(outer, -10);
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] checking if inner has scrolled\n");
|
||||
ok(inner.scrollTop > 0, "'inner' should have been scrolled");
|
||||
|
||||
// Finally, test scroll handoff after a timeout.
|
||||
|
||||
// Continue scrolling |inner| down to the bottom.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scrolling inner to the bottom\n");
|
||||
var prevScrollTop = inner.scrollTop;
|
||||
while (inner.scrollTop < inner.scrollTopMax) {
|
||||
yield scrollWheelOver(outer, -10);
|
||||
@ -110,6 +124,7 @@ function* runTest() {
|
||||
yield window.setTimeout(driveTest, timeout * 5);
|
||||
|
||||
// Continued downward scrolling should scroll |outer| to the bottom.
|
||||
dump("[WHEEL_TRANS_LOG] [" + Date.now() + "] scrolling outer to the bottom\n");
|
||||
prevScrollTop = outer.scrollTop;
|
||||
while (outer.scrollTop < outer.scrollTopMax) {
|
||||
yield scrollWheelOver(outer, -10);
|
||||
@ -133,7 +148,9 @@ function driveTest() {
|
||||
function startTest() {
|
||||
// Disable smooth scrolling because it makes the test flaky (we don't have a good
|
||||
// way of detecting when the scrolling is finished).
|
||||
SpecialPowers.pushPrefEnv({"set": [["general.smoothScroll", false]]}, driveTest);
|
||||
SpecialPowers.pushPrefEnv({"set": [["general.smoothScroll", false],
|
||||
["layers.dump", true],
|
||||
["apz.printtree", true]]}, driveTest);
|
||||
}
|
||||
|
||||
SimpleTest.waitForExplicitFinish();
|
||||
|
@ -532,13 +532,19 @@ APZCCallbackHelper::ApplyCallbackTransform(const LayoutDeviceIntPoint& aPoint,
|
||||
}
|
||||
|
||||
void
|
||||
APZCCallbackHelper::ApplyCallbackTransform(WidgetTouchEvent& aEvent,
|
||||
APZCCallbackHelper::ApplyCallbackTransform(WidgetEvent& aEvent,
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const CSSToLayoutDeviceScale& aScale)
|
||||
{
|
||||
for (size_t i = 0; i < aEvent.touches.Length(); i++) {
|
||||
aEvent.touches[i]->mRefPoint = ApplyCallbackTransform(
|
||||
aEvent.touches[i]->mRefPoint, aGuid, aScale);
|
||||
if (aEvent.AsTouchEvent()) {
|
||||
WidgetTouchEvent& event = *(aEvent.AsTouchEvent());
|
||||
for (size_t i = 0; i < event.touches.Length(); i++) {
|
||||
event.touches[i]->mRefPoint = ApplyCallbackTransform(
|
||||
event.touches[i]->mRefPoint, aGuid, aScale);
|
||||
}
|
||||
} else {
|
||||
aEvent.refPoint = ApplyCallbackTransform(
|
||||
aEvent.refPoint, aGuid, aScale);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,9 +99,9 @@ public:
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const CSSToLayoutDeviceScale& aScale);
|
||||
|
||||
/* Convenience function for applying a callback transform to all touch
|
||||
* points of a touch event. */
|
||||
static void ApplyCallbackTransform(WidgetTouchEvent& aEvent,
|
||||
/* Convenience function for applying a callback transform to all refpoints
|
||||
* in the input event. */
|
||||
static void ApplyCallbackTransform(WidgetEvent& aEvent,
|
||||
const ScrollableLayerGuid& aGuid,
|
||||
const CSSToLayoutDeviceScale& aScale);
|
||||
|
||||
|
@ -7,7 +7,7 @@
|
||||
|
||||
#include "base/message_pump.h"
|
||||
#include "base/time.h"
|
||||
#include "nsAutoPtr.h"
|
||||
#include "mozilla/UniquePtr.h"
|
||||
|
||||
// Declare structs we need from libevent.h rather than including it
|
||||
struct event_base;
|
||||
@ -192,7 +192,7 @@ public:
|
||||
mBufferSize(aBufferSize),
|
||||
mTerminator(aTerminator)
|
||||
{
|
||||
mReceiveBuffer = new char[mBufferSize];
|
||||
mReceiveBuffer = mozilla::MakeUnique<char[]>(mBufferSize);
|
||||
}
|
||||
|
||||
~LineWatcher() {}
|
||||
@ -208,7 +208,7 @@ protected:
|
||||
private:
|
||||
virtual void OnFileCanReadWithoutBlocking(int aFd) final override;
|
||||
|
||||
nsAutoPtr<char> mReceiveBuffer;
|
||||
mozilla::UniquePtr<char[]> mReceiveBuffer;
|
||||
int mReceivedIndex;
|
||||
int mBufferSize;
|
||||
char mTerminator;
|
||||
|
@ -24,6 +24,9 @@
|
||||
#include "asmjs/WasmGenerator.h"
|
||||
#include "asmjs/WasmText.h"
|
||||
|
||||
#include "jsatominlines.h"
|
||||
#include "jsobjinlines.h"
|
||||
|
||||
using namespace js;
|
||||
using namespace js::wasm;
|
||||
|
||||
|
@ -9,6 +9,7 @@
|
||||
#include "jscompartment.h"
|
||||
#include "jsobj.h"
|
||||
|
||||
#include "builtin/TypedObject.h"
|
||||
#include "gc/Policy.h"
|
||||
#include "gc/Zone.h"
|
||||
#include "js/HashTable.h"
|
||||
|
@ -214,6 +214,7 @@
|
||||
#include "gc/GCTrace.h"
|
||||
#include "gc/Marking.h"
|
||||
#include "gc/Memory.h"
|
||||
#include "gc/Policy.h"
|
||||
#include "jit/BaselineJIT.h"
|
||||
#include "jit/IonCode.h"
|
||||
#include "jit/JitcodeMap.h"
|
||||
|
@ -17,6 +17,7 @@
|
||||
#include "jshashutil.h"
|
||||
#include "jsobj.h"
|
||||
|
||||
#include "gc/Policy.h"
|
||||
#include "js/HashTable.h"
|
||||
|
||||
#include "jscntxtinlines.h"
|
||||
|
@ -183,6 +183,7 @@
|
||||
#include "nsSubDocumentFrame.h"
|
||||
#include "nsQueryObject.h"
|
||||
#include "nsLayoutStylesheetCache.h"
|
||||
#include "mozilla/layers/InputAPZContext.h"
|
||||
|
||||
#ifdef ANDROID
|
||||
#include "nsIDocShellTreeOwner.h"
|
||||
@ -5454,6 +5455,13 @@ PresShell::ProcessSynthMouseMoveEvent(bool aFromScroll)
|
||||
|
||||
nsCOMPtr<nsIPresShell> shell = pointVM->GetPresShell();
|
||||
if (shell) {
|
||||
// Since this gets run in a refresh tick there isn't an InputAPZContext on
|
||||
// the stack from the nsBaseWidget. We need to simulate one with at least
|
||||
// the correct target guid, so that the correct callback transform gets
|
||||
// applied if this event goes to a child process. The input block id is set
|
||||
// to 0 because this is a synthetic event which doesn't really belong to any
|
||||
// input block. Same for the APZ response field.
|
||||
InputAPZContext apzContext(mMouseEventTargetGuid, 0, nsEventStatus_eIgnore);
|
||||
shell->DispatchSynthMouseMove(&event, !aFromScroll);
|
||||
}
|
||||
|
||||
@ -6422,9 +6430,11 @@ PresShell::RecordMouseLocation(WidgetGUIEvent* aEvent)
|
||||
nsView* rootView = mViewManager->GetRootView();
|
||||
mMouseLocation = nsLayoutUtils::TranslateWidgetToView(mPresContext,
|
||||
aEvent->widget, aEvent->refPoint, rootView);
|
||||
mMouseEventTargetGuid = InputAPZContext::GetTargetLayerGuid();
|
||||
} else {
|
||||
mMouseLocation =
|
||||
nsLayoutUtils::GetEventCoordinatesRelativeTo(aEvent, rootFrame);
|
||||
mMouseEventTargetGuid = InputAPZContext::GetTargetLayerGuid();
|
||||
}
|
||||
#ifdef DEBUG_MOUSE_LOCATION
|
||||
if (aEvent->mMessage == eMouseEnterIntoWidget) {
|
||||
@ -6444,6 +6454,7 @@ PresShell::RecordMouseLocation(WidgetGUIEvent* aEvent)
|
||||
// this won't matter at all since we'll get the mouse move or enter after
|
||||
// the mouse exit when the mouse moves from one of our widgets into another.
|
||||
mMouseLocation = nsPoint(NS_UNCONSTRAINEDSIZE, NS_UNCONSTRAINEDSIZE);
|
||||
mMouseEventTargetGuid = InputAPZContext::GetTargetLayerGuid();
|
||||
#ifdef DEBUG_MOUSE_LOCATION
|
||||
printf("[ps=%p]got mouse exit for %p\n",
|
||||
this, aEvent->widget);
|
||||
|
@ -779,6 +779,10 @@ protected:
|
||||
// over our window or there is no last observed mouse location for some
|
||||
// reason.
|
||||
nsPoint mMouseLocation;
|
||||
// This is an APZ state variable that tracks the target guid for the last
|
||||
// mouse event that was processed (corresponding to mMouseLocation). This is
|
||||
// needed for the synthetic mouse events.
|
||||
mozilla::layers::ScrollableLayerGuid mMouseEventTargetGuid;
|
||||
|
||||
// mStyleSet owns it but we maintain a ref, may be null
|
||||
RefPtr<mozilla::CSSStyleSheet> mPrefStyleSheet;
|
||||
|
27
layout/reftests/async-scrolling/position-fixed-body-ref.html
Normal file
@ -0,0 +1,27 @@
|
||||
<!DOCTYPE html>
|
||||
<html reftest-async-scroll>
|
||||
<style>
|
||||
body {
|
||||
height: 100vh;
|
||||
margin: 0px;
|
||||
}
|
||||
#scrollbox {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
#scrolledContents {
|
||||
height: 10000px;
|
||||
background: radial-gradient(circle, blue 30%, transparent 0);
|
||||
background-size: 80px 80px;
|
||||
}
|
||||
</style>
|
||||
<body>
|
||||
<div id="scrollbox"
|
||||
reftest-dislayport-x="0" reftest-displayport-y="0"
|
||||
reftest-displayport-w="800" reftest-displayport-h="2000"
|
||||
reftest-async-scroll-x="0" reftest-async-scroll-y="20">
|
||||
<div id="scrolledContents"></div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
31
layout/reftests/async-scrolling/position-fixed-body.html
Normal file
@ -0,0 +1,31 @@
|
||||
<!DOCTYPE html>
|
||||
<html reftest-async-scroll>
|
||||
<style>
|
||||
body {
|
||||
position: fixed;
|
||||
margin: 0px;
|
||||
top: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
}
|
||||
#scrollbox {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
overflow: hidden;
|
||||
}
|
||||
#scrolledContents {
|
||||
height: 10000px;
|
||||
background: radial-gradient(circle, blue 30%, transparent 0);
|
||||
background-size: 80px 80px;
|
||||
}
|
||||
</style>
|
||||
<body>
|
||||
<div id="scrollbox"
|
||||
reftest-dislayport-x="0" reftest-displayport-y="0"
|
||||
reftest-displayport-w="800" reftest-displayport-h="2000"
|
||||
reftest-async-scroll-x="0" reftest-async-scroll-y="20">
|
||||
<div id="scrolledContents"></div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
@ -15,6 +15,7 @@ skip-if(!asyncPan) == nested-1.html nested-1-ref.html
skip-if(!asyncPan) == nested-2.html nested-2-ref.html
skip-if(!asyncPan) == position-fixed-1.html position-fixed-1-ref.html
skip-if(!asyncPan) == position-fixed-2.html position-fixed-2-ref.html
skip-if(!asyncPan) == position-fixed-body.html position-fixed-body-ref.html
skip-if(!asyncPan) == position-fixed-cover-1.html position-fixed-cover-1-ref.html
skip-if(!asyncPan) == position-fixed-cover-2.html position-fixed-cover-2-ref.html
skip-if(!asyncPan) == position-fixed-cover-3.html position-fixed-cover-3-ref.html

@ -355,6 +355,41 @@ struct IsArithmetic
|
||||
: IntegralConstant<bool, IsIntegral<T>::value || IsFloatingPoint<T>::value>
|
||||
{};
|
||||
|
||||
namespace detail {
|
||||
|
||||
template<typename T>
|
||||
struct IsMemberPointerHelper : FalseType {};
|
||||
|
||||
template<typename T, typename U>
|
||||
struct IsMemberPointerHelper<T U::*> : TrueType {};
|
||||
|
||||
} // namespace detail
|
||||
|
||||
/**
|
||||
* IsMemberPointer determines whether a type is pointer to non-static member
|
||||
* object or a pointer to non-static member function.
|
||||
*
|
||||
* mozilla::IsMemberPointer<int(cls::*)>::value is true
|
||||
* mozilla::IsMemberPointer<int*>::value is false
|
||||
*/
|
||||
template<typename T>
|
||||
struct IsMemberPointer
|
||||
: detail::IsMemberPointerHelper<typename RemoveCV<T>::Type>
|
||||
{};
|
||||
|
||||
/**
|
||||
* IsScalar determines whether a type is a scalar type.
|
||||
*
|
||||
* mozilla::IsScalar<int>::value is true
|
||||
* mozilla::IsScalar<int*>::value is true
|
||||
* mozilla::IsScalar<cls>::value is false
|
||||
*/
|
||||
template<typename T>
|
||||
struct IsScalar
|
||||
: IntegralConstant<bool, IsArithmetic<T>::value || IsEnum<T>::value ||
|
||||
IsPointer<T>::value || IsMemberPointer<T>::value>
|
||||
{};
|
||||
|
||||
/* 20.9.4.3 Type properties [meta.unary.prop] */
|
||||
|
||||
/**
|
||||
|
@ -7,6 +7,12 @@
|
||||
#include "mozilla/Assertions.h"
|
||||
#include "mozilla/TypeTraits.h"
|
||||
|
||||
#define TEST_CV_QUALIFIERS(test, type, ...) \
|
||||
test(type, __VA_ARGS__) \
|
||||
test(const type, __VA_ARGS__) \
|
||||
test(volatile type, __VA_ARGS__) \
|
||||
test(const volatile type, __VA_ARGS__)
|
||||
|
||||
using mozilla::AddLvalueReference;
|
||||
using mozilla::AddPointer;
|
||||
using mozilla::AddRvalueReference;
|
||||
@ -137,6 +143,77 @@ static_assert(IsReference<int&>::value,
|
||||
static_assert(IsReference<int&&>::value,
|
||||
"int&& is a reference");
|
||||
|
||||
namespace CPlusPlus11IsMemberPointer {
|
||||
|
||||
using mozilla::IsMemberPointer;
|
||||
|
||||
struct S {};
|
||||
union U {};
|
||||
|
||||
#define ASSERT_IS_MEMBER_POINTER(type, msg) \
|
||||
static_assert(IsMemberPointer<type>::value, #type msg);
|
||||
#define TEST_IS_MEMBER_POINTER(type) \
|
||||
TEST_CV_QUALIFIERS(ASSERT_IS_MEMBER_POINTER, type, \
|
||||
" is a member pointer type")
|
||||
|
||||
TEST_IS_MEMBER_POINTER(int S::*)
|
||||
TEST_IS_MEMBER_POINTER(int U::*)
|
||||
|
||||
#undef TEST_IS_MEMBER_POINTER
|
||||
#undef ASSERT_IS_MEMBER_POINTER
|
||||
|
||||
#define ASSERT_IS_NOT_MEMBER_POINTER(type, msg) \
|
||||
static_assert(!IsMemberPointer<type>::value, #type msg);
|
||||
#define TEST_IS_NOT_MEMBER_POINTER(type) \
|
||||
TEST_CV_QUALIFIERS(ASSERT_IS_NOT_MEMBER_POINTER, type, \
|
||||
" is not a member pointer type")
|
||||
|
||||
TEST_IS_NOT_MEMBER_POINTER(int*)
|
||||
|
||||
#undef TEST_IS_NOT_MEMBER_POINTER
|
||||
#undef ASSERT_IS_NOT_MEMBER_POINTER
|
||||
|
||||
} // CPlusPlus11IsMemberPointer
|
||||
|
||||
namespace CPlusPlus11IsScalar {
|
||||
|
||||
using mozilla::IsScalar;
|
||||
|
||||
enum E {};
|
||||
enum class EC {};
|
||||
class C {};
|
||||
struct S {};
|
||||
union U {};
|
||||
|
||||
#define ASSERT_IS_SCALAR(type, msg) \
|
||||
static_assert(IsScalar<type>::value, #type msg);
|
||||
#define TEST_IS_SCALAR(type) \
|
||||
TEST_CV_QUALIFIERS(ASSERT_IS_SCALAR, type, " is a scalar type")
|
||||
|
||||
TEST_IS_SCALAR(int)
|
||||
TEST_IS_SCALAR(float)
|
||||
TEST_IS_SCALAR(E)
|
||||
TEST_IS_SCALAR(EC)
|
||||
TEST_IS_SCALAR(S*)
|
||||
TEST_IS_SCALAR(int S::*)
|
||||
|
||||
#undef TEST_IS_SCALAR
|
||||
#undef ASSERT_IS_SCALAR
|
||||
|
||||
#define ASSERT_IS_NOT_SCALAR(type, msg) \
|
||||
static_assert(!IsScalar<type>::value, #type msg);
|
||||
#define TEST_IS_NOT_SCALAR(type) \
|
||||
TEST_CV_QUALIFIERS(ASSERT_IS_NOT_SCALAR, type, " is not a scalar type")
|
||||
|
||||
TEST_IS_NOT_SCALAR(C)
|
||||
TEST_IS_NOT_SCALAR(S)
|
||||
TEST_IS_NOT_SCALAR(U)
|
||||
|
||||
#undef TEST_IS_NOT_SCALAR
|
||||
#undef ASSERT_IS_NOT_SCALAR
|
||||
|
||||
} // CPlusPlus11IsScalar
|
||||
|
||||
struct S1 {};
|
||||
union U1 { int mX; };
|
||||
|
||||
|
@ -600,7 +600,11 @@ pref("apz.pan_repaint_interval", 16);
// Whether to print the APZC tree for debugging
pref("apz.printtree", false);

#ifdef NIGHTLY_BUILD
pref("apz.record_checkerboarding", true);
#else
pref("apz.record_checkerboarding", false);
#endif
pref("apz.smooth_scroll_repaint_interval", 16);
pref("apz.test.logging_enabled", false);
pref("apz.touch_start_tolerance", "0.1");
@ -4626,8 +4630,9 @@ pref("dom.mozPermissionSettings.enabled", false);

// W3C touch events
// 0 - disabled, 1 - enabled, 2 - autodetect
// Enabling it for Windows is tracked by bug 736048.
#if defined(XP_WIN) || defined(XP_MACOSX)
#if defined(XP_MACOSX)
pref("dom.w3c_touch_events.enabled", 0);
#elif defined(XP_WIN) && !defined(NIGHTLY_BUILD)
pref("dom.w3c_touch_events.enabled", 0);
#else
pref("dom.w3c_touch_events.enabled", 2);

@ -18,7 +18,6 @@
|
||||
#include "nsServiceManagerUtils.h"
|
||||
#include "nsNotifyAddrListener_Linux.h"
|
||||
#include "nsString.h"
|
||||
#include "nsAutoPtr.h"
|
||||
#include "mozilla/Logging.h"
|
||||
|
||||
#include "mozilla/Services.h"
|
||||
@ -158,9 +157,6 @@ void nsNotifyAddrListener::OnNetlinkMessage(int aNetlinkSocket)
|
||||
int attr_len;
|
||||
const struct ifaddrmsg* newifam;
|
||||
|
||||
// inspired by check_pf.c.
|
||||
nsAutoPtr<char> addr;
|
||||
nsAutoPtr<char> localaddr;
|
||||
|
||||
ssize_t rc = EINTR_RETRY(recv(aNetlinkSocket, buffer, sizeof(buffer), 0));
|
||||
if (rc < 0) {
|
||||
@ -174,6 +170,10 @@ void nsNotifyAddrListener::OnNetlinkMessage(int aNetlinkSocket)
|
||||
|
||||
for (; NLMSG_OK(nlh, netlink_bytes);
|
||||
nlh = NLMSG_NEXT(nlh, netlink_bytes)) {
|
||||
char prefixaddr[INET6_ADDRSTRLEN];
|
||||
char localaddr[INET6_ADDRSTRLEN];
|
||||
char* addr = nullptr;
|
||||
prefixaddr[0] = localaddr[0] = '\0';
|
||||
|
||||
if (NLMSG_DONE == nlh->nlmsg_type) {
|
||||
break;
|
||||
@ -194,34 +194,31 @@ void nsNotifyAddrListener::OnNetlinkMessage(int aNetlinkSocket)
|
||||
if (attr->rta_type == IFA_ADDRESS) {
|
||||
if (newifam->ifa_family == AF_INET) {
|
||||
struct in_addr* in = (struct in_addr*)RTA_DATA(attr);
|
||||
addr = new char[INET_ADDRSTRLEN];
|
||||
inet_ntop(AF_INET, in, addr.get(), INET_ADDRSTRLEN);
|
||||
inet_ntop(AF_INET, in, prefixaddr, INET_ADDRSTRLEN);
|
||||
} else {
|
||||
struct in6_addr* in = (struct in6_addr*)RTA_DATA(attr);
|
||||
addr = new char[INET6_ADDRSTRLEN];
|
||||
inet_ntop(AF_INET6, in, addr.get(), INET6_ADDRSTRLEN);
|
||||
inet_ntop(AF_INET6, in, prefixaddr, INET6_ADDRSTRLEN);
|
||||
}
|
||||
} else if (attr->rta_type == IFA_LOCAL) {
|
||||
if (newifam->ifa_family == AF_INET) {
|
||||
struct in_addr* in = (struct in_addr*)RTA_DATA(attr);
|
||||
localaddr = new char[INET_ADDRSTRLEN];
|
||||
inet_ntop(AF_INET, in, localaddr.get(), INET_ADDRSTRLEN);
|
||||
inet_ntop(AF_INET, in, localaddr, INET_ADDRSTRLEN);
|
||||
} else {
|
||||
struct in6_addr* in = (struct in6_addr*)RTA_DATA(attr);
|
||||
localaddr = new char[INET6_ADDRSTRLEN];
|
||||
inet_ntop(AF_INET6, in, localaddr.get(), INET6_ADDRSTRLEN);
|
||||
inet_ntop(AF_INET6, in, localaddr, INET6_ADDRSTRLEN);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (localaddr) {
|
||||
if (localaddr[0]) {
|
||||
addr = localaddr;
|
||||
}
|
||||
if (!addr) {
|
||||
} else if (prefixaddr[0]) {
|
||||
addr = prefixaddr;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
if (nlh->nlmsg_type == RTM_NEWADDR) {
|
||||
LOG(("nsNotifyAddrListener::OnNetlinkMessage: a new address "
|
||||
"- %s.", addr.get()));
|
||||
"- %s.", addr));
|
||||
struct ifaddrmsg* ifam;
|
||||
nsCString addrStr;
|
||||
addrStr.Assign(addr);
|
||||
@ -242,16 +239,12 @@ void nsNotifyAddrListener::OnNetlinkMessage(int aNetlinkSocket)
|
||||
}
|
||||
} else {
|
||||
LOG(("nsNotifyAddrListener::OnNetlinkMessage: an address "
|
||||
"has been deleted - %s.", addr.get()));
|
||||
"has been deleted - %s.", addr));
|
||||
networkChange = true;
|
||||
nsCString addrStr;
|
||||
addrStr.Assign(addr);
|
||||
mAddressInfo.Remove(addrStr);
|
||||
}
|
||||
|
||||
// clean it up.
|
||||
localaddr = nullptr;
|
||||
addr = nullptr;
|
||||
}
|
||||
|
||||
if (networkChange && mAllowChangedEvent) {
|
||||
|
@ -40,6 +40,7 @@ consumers will need to arrange this themselves.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import collections
|
||||
import functools
|
||||
import hashlib
|
||||
import logging
|
||||
@ -76,7 +77,6 @@ from mozregression.persist_limit import (
|
||||
PersistLimit,
|
||||
)
|
||||
|
||||
MAX_CACHED_PARENTS = 100 # Number of parent changesets to cache candidate pushheads for.
|
||||
NUM_PUSHHEADS_TO_QUERY_PER_PARENT = 50 # Number of candidate pushheads to cache per parent changeset.
|
||||
|
||||
MAX_CACHED_TASKS = 400 # Number of pushheads to cache Task Cluster task data for.
|
||||
@ -491,52 +491,32 @@ class CacheManager(object):
|
||||
def __exit__(self, type, value, traceback):
|
||||
self.dump_cache()
|
||||
|
||||
class TreeCache(CacheManager):
|
||||
'''Map pushhead revisions to trees with tasks/artifacts known to taskcluster.'''
|
||||
|
||||
class PushHeadCache(CacheManager):
|
||||
'''Map parent hg revisions to candidate pushheads.'''
|
||||
def __init__(self, cache_dir, log=None):
|
||||
CacheManager.__init__(self, cache_dir, 'artifact_tree', MAX_CACHED_TASKS, log=log)
|
||||
|
||||
def __init__(self, hg, cache_dir, log=None):
|
||||
# It's not unusual to pull hundreds of changesets at once, and perhaps
|
||||
# |hg up| back and forth a few times.
|
||||
CacheManager.__init__(self, cache_dir, 'pushheads', MAX_CACHED_PARENTS, log=log)
|
||||
self._hg = hg
|
||||
self._index = taskcluster.Index()
|
||||
|
||||
@cachedmethod(operator.attrgetter('_cache'))
|
||||
def pushheads(self, tree, parent):
|
||||
def artifact_trees(self, rev, trees):
|
||||
# The "trees" argument is intentionally ignored. If this value
|
||||
# changes over time it means a changeset we care about has become
|
||||
# a pushhead on another tree, and our cache may no longer be
|
||||
# valid.
|
||||
rev_ns = 'buildbot.revisions.{rev}'.format(rev=rev)
|
||||
try:
|
||||
pushheads = subprocess.check_output([self._hg, 'log',
|
||||
'--template', '{node}\n',
|
||||
'-r', 'last(pushhead("{tree}") and ::"{parent}", {num})'.format(
|
||||
tree=tree, parent=parent, num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT)])
|
||||
# Filter blank lines.
|
||||
pushheads = [ pushhead for pushhead in pushheads.strip().split('\n') if pushhead ]
|
||||
if pushheads:
|
||||
return pushheads
|
||||
except subprocess.CalledProcessError as e:
|
||||
# We probably don't have the mozext extension installed.
|
||||
ret = subprocess.call([self._hg, 'showconfig', 'extensions.mozext'])
|
||||
if ret:
|
||||
raise Exception('Could not find candidate pushheads.\n\n'
|
||||
'You need to enable the "mozext" hg extension: '
|
||||
'see https://developer.mozilla.org/en-US/docs/Artifact_builds')
|
||||
raise e
|
||||
|
||||
# We probably don't have the pushlog database present locally. Check.
|
||||
tree_pushheads = subprocess.check_output([self._hg, 'log',
|
||||
'--template', '{node}\n',
|
||||
'-r', 'last(pushhead("{tree}"))'.format(tree=tree)])
|
||||
# Filter blank lines.
|
||||
tree_pushheads = [ pushhead for pushhead in tree_pushheads.strip().split('\n') if pushhead ]
|
||||
if tree_pushheads:
|
||||
# Okay, we have some pushheads but no candidates. This can happen
|
||||
# for legitimate reasons: old revisions with no upstream builds
|
||||
# remaining; or new revisions that don't have upstream builds yet.
|
||||
result = self._index.listNamespaces(rev_ns, {"limit": 10})
|
||||
except Exception:
|
||||
return []
|
||||
return [ns['name'] for ns in result['namespaces']]
|
||||
|
||||
raise Exception('Could not find any pushheads for tree "{tree}".\n\n'
|
||||
'Try running |hg pushlogsync|; '
|
||||
'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(tree=tree))
|
||||
|
||||
def print_last_item(self, args, sorted_kwargs, result):
|
||||
rev, trees = args
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{'rev': rev},
|
||||
'Last fetched trees for pushhead revision {rev}')
|
||||
|
||||
class TaskCache(CacheManager):
|
||||
'''Map candidate pushheads to Task Cluster task IDs and artifact URLs.'''
|
||||
@ -671,14 +651,84 @@ class Artifacts(object):
|
||||
'Unknown job {job}')
|
||||
raise KeyError("Unknown job")
|
||||
|
||||
self._pushhead_cache = PushHeadCache(self._hg, self._cache_dir, log=self._log)
|
||||
self._task_cache = TaskCache(self._cache_dir, log=self._log)
|
||||
self._artifact_cache = ArtifactCache(self._cache_dir, log=self._log)
|
||||
self._tree_cache = TreeCache(self._cache_dir, log=self._log)
|
||||
# A "tree" according to mozext and an integration branch isn't always
|
||||
# an exact match. For example, pushhead("central") refers to pushheads
|
||||
# with artifacts under the taskcluster namespace "mozilla-central".
|
||||
self._tree_replacements = {
|
||||
'inbound': 'mozilla-inbound',
|
||||
'central': 'mozilla-central',
|
||||
}
|
||||
|
||||
|
||||
def log(self, *args, **kwargs):
|
||||
if self._log:
|
||||
self._log(*args, **kwargs)
|
||||
|
||||
def _find_pushheads(self, parent):
|
||||
# Return an ordered dict associating revisions that are pushheads with
|
||||
# trees they are known to be in (starting with the first tree they're
|
||||
# known to be in).
|
||||
|
||||
try:
|
||||
output = subprocess.check_output([
|
||||
self._hg, 'log',
|
||||
'--template', '{node},{join(trees, ",")}\n',
|
||||
'-r', 'last(pushhead({tree}) and ::{parent}, {num})'.format(
|
||||
tree=self._tree or '', parent=parent, num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT)
|
||||
])
|
||||
except subprocess.CalledProcessError:
|
||||
# We probably don't have the mozext extension installed.
|
||||
ret = subprocess.call([self._hg, 'showconfig', 'extensions.mozext'])
|
||||
if ret:
|
||||
raise Exception('Could not find pushheads for recent revisions.\n\n'
|
||||
'You need to enable the "mozext" hg extension: '
|
||||
'see https://developer.mozilla.org/en-US/docs/Artifact_builds')
|
||||
raise
|
||||
|
||||
rev_trees = collections.OrderedDict()
|
||||
for line in output.splitlines():
|
||||
if not line:
|
||||
continue
|
||||
rev_info = line.split(',')
|
||||
if len(rev_info) == 1:
|
||||
# If pushhead() is true, it would seem "trees" should be
|
||||
# non-empty, but this is defensive.
|
||||
continue
|
||||
rev_trees[rev_info[0]] = tuple(rev_info[1:])
|
||||
|
||||
if not rev_trees:
|
||||
raise Exception('Could not find any candidate pushheads in the last {num} revisions.\n\n'
|
||||
'Try running |hg pushlogsync|;\n'
|
||||
'see https://developer.mozilla.org/en-US/docs/Artifact_builds'.format(
|
||||
num=NUM_PUSHHEADS_TO_QUERY_PER_PARENT))
|
||||
|
||||
return rev_trees
|
||||
|
||||
def find_pushhead_artifacts(self, task_cache, tree_cache, job, pushhead, trees):
|
||||
known_trees = set(tree_cache.artifact_trees(pushhead, trees))
|
||||
if not known_trees:
|
||||
return None
|
||||
# If we ever find a rev that's a pushhead on multiple trees, we want
|
||||
# the most recent one.
|
||||
for tree in reversed(trees):
|
||||
tree = self._tree_replacements.get(tree) or tree
|
||||
if tree not in known_trees:
|
||||
continue
|
||||
try:
|
||||
urls = task_cache.artifact_urls(tree, job, pushhead)
|
||||
except ValueError:
|
||||
continue
|
||||
if urls:
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{'pushhead': pushhead,
|
||||
'tree': tree},
|
||||
'Installing from remote pushhead {pushhead} on {tree}')
|
||||
return urls
|
||||
return None
|
||||
|
||||
def install_from_file(self, filename, distdir, install_callback=None):
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{'filename': filename},
|
||||
@ -734,35 +784,22 @@ class Artifacts(object):
|
||||
def install_from_hg(self, revset, distdir, install_callback=None):
|
||||
if not revset:
|
||||
revset = '.'
|
||||
if len(revset) != 40:
|
||||
revset = subprocess.check_output([self._hg, 'log', '--template', '{node}\n', '-r', revset]).strip()
|
||||
if len(revset.split('\n')) != 1:
|
||||
raise ValueError('hg revision specification must resolve to exactly one commit')
|
||||
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{'revset': revset},
|
||||
'Installing from local revision {revset}')
|
||||
|
||||
rev_pushheads = self._find_pushheads(revset)
|
||||
urls = None
|
||||
with self._task_cache as task_cache, self._pushhead_cache as pushhead_cache:
|
||||
# with blocks handle persistence.
|
||||
for pushhead in pushhead_cache.pushheads(self._tree, revset):
|
||||
# with blocks handle persistence.
|
||||
with self._task_cache as task_cache, self._tree_cache as tree_cache:
|
||||
while rev_pushheads:
|
||||
rev, trees = rev_pushheads.popitem(last=False)
|
||||
self.log(logging.DEBUG, 'artifact',
|
||||
{'pushhead': pushhead},
|
||||
'Trying to find artifacts for pushhead {pushhead}.')
|
||||
try:
|
||||
urls = task_cache.artifact_urls(self._tree, self._job, pushhead)
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{'pushhead': pushhead},
|
||||
'Installing from remote pushhead {pushhead}')
|
||||
break
|
||||
except ValueError:
|
||||
pass
|
||||
if urls:
|
||||
for url in urls:
|
||||
if self.install_from_url(url, distdir, install_callback=install_callback):
|
||||
return 1
|
||||
return 0
|
||||
{'rev': rev},
|
||||
'Trying to find artifacts for pushhead {rev}.')
|
||||
urls = self.find_pushhead_artifacts(task_cache, tree_cache,
|
||||
self._job, rev, trees)
|
||||
if urls:
|
||||
for url in urls:
|
||||
if self.install_from_url(url, distdir, install_callback=install_callback):
|
||||
return 1
|
||||
return 0
|
||||
self.log(logging.ERROR, 'artifact',
|
||||
{'revset': revset},
|
||||
'No built artifacts for {revset} found.')
|
||||
@ -804,6 +841,6 @@ class Artifacts(object):
|
||||
self.log(logging.INFO, 'artifact',
|
||||
{},
|
||||
'Printing cached artifacts and caches.')
|
||||
self._pushhead_cache.print_cache()
|
||||
self._tree_cache.print_cache()
|
||||
self._task_cache.print_cache()
|
||||
self._artifact_cache.print_cache()
|
||||
|
@ -1474,23 +1474,21 @@ class PackageFrontend(MachCommandBase):
|
||||
artifacts = Artifacts(tree, job, log=self.log, cache_dir=cache_dir, hg=hg)
|
||||
return artifacts
|
||||
|
||||
def _compute_defaults(self, tree=None, job=None):
|
||||
# Firefox front-end developers mostly use fx-team. Post auto-land, make this central.
|
||||
tree = tree or 'fx-team'
|
||||
def _compute_platform(self, job=None):
|
||||
if job:
|
||||
return (tree, job)
|
||||
return job
|
||||
if self.substs.get('MOZ_BUILD_APP', '') == 'mobile/android':
|
||||
if self.substs['ANDROID_CPU_ARCH'] == 'x86':
|
||||
return tree, 'android-x86'
|
||||
return tree, 'android-api-11'
|
||||
return 'android-x86'
|
||||
return 'android-api-11'
|
||||
# TODO: check for 32/64 bit builds. We'd like to use HAVE_64BIT_BUILD
|
||||
# but that relies on the compile environment.
|
||||
if self.defines.get('XP_LINUX', False):
|
||||
return tree, 'linux64'
|
||||
return 'linux64'
|
||||
if self.defines.get('XP_MACOSX', False):
|
||||
return tree, 'macosx64'
|
||||
return 'macosx64'
|
||||
if self.defines.get('XP_WIN', False):
|
||||
return tree, 'win32'
|
||||
return 'win32'
|
||||
raise Exception('Cannot determine default tree and job for |mach artifact|!')
|
||||
|
||||
@ArtifactSubCommand('artifact', 'install',
|
||||
@ -1502,7 +1500,7 @@ class PackageFrontend(MachCommandBase):
|
||||
default=None)
|
||||
def artifact_install(self, source=None, tree=None, job=None, verbose=False):
|
||||
self._set_log_level(verbose)
|
||||
tree, job = self._compute_defaults(tree, job)
|
||||
job = self._compute_platform(job)
|
||||
artifacts = self._make_artifacts(tree=tree, job=job)
|
||||
|
||||
manifest_path = mozpath.join(self.topobjdir, '_build_manifests', 'install', 'dist_bin')
|
||||
@ -1528,7 +1526,7 @@ class PackageFrontend(MachCommandBase):
|
||||
'Print the last pre-built artifact installed.')
|
||||
def artifact_print_last(self, tree=None, job=None, verbose=False):
|
||||
self._set_log_level(verbose)
|
||||
tree, job = self._compute_defaults(tree, job)
|
||||
job = self._compute_platform(job)
|
||||
artifacts = self._make_artifacts(tree=tree, job=job)
|
||||
artifacts.print_last()
|
||||
return 0
|
||||
@ -1537,7 +1535,7 @@ class PackageFrontend(MachCommandBase):
|
||||
'Print local artifact cache for debugging.')
|
||||
def artifact_print_cache(self, tree=None, job=None, verbose=False):
|
||||
self._set_log_level(verbose)
|
||||
tree, job = self._compute_defaults(tree, job)
|
||||
job = self._compute_platform(job)
|
||||
artifacts = self._make_artifacts(tree=tree, job=job)
|
||||
artifacts.print_cache()
|
||||
return 0
|
||||
@ -1546,7 +1544,7 @@ class PackageFrontend(MachCommandBase):
|
||||
'Delete local artifacts and reset local artifact cache.')
|
||||
def artifact_clear_cache(self, tree=None, job=None, verbose=False):
|
||||
self._set_log_level(verbose)
|
||||
tree, job = self._compute_defaults(tree, job)
|
||||
job = self._compute_platform(job)
|
||||
artifacts = self._make_artifacts(tree=tree, job=job)
|
||||
artifacts.clear_cache()
|
||||
return 0
|
||||
|
3
testing/config/external-media-tests-requirements.txt
Normal file
@ -0,0 +1,3 @@
-r marionette_requirements.txt
../external-media-tests/
../puppeteer/firefox/

@ -223,6 +223,9 @@ GeckoDriver.prototype.sendAsync = function(name, msg, cmdId) {
|
||||
let curRemoteFrame = this.curBrowser.frameManager.currentRemoteFrame;
|
||||
name = "Marionette:" + name;
|
||||
|
||||
// TODO(ato): When proxy.AsyncMessageChannel
|
||||
// is used for all chrome <-> content communication
|
||||
// this can be removed.
|
||||
if (cmdId) {
|
||||
msg.command_id = cmdId;
|
||||
}
|
||||
@ -242,8 +245,8 @@ GeckoDriver.prototype.sendAsync = function(name, msg, cmdId) {
|
||||
this.mm.sendAsyncMessage(name + remoteFrameId, msg);
|
||||
} catch (e) {
|
||||
switch(e.result) {
|
||||
case Components.results.NS_ERROR_FAILURE:
|
||||
case Components.results.NS_ERROR_NOT_INITIALIZED:
|
||||
case Cr.NS_ERROR_FAILURE:
|
||||
case Cr.NS_ERROR_NOT_INITIALIZED:
|
||||
throw new NoSuchWindowError();
|
||||
default:
|
||||
throw new WebDriverError(e.toString());
|
||||
|
@ -1,2 +1,2 @@
marionette-transport == 1.1.0
marionette-transport == 1.2.0
mozrunner >= 6.9

@ -4,7 +4,7 @@
|
||||
|
||||
"use strict";
|
||||
|
||||
var {interfaces: Ci, utils: Cu} = Components;
|
||||
const {interfaces: Ci, utils: Cu} = Components;
|
||||
|
||||
const errors = [
|
||||
"ElementNotAccessibleError",
|
||||
@ -90,15 +90,21 @@ error.stringify = function(err) {
|
||||
};
|
||||
|
||||
/**
|
||||
* Marshal an Error to a JSON structure.
|
||||
* Marshal a WebDriverError prototype to a JSON dictionary.
|
||||
*
|
||||
* @param {Error} err
|
||||
* The Error to serialise.
|
||||
* @param {WebDriverError} err
|
||||
* Error to serialise.
|
||||
*
|
||||
* @return {Object.<string, Object>}
|
||||
* JSON structure with the keys "error", "message", and "stacktrace".
|
||||
* JSON dictionary with the keys "error", "message", and "stacktrace".
|
||||
* @throws {TypeError}
|
||||
* If error type is not serialisable.
|
||||
*/
|
||||
error.toJson = function(err) {
|
||||
if (!error.isWebDriverError(err)) {
|
||||
throw new TypeError(`Unserialisable error type: ${err}`);
|
||||
}
|
||||
|
||||
let json = {
|
||||
error: err.status,
|
||||
message: err.message || null,
|
||||
@ -107,6 +113,28 @@ error.toJson = function(err) {
|
||||
return json;
|
||||
};
|
||||
|
||||
/**
|
||||
* Unmarshal a JSON dictionary to a WebDriverError prototype.
|
||||
*
|
||||
* @param {Object.<string, string>} json
|
||||
* JSON dictionary with the keys "error", "message", and "stacktrace".
|
||||
*
|
||||
* @return {WebDriverError}
|
||||
* Deserialised error prototype.
|
||||
*/
|
||||
error.fromJson = function(json) {
|
||||
if (!statusLookup.has(json.error)) {
|
||||
throw new TypeError(`Undeserialisable error type: ${json.error}`);
|
||||
}
|
||||
|
||||
let errCls = statusLookup.get(json.error);
|
||||
let err = new errCls(json.message);
|
||||
if ("stacktrace" in json) {
|
||||
err.stack = json.stacktrace;
|
||||
}
|
||||
return err;
|
||||
};
|
||||
|
||||
/**
|
||||
* WebDriverError is the prototypal parent of all WebDriver errors.
|
||||
* It should not be used directly, as it does not correspond to a real
|
||||
@ -297,3 +325,12 @@ this.UnsupportedOperationError = function(msg) {
|
||||
this.status = "unsupported operation";
|
||||
};
|
||||
UnsupportedOperationError.prototype = Object.create(WebDriverError.prototype);
|
||||
|
||||
const nameLookup = new Map();
|
||||
const statusLookup = new Map();
|
||||
for (let s of errors) {
|
||||
let cls = this[s];
|
||||
let inst = new cls();
|
||||
nameLookup.set(inst.name, cls);
|
||||
statusLookup.set(inst.status, cls);
|
||||
};
|
||||
|
@ -184,7 +184,7 @@ function dispatch(fn) {
|
||||
if (typeof rv == "undefined") {
|
||||
sendOk(id);
|
||||
} else {
|
||||
sendResponse({value: rv}, id);
|
||||
sendResponse(rv, id);
|
||||
}
|
||||
};
|
||||
|
||||
@ -398,42 +398,56 @@ function deleteSession(msg) {
|
||||
actions.touchIds = {};
|
||||
}
|
||||
|
||||
/*
|
||||
* Helper methods
|
||||
*/
|
||||
|
||||
/**
|
||||
* Generic method to send a message to the server
|
||||
* Send asynchronous reply to chrome.
|
||||
*
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the request.
|
||||
* @param {AsyncContentSender.ResponseType} type
|
||||
* Type of response.
|
||||
* @param {?=} data
|
||||
* JSON serialisable object to accompany the message. Defaults to
|
||||
* an empty dictionary.
|
||||
*/
|
||||
function sendToServer(path, data = {}, objs, id) {
|
||||
if (id) {
|
||||
data.command_id = id;
|
||||
}
|
||||
sendAsyncMessage(path, data, objs);
|
||||
function sendToServer(uuid, data = undefined) {
|
||||
let channel = new proxy.AsyncMessageChannel(
|
||||
() => this,
|
||||
sendAsyncMessage.bind(this));
|
||||
channel.reply(uuid, data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send response back to server
|
||||
* Send asynchronous reply with value to chrome.
|
||||
*
|
||||
* @param {?} obj
|
||||
* JSON serialisable object of arbitrary type and complexity.
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the request.
|
||||
*/
|
||||
function sendResponse(value, id) {
|
||||
let path = proxy.AsyncContentSender.makeReplyPath(id);
|
||||
sendToServer(path, value, null, id);
|
||||
function sendResponse(obj, id) {
|
||||
sendToServer(id, obj);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send ack back to server
|
||||
* Send asynchronous reply to chrome.
|
||||
*
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the request.
|
||||
*/
|
||||
function sendOk(id) {
|
||||
let path = proxy.AsyncContentSender.makeReplyPath(id);
|
||||
sendToServer(path, {}, null, id);
|
||||
function sendOk(uuid) {
|
||||
sendToServer(uuid);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send error message to server
|
||||
* Send asynchronous error reply to chrome.
|
||||
*
|
||||
* @param {Error} err
|
||||
* Error to notify chrome of.
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the request.
|
||||
*/
|
||||
function sendError(err, id) {
|
||||
let path = proxy.AsyncContentSender.makeReplyPath(id);
|
||||
sendToServer(path, {error: null}, {error: err}, id);
|
||||
function sendError(err, uuid) {
|
||||
sendToServer(uuid, err);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -549,7 +563,7 @@ function createExecuteContentSandbox(win, timeout) {
|
||||
_emu_cbs = {};
|
||||
sendError(new WebDriverError("Emulator callback still pending when finish() called"), id);
|
||||
} else {
|
||||
sendResponse({value: elementManager.wrapValue(obj)}, id);
|
||||
sendResponse(elementManager.wrapValue(obj), id);
|
||||
}
|
||||
}
|
||||
|
||||
@ -631,7 +645,7 @@ function executeScript(msg, directInject) {
|
||||
sendError(new JavaScriptError("Marionette.finish() not called"), asyncTestCommandId);
|
||||
}
|
||||
else {
|
||||
sendResponse({value: elementManager.wrapValue(res)}, asyncTestCommandId);
|
||||
sendResponse(elementManager.wrapValue(res), asyncTestCommandId);
|
||||
}
|
||||
}
|
||||
else {
|
||||
@ -657,7 +671,7 @@ function executeScript(msg, directInject) {
|
||||
sendSyncMessage("Marionette:shareData",
|
||||
{log: elementManager.wrapValue(marionetteLogObj.getLogs())});
|
||||
marionetteLogObj.clearLogs();
|
||||
sendResponse({value: elementManager.wrapValue(res)}, asyncTestCommandId);
|
||||
sendResponse(elementManager.wrapValue(res), asyncTestCommandId);
|
||||
}
|
||||
} catch (e) {
|
||||
let err = new JavaScriptError(
|
||||
@ -1713,7 +1727,7 @@ function switchToFrame(msg) {
|
||||
checkTimer.initWithCallback(checkLoad, 100, Ci.nsITimer.TYPE_ONE_SHOT);
|
||||
}
|
||||
|
||||
sendResponse({value: rv}, command_id);
|
||||
sendResponse(rv, command_id);
|
||||
}
|
||||
|
||||
function addCookie(cookie) {
|
||||
@ -1766,8 +1780,8 @@ function deleteAllCookies() {
|
||||
}
|
||||
|
||||
function getAppCacheStatus(msg) {
|
||||
sendResponse({ value: curContainer.frame.applicationCache.status },
|
||||
msg.json.command_id);
|
||||
sendResponse(
|
||||
curContainer.frame.applicationCache.status, msg.json.command_id);
|
||||
}
|
||||
|
||||
// emulator callbacks
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
|
||||
|
||||
Cu.import("chrome://marionette/content/error.js");
|
||||
Cu.import("chrome://marionette/content/modal.js");
|
||||
|
||||
this.EXPORTED_SYMBOLS = ["proxy"];
|
||||
@ -44,20 +45,19 @@ this.proxy = {};
|
||||
* Callback for sending async messages.
|
||||
*/
|
||||
proxy.toListener = function(mmFn, sendAsyncFn) {
|
||||
let sender = new proxy.AsyncContentSender(mmFn, sendAsyncFn);
|
||||
let sender = new proxy.AsyncMessageChannel(mmFn, sendAsyncFn);
|
||||
return new Proxy(sender, ownPriorityGetterTrap);
|
||||
};
|
||||
|
||||
/**
|
||||
* With the AsyncContentSender it is possible to make asynchronous calls
|
||||
* to the message listener in a frame script.
|
||||
* Provides a transparent interface between chrome- and content space.
|
||||
*
|
||||
* The responses from content are expected to be JSON Objects, where an
|
||||
* {@code error} key indicates that an error occured, and a {@code value}
|
||||
* entry that the operation was successful. It is the value of the
|
||||
* {@code value} key that is returned to the consumer through a promise.
|
||||
* The AsyncMessageChannel is an abstraction of the message manager
|
||||
* IPC architecture allowing calls to be made to any registered message
|
||||
* listener in Marionette. The {@code #send(...)} method returns a promise
|
||||
* that gets resolved when the message handler calls {@code .reply(...)}.
|
||||
*/
|
||||
proxy.AsyncContentSender = class {
|
||||
proxy.AsyncMessageChannel = class {
|
||||
constructor(mmFn, sendAsyncFn) {
|
||||
this.sendAsync = sendAsyncFn;
|
||||
// TODO(ato): Bug 1242595
|
||||
@ -73,8 +73,14 @@ proxy.AsyncContentSender = class {
|
||||
}
|
||||
|
||||
/**
|
||||
* Call registered function in the frame script environment of the
|
||||
* current browsing context's content frame.
|
||||
* Send a message across the channel. The name of the function to
|
||||
* call must be registered as a message listener.
|
||||
*
|
||||
* Usage:
|
||||
*
|
||||
* let channel = new AsyncMessageChannel(
|
||||
* messageManager, sendAsyncMessage.bind(this));
|
||||
* let rv = yield channel.send("remoteFunction", ["argument"]);
|
||||
*
|
||||
* @param {string} name
|
||||
* Function to call in the listener, e.g. for the message listener
|
||||
@ -86,6 +92,10 @@ proxy.AsyncContentSender = class {
|
||||
*
|
||||
* @return {Promise}
|
||||
* A promise that resolves to the result of the command.
|
||||
* @throws {TypeError}
|
||||
* If an unsupported reply type is received.
|
||||
* @throws {WebDriverError}
|
||||
* If an error is returned over the channel.
|
||||
*/
|
||||
send(name, args = []) {
|
||||
let uuid = uuidgen.generateUUID().toString();
|
||||
@ -93,15 +103,27 @@ proxy.AsyncContentSender = class {
|
||||
this.activeMessageId = uuid;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let path = proxy.AsyncContentSender.makeReplyPath(uuid);
|
||||
let path = proxy.AsyncMessageChannel.makePath(uuid);
|
||||
let cb = msg => {
|
||||
this.activeMessageId = null;
|
||||
if ("error" in msg.json) {
|
||||
reject(msg.objects.error);
|
||||
} else {
|
||||
resolve(msg.json.value);
|
||||
|
||||
switch (msg.json.type) {
|
||||
case proxy.AsyncMessageChannel.ReplyType.Ok:
|
||||
case proxy.AsyncMessageChannel.ReplyType.Value:
|
||||
resolve(msg.json.data);
|
||||
break;
|
||||
|
||||
case proxy.AsyncMessageChannel.ReplyType.Error:
|
||||
let err = error.fromJson(msg.json.data);
|
||||
reject(err);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new TypeError(
|
||||
`Unknown async response type: ${msg.json.type}`);
|
||||
}
|
||||
};
|
||||
|
||||
this.dialogueObserver_ = (subject, topic) => {
|
||||
this.cancelAll();
|
||||
resolve();
|
||||
@ -112,13 +134,80 @@ proxy.AsyncContentSender = class {
|
||||
this.addListener_(path, cb);
|
||||
modal.addHandler(this.dialogueObserver_);
|
||||
|
||||
// sendAsync is GeckoDriver#sendAsync
|
||||
this.sendAsync(name, marshal(args), uuid);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Reply to an asynchronous request.
|
||||
*
|
||||
* Passing an WebDriverError prototype will cause the receiving channel
|
||||
* to throw this error.
|
||||
*
|
||||
* Usage:
|
||||
*
|
||||
* let channel = proxy.AsyncMessageChannel(
|
||||
* messageManager, sendAsyncMessage.bind(this));
|
||||
*
|
||||
* // throws in requester:
|
||||
* channel.reply(uuid, new WebDriverError());
|
||||
*
|
||||
* // returns with value:
|
||||
* channel.reply(uuid, "hello world!");
|
||||
*
|
||||
* // returns with undefined:
|
||||
* channel.reply(uuid);
|
||||
*
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the request.
|
||||
* @param {?=} obj
|
||||
* Message data to reply with.
|
||||
*/
|
||||
reply(uuid, obj = undefined) {
|
||||
// TODO(ato): Eventually the uuid will be hidden in the dispatcher
|
||||
// in listener, and passing it explicitly to this function will be
|
||||
// unnecessary.
|
||||
if (typeof obj == "undefined") {
|
||||
this.sendReply_(uuid, proxy.AsyncMessageChannel.ReplyType.Ok);
|
||||
} else if (error.isError(obj)) {
|
||||
let serr = error.toJson(obj);
|
||||
this.sendReply_(uuid, proxy.AsyncMessageChannel.ReplyType.Error, serr);
|
||||
} else {
|
||||
this.sendReply_(uuid, proxy.AsyncMessageChannel.ReplyType.Value, obj);
|
||||
}
|
||||
}
|
||||
|
||||
sendReply_(uuid, type, data = undefined) {
|
||||
let path = proxy.AsyncMessageChannel.makePath(uuid);
|
||||
let msg = {type: type, data: data};
|
||||
// here sendAsync is actually the content frame's
|
||||
// sendAsyncMessage(path, message) global
|
||||
this.sendAsync(path, msg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Produces a path, or a name, for the message listener handler that
|
||||
* listens for a reply.
|
||||
*
|
||||
* @param {UUID} uuid
|
||||
* Unique identifier of the channel request.
|
||||
*
|
||||
* @return {string}
|
||||
* Path to be used for nsIMessageListener.addMessageListener.
|
||||
*/
|
||||
static makePath(uuid) {
|
||||
return "Marionette:asyncReply:" + uuid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort listening for responses, remove all modal dialogue handlers,
|
||||
* and cancel any ongoing requests in the listener.
|
||||
*/
|
||||
cancelAll() {
|
||||
this.removeAllListeners_();
|
||||
modal.removeHandler(this.dialogueObserver_);
|
||||
// TODO(ato): It's not ideal to have listener specific behaviour here:
|
||||
this.sendAsync("cancelRequest");
|
||||
}
|
||||
|
||||
@ -146,10 +235,11 @@ proxy.AsyncContentSender = class {
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
static makeReplyPath(uuid) {
|
||||
return "Marionette:asyncReply:" + uuid;
|
||||
}
|
||||
};
|
||||
proxy.AsyncMessageChannel.ReplyType = {
|
||||
Ok: 0,
|
||||
Value: 1,
|
||||
Error: 2,
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -52,19 +52,40 @@ add_test(function test_stringify() {
|
||||
});
|
||||
|
||||
add_test(function test_toJson() {
|
||||
deepEqual({error: "a", message: null, stacktrace: null},
|
||||
error.toJson({status: "a"}));
|
||||
deepEqual({error: "a", message: "b", stacktrace: null},
|
||||
error.toJson({status: "a", message: "b"}));
|
||||
deepEqual({error: "a", message: "b", stacktrace: "c"},
|
||||
error.toJson({status: "a", message: "b", stack: "c"}));
|
||||
Assert.throws(() => error.toJson(new Error()),
|
||||
/Unserialisable error type: [object Error]/);
|
||||
|
||||
let e1 = new Error("b");
|
||||
deepEqual({error: undefined, message: "b", stacktrace: e1.stack},
|
||||
let e1 = new WebDriverError("a");
|
||||
deepEqual({error: e1.status, message: "a", stacktrace: null},
|
||||
error.toJson(e1));
|
||||
let e2 = new WebDriverError("b");
|
||||
deepEqual({error: e2.status, message: "b", stacktrace: null},
|
||||
error.toJson(e2));
|
||||
|
||||
let e2 = new JavaScriptError("first", "second", "third", "fourth");
|
||||
let e2s = error.toJson(e2);
|
||||
equal(e2.status, e2s.error);
|
||||
equal(e2.message, e2s.message);
|
||||
ok(e2s.stacktrace.match(/second/));
|
||||
ok(e2s.stacktrace.match(/third/));
|
||||
ok(e2s.stacktrace.match(/fourth/));
|
||||
|
||||
run_next_test();
|
||||
});
|
||||
|
||||
add_test(function test_fromJson() {
|
||||
Assert.throws(() => error.fromJson({error: "foo"}),
|
||||
/Undeserialisable error type: foo/);
|
||||
Assert.throws(() => error.fromJson({error: "Error"}),
|
||||
/Undeserialisable error type: Error/);
|
||||
Assert.throws(() => error.fromJson({}),
|
||||
/Undeserialisable error type: undefined/);
|
||||
|
||||
let e1 = new WebDriverError("1");
|
||||
deepEqual(e1, error.fromJson({error: "webdriver error", message: "1"}));
|
||||
let e2 = new InvalidArgumentError("2");
|
||||
deepEqual(e2, error.fromJson({error: "invalid argument", message: "2"}));
|
||||
|
||||
let e3 = new JavaScriptError("first", "second", "third", "fourth");
|
||||
let e3s = error.toJson(e3);
|
||||
deepEqual(e3, error.fromJson(e3s));
|
||||
|
||||
run_next_test();
|
||||
});
|
||||
|
@ -33,6 +33,7 @@ ifeq ($(OS_ARCH),WINNT)
|
||||
TEST_HARNESS_BINS += \
|
||||
crashinject$(BIN_SUFFIX) \
|
||||
crashinjectdll$(DLL_SUFFIX) \
|
||||
minidumpwriter$(BIN_SUFFIX) \
|
||||
$(NULL)
|
||||
endif
|
||||
|
||||
|
@ -57,6 +57,13 @@ from mozrunner.utils import get_stack_fixer_function, test_environment
|
||||
from mozscreenshot import dump_screen
|
||||
import mozleak
|
||||
|
||||
HAVE_PSUTIL = False
|
||||
try:
|
||||
import psutil
|
||||
HAVE_PSUTIL = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
@ -1768,7 +1775,7 @@ class MochitestDesktop(MochitestBase):
|
||||
Also attempts to obtain a screenshot before killing the process
|
||||
if specified.
|
||||
"""
|
||||
|
||||
self.log.info("Killing process: %s" % processPID)
|
||||
if dump_screen:
|
||||
self.dumpScreen(utilityPath)
|
||||
|
||||
@ -1776,7 +1783,8 @@ class MochitestDesktop(MochitestBase):
|
||||
try:
|
||||
minidump_path = os.path.join(self.profile.profile,
|
||||
'minidumps')
|
||||
mozcrash.kill_and_get_minidump(processPID, minidump_path)
|
||||
mozcrash.kill_and_get_minidump(processPID, minidump_path,
|
||||
utilityPath)
|
||||
except OSError:
|
||||
# https://bugzilla.mozilla.org/show_bug.cgi?id=921509
|
||||
self.log.info(
|
||||
@ -1785,6 +1793,26 @@ class MochitestDesktop(MochitestBase):
|
||||
self.log.info("Can't trigger Breakpad, just killing process")
|
||||
killPid(processPID, self.log)
|
||||
|
||||
def extract_child_pids(self, process_log, parent_pid=None):
|
||||
"""Parses the given log file for the pids of any processes launched by
|
||||
the main process and returns them as a list.
|
||||
If parent_pid is provided, and psutil is available, returns children of
|
||||
parent_pid according to psutil.
|
||||
"""
|
||||
if parent_pid and HAVE_PSUTIL:
|
||||
self.log.info("Determining child pids from psutil")
|
||||
return [p.pid for p in psutil.Process(parent_pid).children()]
|
||||
|
||||
rv = []
|
||||
pid_re = re.compile(r'==> process \d+ launched child process (\d+)')
|
||||
with open(process_log) as fd:
|
||||
for line in fd:
|
||||
self.log.info(line.rstrip())
|
||||
m = pid_re.search(line)
|
||||
if m:
|
||||
rv.append(int(m.group(1)))
|
||||
return rv
|
||||
|
||||
def checkForZombies(self, processLog, utilityPath, debuggerInfo):
|
||||
"""Look for hung processes"""
|
||||
|
||||
@ -1798,15 +1826,7 @@ class MochitestDesktop(MochitestBase):
|
||||
|
||||
# scan processLog for zombies
|
||||
self.log.info('zombiecheck | Reading PID log: %s' % processLog)
|
||||
processList = []
|
||||
pidRE = re.compile(r'launched child process (\d+)$')
|
||||
with open(processLog) as processLogFD:
|
||||
for line in processLogFD:
|
||||
self.log.info(line.rstrip())
|
||||
m = pidRE.search(line)
|
||||
if m:
|
||||
processList.append(int(m.group(1)))
|
||||
|
||||
processList = self.extract_child_pids(processLog)
|
||||
# kill zombies
|
||||
foundZombie = False
|
||||
for processPID in processList:
|
||||
@ -1943,7 +1963,8 @@ class MochitestDesktop(MochitestBase):
|
||||
proc,
|
||||
utilityPath,
|
||||
debuggerInfo,
|
||||
browserProcessId)
|
||||
browserProcessId,
|
||||
processLog)
|
||||
kp_kwargs = {'kill_on_timeout': False,
|
||||
'cwd': SCRIPT_DIR,
|
||||
'onTimeout': [timeoutHandler]}
|
||||
@ -2373,29 +2394,44 @@ class MochitestDesktop(MochitestBase):
|
||||
|
||||
return status
|
||||
|
||||
def handleTimeout(
|
||||
self,
|
||||
timeout,
|
||||
proc,
|
||||
utilityPath,
|
||||
debuggerInfo,
|
||||
browserProcessId):
|
||||
def handleTimeout(self, timeout, proc, utilityPath, debuggerInfo,
|
||||
browser_pid, processLog):
|
||||
"""handle process output timeout"""
|
||||
# TODO: bug 913975 : _processOutput should call self.processOutputLine
|
||||
# one more time on timeout (I think)
|
||||
error_message = "TEST-UNEXPECTED-TIMEOUT | %s | application timed out after %d seconds with no output" % (
|
||||
self.lastTestSeen, int(timeout))
|
||||
|
||||
self.message_logger.dump_buffered()
|
||||
self.message_logger.buffering = False
|
||||
self.log.info(error_message)
|
||||
|
||||
browserProcessId = browserProcessId or proc.pid
|
||||
self.killAndGetStack(
|
||||
browserProcessId,
|
||||
utilityPath,
|
||||
debuggerInfo,
|
||||
dump_screen=not debuggerInfo)
|
||||
browser_pid = browser_pid or proc.pid
|
||||
child_pids = self.extract_child_pids(processLog, browser_pid)
|
||||
self.log.info('Found child pids: %s' % child_pids)
|
||||
|
||||
if HAVE_PSUTIL:
|
||||
child_procs = [psutil.Process(pid) for pid in child_pids]
|
||||
for pid in child_pids:
|
||||
self.killAndGetStack(pid, utilityPath, debuggerInfo,
|
||||
dump_screen=not debuggerInfo)
|
||||
gone, alive = psutil.wait_procs(child_procs, timeout=30)
|
||||
for p in gone:
|
||||
self.log.info('psutil found pid %s dead' % p.pid)
|
||||
for p in alive:
|
||||
self.log.warning('failed to kill pid %d after 30s' %
|
||||
p.pid)
|
||||
else:
|
||||
self.log.error("psutil not available! Will wait 30s before "
|
||||
"attempting to kill parent process. This should "
|
||||
"not occur in mozilla automation. See bug 1143547.")
|
||||
for pid in child_pids:
|
||||
self.killAndGetStack(pid, utilityPath, debuggerInfo,
|
||||
dump_screen=not debuggerInfo)
|
||||
if child_pids:
|
||||
time.sleep(30)
|
||||
|
||||
self.killAndGetStack(browser_pid, utilityPath, debuggerInfo,
|
||||
dump_screen=not debuggerInfo)
|
||||
|
||||
class OutputHandler(object):
|
||||
|
||||
|
@ -364,7 +364,7 @@ if mozinfo.isWin:
|
||||
OpenProcess = kernel32.OpenProcess
|
||||
CloseHandle = kernel32.CloseHandle
|
||||
|
||||
def write_minidump(pid, dump_directory):
|
||||
def write_minidump(pid, dump_directory, utility_path):
|
||||
"""
|
||||
Write a minidump for a process.
|
||||
|
||||
@ -379,13 +379,38 @@ if mozinfo.isWin:
|
||||
FILE_ATTRIBUTE_NORMAL = 0x80
|
||||
INVALID_HANDLE_VALUE = -1
|
||||
|
||||
file_name = os.path.join(dump_directory,
|
||||
str(uuid.uuid4()) + ".dmp")
|
||||
|
||||
if (mozinfo.info['bits'] != ctypes.sizeof(ctypes.c_voidp) * 8 and
|
||||
utility_path):
|
||||
# We're not going to be able to write a minidump with ctypes if our
|
||||
# python process was compiled for a different architecture than
|
||||
# firefox, so we invoke the minidumpwriter utility program.
|
||||
|
||||
log = get_logger()
|
||||
minidumpwriter = os.path.normpath(os.path.join(utility_path,
|
||||
"minidumpwriter.exe"))
|
||||
log.info("Using %s to write a dump to %s for [%d]" %
|
||||
(minidumpwriter, file_name, pid))
|
||||
if not os.path.exists(minidumpwriter):
|
||||
log.error("minidumpwriter not found in %s" % utility_path)
|
||||
return
|
||||
|
||||
if isinstance(file_name, unicode):
|
||||
# Convert to a byte string before sending to the shell.
|
||||
file_name = file_name.encode(sys.getfilesystemencoding())
|
||||
|
||||
status = subprocess.Popen([minidumpwriter, str(pid), file_name]).wait()
|
||||
if status:
|
||||
log.error("minidumpwriter exited with status: %d" % status)
|
||||
return
|
||||
|
||||
proc_handle = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
|
||||
0, pid)
|
||||
if not proc_handle:
|
||||
return
|
||||
|
||||
file_name = os.path.join(dump_directory,
|
||||
str(uuid.uuid4()) + ".dmp")
|
||||
if not isinstance(file_name, unicode):
|
||||
# Convert to unicode explicitly so our path will be valid as input
|
||||
# to CreateFileW
|
||||
@ -433,7 +458,7 @@ else:
|
||||
"""
|
||||
os.kill(pid, signal.SIGKILL)
|
||||
|
||||
def kill_and_get_minidump(pid, dump_directory=None):
|
||||
def kill_and_get_minidump(pid, dump_directory, utility_path=None):
|
||||
"""
|
||||
Attempt to kill a process and leave behind a minidump describing its
|
||||
execution state.
|
||||
@ -453,7 +478,7 @@ def kill_and_get_minidump(pid, dump_directory=None):
|
||||
"""
|
||||
needs_killing = True
|
||||
if mozinfo.isWin:
|
||||
write_minidump(pid, dump_directory)
|
||||
write_minidump(pid, dump_directory, utility_path)
|
||||
elif mozinfo.isLinux or mozinfo.isMac:
|
||||
os.kill(pid, signal.SIGABRT)
|
||||
needs_killing = False
|
||||
|
@ -12,7 +12,6 @@ config = {
|
||||
'python': '/tools/buildbot/bin/python',
|
||||
'virtualenv': ['/tools/buildbot/bin/python', '/tools/misc-python/virtualenv.py'],
|
||||
'tooltool.py': "/tools/tooltool.py",
|
||||
'gittool.py': os.path.join(external_tools_path, 'gittool.py'),
|
||||
},
|
||||
|
||||
"find_links": [
|
||||
@ -26,7 +25,6 @@ config = {
|
||||
"default_actions": [
|
||||
'clobber',
|
||||
'read-buildbot-config',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
@ -39,10 +37,6 @@ config = {
|
||||
"download_minidump_stackwalk": True,
|
||||
"download_symbols": "ondemand",
|
||||
|
||||
"firefox_media_repo": 'https://github.com/mjzffr/firefox-media-tests.git',
|
||||
"firefox_media_branch": 'master',
|
||||
"firefox_media_rev": '0830e972e4b95fef3507207fc6bce028da27f2d3',
|
||||
|
||||
"suite_definitions": {
|
||||
"media-tests": {
|
||||
"options": [],
|
||||
|
@ -17,8 +17,6 @@ config = {
|
||||
'mozinstall': ['%s/build/venv/scripts/python' % os.getcwd(),
|
||||
'%s/build/venv/scripts/mozinstall-script.py' % os.getcwd()],
|
||||
'tooltool.py': [sys.executable, 'C:/mozilla-build/tooltool.py'],
|
||||
'gittool.py': [sys.executable,
|
||||
os.path.join(external_tools_path, 'gittool.py')],
|
||||
'hgtool.py': [sys.executable,
|
||||
os.path.join(external_tools_path, 'hgtool.py')],
|
||||
|
||||
@ -36,7 +34,6 @@ config = {
|
||||
"default_actions": [
|
||||
'clobber',
|
||||
'read-buildbot-config',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
@ -50,10 +47,6 @@ config = {
|
||||
"download_minidump_stackwalk": True,
|
||||
"download_symbols": "ondemand",
|
||||
|
||||
"firefox_media_repo": 'https://github.com/mjzffr/firefox-media-tests.git',
|
||||
"firefox_media_branch": 'master',
|
||||
"firefox_media_rev": '0830e972e4b95fef3507207fc6bce028da27f2d3',
|
||||
|
||||
"suite_definitions": {
|
||||
"media-tests": {
|
||||
"options": [],
|
||||
|
@ -26,10 +26,6 @@ config = {
|
||||
'download_symbols': 'ondemand',
|
||||
'download_tooltool': True,
|
||||
|
||||
# Version control information
|
||||
'firefox_media_repo': 'https://github.com/mjzffr/firefox-media-tests.git',
|
||||
'firefox_media_branch': 'master',
|
||||
|
||||
# Default test suite
|
||||
'test_suite': 'media-tests',
|
||||
|
||||
@ -46,7 +42,6 @@ config = {
|
||||
|
||||
'default_actions': [
|
||||
'clobber',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
@ -55,18 +50,3 @@ config = {
|
||||
|
||||
}
|
||||
|
||||
# General local variable overwrite
|
||||
# Bug 1227079 - Python executable needed to get it executed on Windows
|
||||
if platform.system() == 'Windows':
|
||||
gittool = [
|
||||
sys.executable,
|
||||
os.path.join(external_tools_path, 'gittool.py')
|
||||
]
|
||||
else:
|
||||
gittool = os.path.join(external_tools_path, 'gittool.py')
|
||||
|
||||
exes = {
|
||||
'gittool.py' : gittool,
|
||||
}
|
||||
|
||||
config['exes'] = exes
|
||||
|
@ -57,20 +57,6 @@ media_test_config_options = [
|
||||
"default": False,
|
||||
"help": "Enable e10s when running marionette tests."
|
||||
}],
|
||||
[['--firefox-media-repo'], {
|
||||
'dest': 'firefox_media_repo',
|
||||
'default': 'https://github.com/mjzffr/firefox-media-tests.git',
|
||||
'help': 'which firefox_media_tests repo to use',
|
||||
}],
|
||||
[['--firefox-media-branch'], {
|
||||
'dest': 'firefox_media_branch',
|
||||
'default': 'master',
|
||||
'help': 'which branch to use for firefox_media_tests',
|
||||
}],
|
||||
[['--firefox-media-rev'], {
|
||||
'dest': 'firefox_media_rev',
|
||||
'help': 'which firefox_media_tests revision to use',
|
||||
}],
|
||||
[["--suite"],
|
||||
{"action": "store",
|
||||
"dest": "test_suite",
|
||||
@ -127,7 +113,6 @@ class FirefoxMediaTestsBase(TestingMixin, VCSToolsScript):
|
||||
self.config_options = media_test_config_options + (config_options or [])
|
||||
actions = [
|
||||
'clobber',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
@ -155,72 +140,60 @@ class FirefoxMediaTestsBase(TestingMixin, VCSToolsScript):
|
||||
@PreScriptAction('create-virtualenv')
|
||||
def _pre_create_virtualenv(self, action):
|
||||
dirs = self.query_abs_dirs()
|
||||
marionette_requirements = os.path.join(dirs['abs_test_install_dir'],
|
||||
'config',
|
||||
'marionette_requirements.txt')
|
||||
if os.access(marionette_requirements, os.F_OK):
|
||||
self.register_virtualenv_module(requirements=[marionette_requirements],
|
||||
two_pass=True)
|
||||
|
||||
media_tests_requirements = os.path.join(dirs['firefox_media_dir'],
|
||||
'requirements.txt')
|
||||
media_tests_requirements = os.path.join(dirs['abs_test_install_dir'],
|
||||
'config',
|
||||
'external-media-tests-requirements.txt')
|
||||
|
||||
if os.access(media_tests_requirements, os.F_OK):
|
||||
self.register_virtualenv_module(requirements=[media_tests_requirements],
|
||||
two_pass=True)
|
||||
|
||||
def download_and_extract(self):
|
||||
"""Overriding method from TestingMixin until firefox-media-tests are in tree.
|
||||
"""Overriding method from TestingMixin for more specific behavior.
|
||||
|
||||
Right now we only care about the installer and symbols.
|
||||
We use the test_packages_url command line argument to check where to get the
|
||||
harness, puppeteer, and tests from and how to set them up.
|
||||
|
||||
"""
|
||||
self._download_installer()
|
||||
|
||||
if self.config.get('download_symbols'):
|
||||
self._download_and_extract_symbols()
|
||||
target_unzip_dirs = ['config/*',
|
||||
'external-media-tests/*',
|
||||
'marionette/*',
|
||||
'mozbase/*',
|
||||
'puppeteer/*',
|
||||
'tools/wptserve/*',
|
||||
]
|
||||
super(FirefoxMediaTestsBase, self).download_and_extract(
|
||||
target_unzip_dirs=target_unzip_dirs)
|
||||
|
||||
def query_abs_dirs(self):
|
||||
if self.abs_dirs:
|
||||
return self.abs_dirs
|
||||
abs_dirs = super(FirefoxMediaTestsBase, self).query_abs_dirs()
|
||||
dirs = {
|
||||
'firefox_media_dir': os.path.join(abs_dirs['abs_work_dir'],
|
||||
'firefox-media-tests')
|
||||
}
|
||||
dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'],
|
||||
'abs_test_install_dir' : os.path.join(abs_dirs['abs_work_dir'],
|
||||
'tests')
|
||||
}
|
||||
dirs['external-media-tests'] = os.path.join(dirs['abs_test_install_dir'],
|
||||
'external-media-tests')
|
||||
abs_dirs.update(dirs)
|
||||
self.abs_dirs = abs_dirs
|
||||
return self.abs_dirs
|
||||
|
||||
@PreScriptAction('checkout')
|
||||
def _pre_checkout(self, action):
|
||||
super(FirefoxMediaTestsBase, self)._pre_checkout(action)
|
||||
c = self.config
|
||||
dirs = self.query_abs_dirs()
|
||||
self.firefox_media_vc = {
|
||||
'branch': c['firefox_media_branch'],
|
||||
'repo': c['firefox_media_repo'],
|
||||
'dest': dirs['firefox_media_dir'],
|
||||
}
|
||||
if 'firefox-media-rev' in c:
|
||||
self.firefox_media_vc['revision'] = c['firefox_media_rev']
|
||||
|
||||
def checkout(self):
|
||||
self.vcs_checkout(vcs='gittool', **self.firefox_media_vc)
|
||||
|
||||
def _query_cmd(self):
|
||||
""" Determine how to call firefox-media-tests """
|
||||
if not self.binary_path:
|
||||
self.fatal("Binary path could not be determined. "
|
||||
"Should be set by default during 'install' action.")
|
||||
dirs = self.query_abs_dirs()
|
||||
venv_python_path = self.query_python_path()
|
||||
runner_script = os.path.join(dirs['firefox_media_dir'],
|
||||
'media_test_harness',
|
||||
'runtests.py')
|
||||
cmd = [venv_python_path, runner_script]
|
||||
|
||||
import external_media_harness.runtests
|
||||
|
||||
cmd = [
|
||||
self.query_python_path(),
|
||||
external_media_harness.runtests.__file__
|
||||
]
|
||||
|
||||
cmd += ['--binary', self.binary_path]
|
||||
if self.symbols_path:
|
||||
cmd += ['--symbols-path', self.symbols_path]
|
||||
@ -238,8 +211,8 @@ class FirefoxMediaTestsBase(TestingMixin, VCSToolsScript):
|
||||
self.fatal("%s is not defined in the config!" % test_suite)
|
||||
|
||||
test_manifest = None if test_suite != 'media-youtube-tests' else \
|
||||
os.path.join(dirs['firefox_media_dir'],
|
||||
'firefox_media_tests',
|
||||
os.path.join(dirs['external-media-tests'],
|
||||
'external_media_tests',
|
||||
'playback', 'youtube', 'manifest.ini')
|
||||
config_fmt_args = {
|
||||
'test_manifest': test_manifest,
|
||||
|
@ -37,7 +37,6 @@ class FirefoxMediaTestsBuildbot(FirefoxMediaTestsBase, BlobUploadMixin):
|
||||
config_options=config_options,
|
||||
all_actions=['clobber',
|
||||
'read-buildbot-config',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
|
@ -26,7 +26,6 @@ class FirefoxMediaTestsJenkins(FirefoxMediaTestsBase):
|
||||
def __init__(self):
|
||||
super(FirefoxMediaTestsJenkins, self).__init__(
|
||||
all_actions=['clobber',
|
||||
'checkout',
|
||||
'download-and-extract',
|
||||
'create-virtualenv',
|
||||
'install',
|
||||
|
59
testing/tools/minidumpwriter/minidumpwriter.cpp
Normal file
@ -0,0 +1,59 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */


/*
 * Given a PID and a path to a target file, write a minidump of the
 * corresponding process in that file. This is taken more or less
 * verbatim from mozcrash and translated to C++ to avoid problems
 * writing a minidump of 64 bit Firefox from a 32 bit python.
 */

#include <stdio.h>
#include <stdlib.h>
#include <windows.h>
#include <dbghelp.h>

int wmain(int argc, wchar_t** argv)
{
  if (argc != 3) {
    fprintf(stderr, "Usage: minidumpwriter <PID> <DUMP_FILE>\n");
    return 1;
  }

  DWORD pid = (DWORD) _wtoi(argv[1]);

  if (pid <= 0) {
    fprintf(stderr, "Usage: minidumpwriter <PID> <DUMP_FILE>\n");
    return 1;
  }

  wchar_t* dumpfile = argv[2];
  int rv = 1;
  HANDLE hProcess = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
                                0, pid);
  if (!hProcess) {
    fprintf(stderr, "Couldn't get handle for %d\n", pid);
    return rv;
  }

  HANDLE file = CreateFileW(dumpfile, GENERIC_WRITE, 0, nullptr, CREATE_ALWAYS,
                            FILE_ATTRIBUTE_NORMAL, nullptr);
  if (file == INVALID_HANDLE_VALUE) {
    fprintf(stderr, "Couldn't open dump file at %S\n", dumpfile);
    CloseHandle(hProcess);
    return rv;
  }

  rv = 0;
  if (!MiniDumpWriteDump(hProcess, pid, file, MiniDumpNormal,
                         nullptr, nullptr, nullptr)) {
    fprintf(stderr, "Error 0x%X in MiniDumpWriteDump\n", GetLastError());
    rv = 1;
  }

  CloseHandle(file);
  CloseHandle(hProcess);
  return rv;
}
17
testing/tools/minidumpwriter/moz.build
Normal file
@ -0,0 +1,17 @@
# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*-
# vim: set filetype=python:
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

if CONFIG['ENABLE_TESTS'] and CONFIG['CPU_ARCH'] == 'x86_64' and CONFIG['OS_ARCH'] == 'WINNT':
    Program('minidumpwriter')
    OS_LIBS += [
        'dbghelp',
    ]
    SOURCES += [
        'minidumpwriter.cpp',
    ]
    USE_STATIC_LIBS = True

NO_PGO = True
@ -2,18 +2,16 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>HTML5 video with autoplay attribute.</title>
|
||||
<script type="text/javascript" src="/common/media.js"></script>
|
||||
<script src="/common/media.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script type="text/javascript">
|
||||
|
||||
function do_play(event)
|
||||
{
|
||||
<script>
|
||||
function do_play(event) {
|
||||
parent.window.postMessage("play event fired", "*");
|
||||
}
|
||||
|
||||
|
||||
document.write(
|
||||
"<video id='video0' src='" + getVideoURI("/media/green-at-15") + "'" +
|
||||
"<video id='video0' src='" + getVideoURI("/media/green-at-15") + "'" +
|
||||
" autoplay onplay='do_play(event);'>"
|
||||
);
|
||||
</script>
|
||||
|
@ -2,33 +2,24 @@
|
||||
<html>
|
||||
<head>
|
||||
<title>HTML5 Sandbox: Allow autoplay for HTML5 Video inside iframe with sandbox attribute if sandbox='allow-scripts'.</title>
|
||||
<meta name=timeout content=long>
|
||||
<meta content="text/html; charset=UTF-8" http-equiv="Content-Type" />
|
||||
<link rel="author" title="Microsoft" href="http://www.microsoft.com/" />
|
||||
<link rel="help" href="http://dev.w3.org/html5/spec/Overview.html#sandboxed-automatic-features-browsing-context-flag" />
|
||||
<meta name="assert" content="Allow autoplay for HTML5 Video inside iframe with sandbox attribute if sandbox='allow-scripts'." />
|
||||
<script src="/resources/testharness.js"></script>
|
||||
<script src="/resources/testharnessreport.js"></script>
|
||||
</head>
|
||||
<body>
|
||||
<script type="text/javascript">
|
||||
|
||||
|
||||
var t = async_test("Allow autoplay for HTML5 Video inside iframe with sandbox attribute if sandbox='allow-scripts'.");
|
||||
|
||||
function callback(event)
|
||||
{
|
||||
t.step(function(){
|
||||
assert_true('sandbox' in document.createElement('iframe'));
|
||||
assert_equals(event.data, "play event fired");
|
||||
});
|
||||
t.done();
|
||||
}
|
||||
<script>
|
||||
async_test(function (t) {
|
||||
var callback = t.step_func_done(function(event) {
|
||||
assert_true('sandbox' in document.createElement('iframe'));
|
||||
assert_equals(event.data, "play event fired");
|
||||
});
|
||||
|
||||
var timer = setTimeout(callback, 8000);
|
||||
window.addEventListener("message", callback, false);
|
||||
}, "Allow autoplay for HTML5 Video inside iframe with sandbox attribute if sandbox='allow-scripts'.");
|
||||
</script>
|
||||
<iframe src="iframe_sandbox_002.htm" sandbox="allow-scripts" style="display: none"></iframe>
|
||||
<div id=log></div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
@ -273,7 +273,7 @@ var AboutProtocolParent = {
|
||||
} else {
|
||||
channel.loadGroup = null;
|
||||
}
|
||||
let stream = channel.open();
|
||||
let stream = channel.open2();
|
||||
let data = NetUtil.readInputStreamToString(stream, stream.available(), {});
|
||||
return {
|
||||
data: data,
|
||||
|
@ -53,18 +53,14 @@ function run_test() {
|
||||
engineTemplateFile.copyTo(engineFile.parent, "test-search-engine.xml");
|
||||
|
||||
// The list of visibleDefaultEngines needs to match or the cache will be ignored.
|
||||
let chan = NetUtil.ioService.newChannel2("resource://search-plugins/list.txt",
|
||||
null, // aOriginCharset
|
||||
null, // aBaseURI
|
||||
null, // aLoadingNode
|
||||
Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
null, // aTriggeringPrincipal
|
||||
Ci.nsILoadInfo.SEC_NORMAL,
|
||||
Ci.nsIContentPolicy.TYPE_OTHER);
|
||||
let chan = NetUtil.newChannel({
|
||||
uri: "resource://search-plugins/list.txt",
|
||||
loadUsingSystemPrincipal: true
|
||||
});
|
||||
let visibleDefaultEngines = [];
|
||||
let sis = Cc["@mozilla.org/scriptableinputstream;1"].
|
||||
createInstance(Ci.nsIScriptableInputStream);
|
||||
sis.init(chan.open());
|
||||
sis.init(chan.open2());
|
||||
let list = sis.read(sis.available());
|
||||
let names = list.split("\n").filter(n => !!n);
|
||||
for (let name of names) {
|
||||
|
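The search-service test above shows the channel-construction pattern that recurs throughout this merge: verbose newChannel2() calls that spell out the loading node, principals, security flags and content-policy type are collapsed into NetUtil.newChannel() with loadUsingSystemPrincipal, and the deprecated open() becomes open2(), which performs the security checks itself. A minimal sketch of the resulting idiom, assuming a chrome-privileged caller (such as an xpcshell test) where Ci is defined and NetUtil.jsm is importable:

// Minimal sketch of the newChannel()/open2() idiom used across this commit.
Components.utils.import("resource://gre/modules/NetUtil.jsm");

function readSystemFile(uri) {
  // Build the channel with an explicit system-principal load info ...
  let channel = NetUtil.newChannel({
    uri: uri,
    contentPolicyType: Ci.nsIContentPolicy.TYPE_OTHER,
    loadUsingSystemPrincipal: true
  });
  // ... and open it with open2(), which enforces the content-policy and
  // security checks that the deprecated open() skipped.
  let stream = channel.open2();
  return NetUtil.readInputStreamToString(stream, stream.available());
}

The same shape covers the about-protocol, search-service and UpdateUtils call sites touched elsewhere in this diff.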
@ -10081,5 +10081,12 @@
|
||||
"bug_numbers": [1229961],
|
||||
"n_values": 12,
|
||||
"description": "Plugin drawing model. 0 when windowed, otherwise NPDrawingModel + 1."
|
||||
},
|
||||
"WEB_NOTIFICATION_REQUEST_PERMISSION_CALLBACK": {
|
||||
"alert_emails": ["push@mozilla.com"],
|
||||
"expires_in_version": "55",
|
||||
"bug_numbers": [1241278],
|
||||
"kind": "boolean",
|
||||
"description": "Usage of the deprecated Notification.requestPermission() callback argument"
|
||||
}
|
||||
}
|
||||
|
@ -318,6 +318,13 @@ var dataProviders = {
|
||||
}
|
||||
catch (e) {}
|
||||
|
||||
let promises = [];
|
||||
// done will be called upon all pending promises being resolved.
|
||||
// add your pending promise to promises when adding new ones.
|
||||
function completed() {
|
||||
Promise.all(promises).then(() => done(data));
|
||||
}
|
||||
|
||||
data.numTotalWindows = 0;
|
||||
data.numAcceleratedWindows = 0;
|
||||
let winEnumer = Services.ww.getWindowEnumerator();
|
||||
@ -333,7 +340,6 @@ var dataProviders = {
|
||||
data.numTotalWindows++;
|
||||
data.windowLayerManagerType = winUtils.layerManagerType;
|
||||
data.windowLayerManagerRemote = winUtils.layerManagerRemote;
|
||||
data.supportsHardwareH264 = winUtils.supportsHardwareH264Decoding;
|
||||
}
|
||||
catch (e) {
|
||||
continue;
|
||||
@ -342,6 +348,16 @@ var dataProviders = {
|
||||
data.numAcceleratedWindows++;
|
||||
}
|
||||
|
||||
let winUtils = Services.wm.getMostRecentWindow("").
|
||||
QueryInterface(Ci.nsIInterfaceRequestor).
|
||||
getInterface(Ci.nsIDOMWindowUtils)
|
||||
data.supportsHardwareH264 = "Unknown";
|
||||
let promise = winUtils.supportsHardwareH264Decoding;
|
||||
promise.then(function(v) {
|
||||
data.supportsHardwareH264 = v;
|
||||
});
|
||||
promises.push(promise);
|
||||
|
||||
if (!data.numAcceleratedWindows && gfxInfo) {
|
||||
let win = AppConstants.platform == "win";
|
||||
let feature = win ? gfxInfo.FEATURE_DIRECT3D_9_LAYERS :
|
||||
@ -350,7 +366,7 @@ var dataProviders = {
|
||||
}
|
||||
|
||||
if (!gfxInfo) {
|
||||
done(data);
|
||||
completed();
|
||||
return;
|
||||
}
|
||||
|
||||
@ -448,7 +464,7 @@ var dataProviders = {
|
||||
}
|
||||
}
|
||||
|
||||
done(data);
|
||||
completed();
|
||||
},
|
||||
|
||||
javaScript: function javaScript(done) {
|
||||
|
@ -131,16 +131,13 @@ XPCOMUtils.defineLazyGetter(UpdateUtils, "Locale", function() {
|
||||
let channel;
|
||||
let locale;
|
||||
for (let res of ['app', 'gre']) {
|
||||
channel = Services.io.newChannel2("resource://" + res + "/" + FILE_UPDATE_LOCALE,
|
||||
null,
|
||||
null,
|
||||
null, // aLoadingNode
|
||||
Services.scriptSecurityManager.getSystemPrincipal(),
|
||||
null, // aTriggeringPrincipal
|
||||
Ci.nsILoadInfo.SEC_NORMAL,
|
||||
Ci.nsIContentPolicy.TYPE_INTERNAL_XMLHTTPREQUEST);
|
||||
channel = NetUtil.newChannel({
|
||||
uri: "resource://" + res + "/" + FILE_UPDATE_LOCALE,
|
||||
contentPolicyType: Ci.nsIContentPolicy.TYPE_INTERNAL_XMLHTTPREQUEST,
|
||||
loadUsingSystemPrincipal: true
|
||||
});
|
||||
try {
|
||||
let inputStream = channel.open();
|
||||
let inputStream = channel.open2();
|
||||
locale = NetUtil.readInputStreamToString(inputStream, inputStream.available());
|
||||
} catch(e) {}
|
||||
if (locale)
|
||||
|
@ -172,6 +172,7 @@ if CONFIG['ENABLE_TESTS']:
|
||||
DIRS += [
|
||||
'/testing/mochitest',
|
||||
'/testing/xpcshell',
|
||||
'/testing/tools/minidumpwriter',
|
||||
'/testing/tools/screenshot',
|
||||
'/testing/profiles',
|
||||
'/testing/mozbase',
|
||||
|
@ -353,29 +353,6 @@ PuppetWidget::DispatchEvent(WidgetGUIEvent* event, nsEventStatus& aStatus)
|
||||
|
||||
nsEventStatus
|
||||
PuppetWidget::DispatchInputEvent(WidgetInputEvent* aEvent)
|
||||
{
|
||||
if (!mTabChild) {
|
||||
return nsEventStatus_eIgnore;
|
||||
}
|
||||
|
||||
switch (aEvent->mClass) {
|
||||
case eMouseEventClass:
|
||||
Unused <<
|
||||
mTabChild->SendDispatchMouseEvent(*aEvent->AsMouseEvent());
|
||||
break;
|
||||
case eKeyboardEventClass:
|
||||
Unused <<
|
||||
mTabChild->SendDispatchKeyboardEvent(*aEvent->AsKeyboardEvent());
|
||||
break;
|
||||
default:
|
||||
MOZ_ASSERT_UNREACHABLE("unsupported event type");
|
||||
}
|
||||
|
||||
return nsEventStatus_eIgnore;
|
||||
}
|
||||
|
||||
nsEventStatus
|
||||
PuppetWidget::DispatchAPZAwareEvent(WidgetInputEvent* aEvent)
|
||||
{
|
||||
if (!AsyncPanZoomEnabled()) {
|
||||
nsEventStatus status = nsEventStatus_eIgnore;
|
||||
@ -392,6 +369,14 @@ PuppetWidget::DispatchAPZAwareEvent(WidgetInputEvent* aEvent)
|
||||
Unused <<
|
||||
mTabChild->SendDispatchWheelEvent(*aEvent->AsWheelEvent());
|
||||
break;
|
||||
case eMouseEventClass:
|
||||
Unused <<
|
||||
mTabChild->SendDispatchMouseEvent(*aEvent->AsMouseEvent());
|
||||
break;
|
||||
case eKeyboardEventClass:
|
||||
Unused <<
|
||||
mTabChild->SendDispatchKeyboardEvent(*aEvent->AsKeyboardEvent());
|
||||
break;
|
||||
default:
|
||||
MOZ_ASSERT_UNREACHABLE("unsupported event type");
|
||||
}
|
||||
|
@ -132,7 +132,6 @@ public:
|
||||
LayoutDeviceIntPoint* aPoint = nullptr);
|
||||
|
||||
NS_IMETHOD DispatchEvent(WidgetGUIEvent* aEvent, nsEventStatus& aStatus) override;
|
||||
nsEventStatus DispatchAPZAwareEvent(WidgetInputEvent* aEvent) override;
|
||||
nsEventStatus DispatchInputEvent(WidgetInputEvent* aEvent) override;
|
||||
void SetConfirmedTargetAPZC(uint64_t aInputBlockId,
|
||||
const nsTArray<ScrollableLayerGuid>& aTargets) const override;
|
||||
|
@ -4538,7 +4538,7 @@ NSEvent* gLastDragMouseDownEvent = nil;
|
||||
else
|
||||
geckoEvent.button = WidgetMouseEvent::eLeftButton;
|
||||
|
||||
mGeckoChild->DispatchAPZAwareEvent(&geckoEvent);
|
||||
mGeckoChild->DispatchInputEvent(&geckoEvent);
|
||||
mBlockedLastMouseDown = NO;
|
||||
|
||||
// XXX maybe call markedTextSelectionChanged:client: here?
|
||||
@ -4565,7 +4565,7 @@ NSEvent* gLastDragMouseDownEvent = nil;
|
||||
|
||||
// This might destroy our widget (and null out mGeckoChild).
|
||||
bool defaultPrevented =
|
||||
(mGeckoChild->DispatchAPZAwareEvent(&geckoEvent) == nsEventStatus_eConsumeNoDefault);
|
||||
(mGeckoChild->DispatchInputEvent(&geckoEvent) == nsEventStatus_eConsumeNoDefault);
|
||||
|
||||
// Check to see if we are double-clicking in the titlebar.
|
||||
CGFloat locationInTitlebar = [[self window] frame].size.height - [theEvent locationInWindow].y;
|
||||
@ -4686,7 +4686,7 @@ NewCGSRegionFromRegion(const LayoutDeviceIntRegion& aRegion,
|
||||
WidgetMouseEvent::eReal);
|
||||
[self convertCocoaMouseEvent:theEvent toGeckoEvent:&geckoEvent];
|
||||
|
||||
mGeckoChild->DispatchAPZAwareEvent(&geckoEvent);
|
||||
mGeckoChild->DispatchInputEvent(&geckoEvent);
|
||||
|
||||
NS_OBJC_END_TRY_ABORT_BLOCK;
|
||||
}
|
||||
@ -4732,7 +4732,7 @@ NewCGSRegionFromRegion(const LayoutDeviceIntRegion& aRegion,
|
||||
geckoEvent.button = WidgetMouseEvent::eRightButton;
|
||||
geckoEvent.clickCount = [theEvent clickCount];
|
||||
|
||||
mGeckoChild->DispatchAPZAwareEvent(&geckoEvent);
|
||||
mGeckoChild->DispatchInputEvent(&geckoEvent);
|
||||
if (!mGeckoChild)
|
||||
return;
|
||||
|
||||
@ -4756,7 +4756,7 @@ NewCGSRegionFromRegion(const LayoutDeviceIntRegion& aRegion,
|
||||
geckoEvent.clickCount = [theEvent clickCount];
|
||||
|
||||
nsAutoRetainCocoaObject kungFuDeathGrip(self);
|
||||
mGeckoChild->DispatchAPZAwareEvent(&geckoEvent);
|
||||
mGeckoChild->DispatchInputEvent(&geckoEvent);
|
||||
|
||||
NS_OBJC_END_TRY_ABORT_BLOCK;
|
||||
}
|
||||
@ -4840,7 +4840,7 @@ static int32_t RoundUp(double aDouble)
|
||||
WidgetWheelEvent wheelEvent(true, msg, mGeckoChild);
|
||||
[self convertCocoaMouseWheelEvent:theEvent toGeckoEvent:&wheelEvent];
|
||||
mExpectingWheelStop = (msg == eWheelOperationStart);
|
||||
mGeckoChild->DispatchAPZAwareEvent(wheelEvent.AsInputEvent());
|
||||
mGeckoChild->DispatchInputEvent(wheelEvent.AsInputEvent());
|
||||
}
|
||||
|
||||
- (void)sendWheelCondition:(BOOL)condition
|
||||
|
@ -2695,7 +2695,7 @@ nsWindow::DispatchMissedButtonReleases(GdkEventCrossing *aGdkEvent)
|
||||
WidgetMouseEvent synthEvent(true, eMouseUp, this,
|
||||
WidgetMouseEvent::eSynthesized);
|
||||
synthEvent.button = buttonType;
|
||||
DispatchAPZAwareEvent(&synthEvent);
|
||||
DispatchInputEvent(&synthEvent);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2817,7 +2817,7 @@ nsWindow::OnButtonPressEvent(GdkEventButton *aEvent)
|
||||
InitButtonEvent(event, aEvent);
|
||||
event.pressure = mLastMotionPressure;
|
||||
|
||||
DispatchAPZAwareEvent(&event);
|
||||
DispatchInputEvent(&event);
|
||||
|
||||
// right menu click on linux should also pop up a context menu
|
||||
if (domButton == WidgetMouseEvent::eRightButton &&
|
||||
@ -2860,7 +2860,7 @@ nsWindow::OnButtonReleaseEvent(GdkEventButton *aEvent)
|
||||
gdk_event_get_axis ((GdkEvent*)aEvent, GDK_AXIS_PRESSURE, &pressure);
|
||||
event.pressure = pressure ? pressure : mLastMotionPressure;
|
||||
|
||||
DispatchAPZAwareEvent(&event);
|
||||
DispatchInputEvent(&event);
|
||||
mLastMotionPressure = pressure;
|
||||
}
|
||||
|
||||
@ -3228,7 +3228,7 @@ nsWindow::OnScrollEvent(GdkEventScroll *aEvent)
|
||||
wheelEvent.time = aEvent->time;
|
||||
wheelEvent.timeStamp = GetEventTimeStamp(aEvent->time);
|
||||
|
||||
DispatchAPZAwareEvent(&wheelEvent);
|
||||
DispatchInputEvent(&wheelEvent);
|
||||
}
|
||||
|
||||
void
|
||||
@ -3445,7 +3445,7 @@ nsWindow::OnTouchEvent(GdkEventTouch* aEvent)
|
||||
*event.touches.AppendElement() = touch.forget();
|
||||
}
|
||||
|
||||
DispatchAPZAwareEvent(&event);
|
||||
DispatchInputEvent(&event);
|
||||
return TRUE;
|
||||
}
|
||||
#endif
|
||||
|
@ -1012,14 +1012,13 @@ nsBaseWidget::ProcessUntransformedAPZEvent(WidgetInputEvent* aEvent,
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
InputAPZContext context(aGuid, aInputBlockId, aApzResponse);
|
||||
|
||||
// If this is a touch event and APZ has targeted it to an APZC in the root
|
||||
// If this is an event that the APZ has targeted to an APZC in the root
|
||||
// process, apply that APZC's callback-transform before dispatching the
|
||||
// event. If the event is instead targeted to an APZC in the child process,
|
||||
// the transform will be applied in the child process before dispatching
|
||||
// the event there (see e.g. TabChild::RecvRealTouchEvent()).
|
||||
// TODO: Do other types of events (than touch) need this?
|
||||
if (aEvent->AsTouchEvent() && aGuid.mLayersId == mCompositorParent->RootLayerTreeId()) {
|
||||
APZCCallbackHelper::ApplyCallbackTransform(*aEvent->AsTouchEvent(), aGuid,
|
||||
if (aGuid.mLayersId == mCompositorParent->RootLayerTreeId()) {
|
||||
APZCCallbackHelper::ApplyCallbackTransform(*aEvent, aGuid,
|
||||
GetDefaultScale());
|
||||
}
|
||||
|
||||
@ -1030,7 +1029,7 @@ nsBaseWidget::ProcessUntransformedAPZEvent(WidgetInputEvent* aEvent,
|
||||
UniquePtr<WidgetEvent> original(aEvent->Duplicate());
|
||||
DispatchEvent(aEvent, status);
|
||||
|
||||
if (mAPZC && !context.WasRoutedToChildProcess()) {
|
||||
if (mAPZC && !context.WasRoutedToChildProcess() && aInputBlockId) {
|
||||
// EventStateManager did not route the event into the child process.
|
||||
// It's safe to communicate to APZ that the event has been processed.
|
||||
// TODO: Eventually we'll be able to move the SendSetTargetAPZCNotification
|
||||
@ -1064,21 +1063,6 @@ nsBaseWidget::ProcessUntransformedAPZEvent(WidgetInputEvent* aEvent,
|
||||
return status;
|
||||
}
|
||||
|
||||
nsEventStatus
|
||||
nsBaseWidget::DispatchInputEvent(WidgetInputEvent* aEvent)
|
||||
{
|
||||
if (mAPZC) {
|
||||
nsEventStatus result = mAPZC->ReceiveInputEvent(*aEvent, nullptr, nullptr);
|
||||
if (result == nsEventStatus_eConsumeNoDefault) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
nsEventStatus status;
|
||||
DispatchEvent(aEvent, status);
|
||||
return status;
|
||||
}
|
||||
|
||||
class DispatchWheelEventOnMainThread : public Task
|
||||
{
|
||||
public:
|
||||
@ -1146,7 +1130,7 @@ private:
|
||||
};
|
||||
|
||||
nsEventStatus
|
||||
nsBaseWidget::DispatchAPZAwareEvent(WidgetInputEvent* aEvent)
|
||||
nsBaseWidget::DispatchInputEvent(WidgetInputEvent* aEvent)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
if (mAPZC) {
|
||||
|
@ -252,12 +252,8 @@ public:
|
||||
const FrameMetrics::ViewID& aViewId,
|
||||
const CSSRect& aRect,
|
||||
const uint32_t& aFlags) override;
|
||||
// Helper function for dispatching events which are not processed by APZ,
|
||||
// but need to be transformed by APZ.
|
||||
nsEventStatus DispatchInputEvent(mozilla::WidgetInputEvent* aEvent) override;
|
||||
|
||||
// Dispatch an event that must be first be routed through APZ.
|
||||
nsEventStatus DispatchAPZAwareEvent(mozilla::WidgetInputEvent* aEvent) override;
|
||||
nsEventStatus DispatchInputEvent(mozilla::WidgetInputEvent* aEvent) override;
|
||||
|
||||
void SetConfirmedTargetAPZC(uint64_t aInputBlockId,
|
||||
const nsTArray<ScrollableLayerGuid>& aTargets) const override;
|
||||
|
@ -1415,13 +1415,6 @@ class nsIWidget : public nsISupports {
|
||||
* enabled. If invoked in the child process, it is forwarded to the
|
||||
* parent process synchronously.
|
||||
*/
|
||||
virtual nsEventStatus DispatchAPZAwareEvent(mozilla::WidgetInputEvent* aEvent) = 0;
|
||||
|
||||
/**
|
||||
* Dispatches an event that must be transformed by APZ first, but is not
|
||||
* actually handled by APZ. If invoked in the child process, it is
|
||||
* forwarded to the parent process synchronously.
|
||||
*/
|
||||
virtual nsEventStatus DispatchInputEvent(mozilla::WidgetInputEvent* aEvent) = 0;
|
||||
|
||||
/**
|
||||
|
@ -185,7 +185,7 @@ private:
|
||||
event.refPoint = loc;
|
||||
event.touches.AppendElement(t);
|
||||
}
|
||||
aWindow->DispatchAPZAwareEvent(&event);
|
||||
aWindow->DispatchInputEvent(&event);
|
||||
}
|
||||
|
||||
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
|
||||
|
@ -531,13 +531,21 @@ WinUtils::SystemScaleFactor()
|
||||
// The result of GetDeviceCaps won't change dynamically, as it predates
|
||||
// per-monitor DPI and support for on-the-fly resolution changes.
|
||||
// Therefore, we only need to look it up once.
|
||||
static int logPixelsY = 0;
|
||||
if (!logPixelsY) {
|
||||
static double systemScale = 0;
|
||||
if (systemScale == 0) {
|
||||
HDC screenDC = GetDC(nullptr);
|
||||
logPixelsY = GetDeviceCaps(screenDC, LOGPIXELSY);
|
||||
systemScale = GetDeviceCaps(screenDC, LOGPIXELSY) / 96.0;
|
||||
ReleaseDC(nullptr, screenDC);
|
||||
|
||||
if (systemScale == 0) {
|
||||
// Bug 1012487 - This can occur when the Screen DC is used off the
|
||||
// main thread on windows. For now just assume a 100% DPI for this
|
||||
// drawing call.
|
||||
// XXX - fixme!
|
||||
return 1.0;
|
||||
}
|
||||
}
|
||||
return logPixelsY / 96.0;
|
||||
return systemScale;
|
||||
}
|
||||
|
||||
#ifndef WM_DPICHANGED
|
||||
@ -601,19 +609,7 @@ WinUtils::LogToPhysFactor(HMONITOR aMonitor)
|
||||
return dpiY / 96.0;
|
||||
}
|
||||
|
||||
// The system DPI will never change during the session.
|
||||
HDC hdc = ::GetDC(nullptr);
|
||||
double result = ::GetDeviceCaps(hdc, LOGPIXELSY) / 96.0;
|
||||
::ReleaseDC(nullptr, hdc);
|
||||
|
||||
if (result == 0) {
|
||||
// Bug 1012487 - This can occur when the Screen DC is used off the
|
||||
// main thread on windows. For now just assume a 100% DPI for this
|
||||
// drawing call.
|
||||
// XXX - fixme!
|
||||
result = 1.0;
|
||||
}
|
||||
return result;
|
||||
return SystemScaleFactor();
|
||||
}
|
||||
|
||||
/* static */
|
||||
|
@ -3928,7 +3928,7 @@ bool nsWindow::DispatchContentCommandEvent(WidgetContentCommandEvent* aEvent)
|
||||
|
||||
bool nsWindow::DispatchWheelEvent(WidgetWheelEvent* aEvent)
|
||||
{
|
||||
nsEventStatus status = DispatchAPZAwareEvent(aEvent->AsInputEvent());
|
||||
nsEventStatus status = DispatchInputEvent(aEvent->AsInputEvent());
|
||||
return ConvertStatus(status);
|
||||
}
|
||||
|
||||
@ -4266,7 +4266,7 @@ nsWindow::DispatchMouseEvent(EventMessage aEventMessage, WPARAM wParam,
|
||||
}
|
||||
}
|
||||
|
||||
result = ConvertStatus(DispatchAPZAwareEvent(&event));
|
||||
result = ConvertStatus(DispatchInputEvent(&event));
|
||||
|
||||
// Release the widget with NS_IF_RELEASE() just in case
|
||||
// the context menu key code in EventListenerManager::HandleEvent()
|
||||
@ -6507,13 +6507,13 @@ bool nsWindow::OnTouch(WPARAM wParam, LPARAM lParam)
|
||||
if (!touchInput.mTimeStamp.IsNull()) {
|
||||
// Convert MultiTouchInput to WidgetTouchEvent interface.
|
||||
WidgetTouchEvent widgetTouchEvent = touchInput.ToWidgetTouchEvent(this);
|
||||
DispatchAPZAwareEvent(&widgetTouchEvent);
|
||||
DispatchInputEvent(&widgetTouchEvent);
|
||||
}
|
||||
// Dispatch touch end event if we have one.
|
||||
if (!touchEndInput.mTimeStamp.IsNull()) {
|
||||
// Convert MultiTouchInput to WidgetTouchEvent interface.
|
||||
WidgetTouchEvent widgetTouchEvent = touchEndInput.ToWidgetTouchEvent(this);
|
||||
DispatchAPZAwareEvent(&widgetTouchEvent);
|
||||
DispatchInputEvent(&widgetTouchEvent);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -32,6 +32,7 @@ using mozilla::plugins::PluginInstanceParent;
|
||||
#include "mozilla/gfx/DataSurfaceHelpers.h"
|
||||
#include "mozilla/gfx/Tools.h"
|
||||
#include "mozilla/RefPtr.h"
|
||||
#include "mozilla/UniquePtrExtensions.h"
|
||||
#include "nsGfxCIID.h"
|
||||
#include "gfxContext.h"
|
||||
#include "prmem.h"
|
||||
@ -69,7 +70,7 @@ using namespace mozilla::plugins;
|
||||
*
|
||||
**************************************************************/
|
||||
|
||||
static nsAutoPtr<uint8_t> sSharedSurfaceData;
|
||||
static UniquePtr<uint8_t[]> sSharedSurfaceData;
|
||||
static IntSize sSharedSurfaceSize;
|
||||
|
||||
struct IconMetrics {
|
||||
@ -151,11 +152,11 @@ EnsureSharedSurfaceSize(IntSize size)
|
||||
|
||||
if (!sSharedSurfaceData || (WORDSSIZE(size) > WORDSSIZE(sSharedSurfaceSize))) {
|
||||
sSharedSurfaceSize = size;
|
||||
sSharedSurfaceData = nullptr;
|
||||
sSharedSurfaceData = (uint8_t *)malloc(WORDSSIZE(sSharedSurfaceSize) * 4);
|
||||
sSharedSurfaceData =
|
||||
MakeUniqueFallible<uint8_t[]>(WORDSSIZE(sSharedSurfaceSize) * 4);
|
||||
}
|
||||
|
||||
return (sSharedSurfaceData != nullptr);
|
||||
return !!sSharedSurfaceData;
|
||||
}
|
||||
|
||||
nsIWidgetListener* nsWindow::GetPaintListener()
|
||||
|
@ -9,6 +9,7 @@
|
||||
|
||||
#include "nsCOMPtr.h"
|
||||
#include "mozilla/RefPtr.h"
|
||||
#include "mozilla/TypeTraits.h"
|
||||
|
||||
#include "nsCycleCollectionNoteChild.h"
|
||||
#include "mozilla/MemoryReporting.h"
|
||||
@ -21,6 +22,9 @@ template <class T>
|
||||
class nsAutoPtr
|
||||
{
|
||||
private:
|
||||
static_assert(!mozilla::IsScalar<T>::value, "If you are using "
|
||||
"nsAutoPtr to hold an array, use UniquePtr<T[]> instead");
|
||||
|
||||
void**
|
||||
begin_assignment()
|
||||
{
|
||||
|
@ -13,7 +13,6 @@
|
||||
#include "nsStreamUtils.h"
|
||||
#include "nsStringStream.h"
|
||||
#include "nsComponentManagerUtils.h"
|
||||
#include "nsAutoPtr.h"
|
||||
|
||||
TEST(CloneInputStream, InvalidInput)
|
||||
{
|
||||
@ -145,7 +144,7 @@ TEST(CloneInputStream, CloneMultiplexStream)
|
||||
testing::ConsumeAndValidateStream(clone, doubled);
|
||||
|
||||
// Stream that has been read should fail.
|
||||
nsAutoPtr<char> buffer(new char[512]);
|
||||
char buffer[512];
|
||||
uint32_t read;
|
||||
rv = stream->Read(buffer, 512, &read);
|
||||
ASSERT_TRUE(NS_SUCCEEDED(rv));
|
||||
@ -175,7 +174,7 @@ TEST(CloneInputStream, CloneMultiplexStreamPartial)
|
||||
}
|
||||
|
||||
// Fail when first stream read, but second hasn't been started.
|
||||
nsAutoPtr<char> buffer(new char[1024]);
|
||||
char buffer[1024];
|
||||
uint32_t read;
|
||||
nsresult rv = stream->Read(buffer, 1024, &read);
|
||||
ASSERT_TRUE(NS_SUCCEEDED(rv));
|
||||
|