Mirror of https://gitlab.winehq.org/wine/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)
Merge mozilla-central to mozilla-inbound
This commit is contained in commit 3bd1c074b2
@ -136,33 +136,43 @@ Sanitizer.prototype = {
range = this.range || Sanitizer.getClearRange();
}
// For performance reasons we start all the clear tasks at once, then wait
// for their promises later.
// Some of the clear() calls may raise exceptions (for example bug 265028),
// we catch and store them, but continue to sanitize as much as possible.
// Callers should check returned errors and give user feedback
// about items that could not be sanitized
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_TOTAL", refObj);
let annotateError = (name, ex) => {
progress[name] = "failed";
seenError = true;
console.error("Error sanitizing " + name, ex);
};
// Array of objects in form { name, promise }.
// Name is the itemName and promise may be a promise, if the sanitization
// is asynchronous, or the function return value, if synchronous.
let promises = [];
for (let itemName of itemsToClear) {
let item = this.items[itemName];
if (!("clear" in item)) {
progress[itemName] = "`clear` not in item";
continue;
}
item.range = range;
// Some of these clear() may raise exceptions (see bug #265028)
// to sanitize as much as possible, we catch and store them,
// rather than fail fast.
// Callers should check returned errors and give user feedback
// about items that could not be sanitized
let refObj = {};
try {
TelemetryStopwatch.start("FX_SANITIZE_TOTAL", refObj);
yield item.clear();
progress[itemName] = "cleared";
} catch(er) {
progress[itemName] = "failed";
seenError = true;
console.error("Error sanitizing " + itemName, er);
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_TOTAL", refObj);
// Note we need to catch errors here, otherwise Promise.all would stop
// at the first rejection.
promises.push(item.clear(range)
.then(() => progress[itemName] = "cleared",
ex => annotateError(itemName, ex)));
} catch (ex) {
annotateError(itemName, ex);
}
}
yield Promise.all(promises);
// Sanitization is complete.
TelemetryStopwatch.finish("FX_SANITIZE_TOTAL", refObj);
// Reset the inProgress preference since we were not killed during
// sanitization.
Preferences.reset(Sanitizer.PREF_SANITIZE_IN_PROGRESS);
progress = {};
if (seenError) {
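The hunk above replaces the serial per-item loop with a start-everything-then-await pattern: each item's clear() is started immediately, per-item failures are recorded, and Promise.all only resolves once every task has settled. The following is an illustrative reduction of that pattern (a sketch, not the patched file; it assumes a hypothetical `items` map whose clear() methods return promises, as Task.async-based ones do):

// Sketch only: run every clear task, record per-item outcomes, never fail fast.
Cu.import("resource://gre/modules/Task.jsm");
let sanitizeAll = Task.async(function* (items, range) {
  let progress = {};
  let promises = [];
  for (let name of Object.keys(items)) {
    try {
      promises.push(items[name].clear(range)
        .then(() => progress[name] = "cleared",
              ex => progress[name] = "failed: " + ex));
    } catch (ex) {
      progress[name] = "failed: " + ex; // clear() threw synchronously
    }
  }
  // Rejections were converted above, so Promise.all waits for everything.
  yield Promise.all(promises);
  return progress;
});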
@ -180,85 +190,111 @@ Sanitizer.prototype = {
items: {
cache: {
clear: function ()
{
clear: Task.async(function* (range) {
let seenException;
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_CACHE", refObj);
var cache = Cc["@mozilla.org/netwerk/cache-storage-service;1"].
getService(Ci.nsICacheStorageService);
try {
// Cache doesn't consult timespan, nor does it have the
// facility for timespan-based eviction. Wipe it.
let cache = Cc["@mozilla.org/netwerk/cache-storage-service;1"]
.getService(Ci.nsICacheStorageService);
cache.clear();
} catch(er) {}
} catch (ex) {
seenException = ex;
}
var imageCache = Cc["@mozilla.org/image/tools;1"].
getService(Ci.imgITools).getImgCacheForDocument(null);
try {
let imageCache = Cc["@mozilla.org/image/tools;1"]
.getService(Ci.imgITools)
.getImgCacheForDocument(null);
imageCache.clearCache(false); // true=chrome, false=content
} catch(er) {}
} catch (ex) {
seenException = ex;
}
TelemetryStopwatch.finish("FX_SANITIZE_CACHE", refObj);
}
if (seenException) {
throw seenException;
}
})
},
cookies: {
clear: Task.async(function* ()
{
clear: Task.async(function* (range) {
let seenException;
let yieldCounter = 0;
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_COOKIES", refObj);
// Clear cookies.
TelemetryStopwatch.start("FX_SANITIZE_COOKIES_2", refObj);
try {
let cookieMgr = Components.classes["@mozilla.org/cookiemanager;1"]
.getService(Ci.nsICookieManager);
if (range) {
// Iterate through the cookies and delete any created after our cutoff.
let cookiesEnum = cookieMgr.enumerator;
while (cookiesEnum.hasMoreElements()) {
let cookie = cookiesEnum.getNext().QueryInterface(Ci.nsICookie2);
var cookieMgr = Components.classes["@mozilla.org/cookiemanager;1"]
.getService(Ci.nsICookieManager);
if (this.range) {
// Iterate through the cookies and delete any created after our cutoff.
var cookiesEnum = cookieMgr.enumerator;
while (cookiesEnum.hasMoreElements()) {
var cookie = cookiesEnum.getNext().QueryInterface(Ci.nsICookie2);
if (cookie.creationTime > range[0]) {
// This cookie was created after our cutoff, clear it
cookieMgr.remove(cookie.host, cookie.name, cookie.path, false);
if (cookie.creationTime > this.range[0]) {
// This cookie was created after our cutoff, clear it
cookieMgr.remove(cookie.host, cookie.name, cookie.path, false);
if (++yieldCounter % YIELD_PERIOD == 0) {
yield new Promise(resolve => setTimeout(resolve, 0)); // Don't block the main thread too long
if (++yieldCounter % YIELD_PERIOD == 0) {
yield new Promise(resolve => setTimeout(resolve, 0)); // Don't block the main thread too long
}
}
}
}
else {
// Remove everything
cookieMgr.removeAll();
yield new Promise(resolve => setTimeout(resolve, 0)); // Don't block the main thread too long
}
} catch (ex) {
seenException = ex;
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_COOKIES_2", refObj);
}
else {
// Remove everything
cookieMgr.removeAll();
yield new Promise(resolve => setTimeout(resolve, 0)); // Don't block the main thread too long
}
TelemetryStopwatch.finish("FX_SANITIZE_COOKIES_2", refObj);
// Clear deviceIds. Done asynchronously (returns before complete).
let mediaMgr = Components.classes["@mozilla.org/mediaManagerService;1"]
.getService(Ci.nsIMediaManagerService);
mediaMgr.sanitizeDeviceIds(this.range && this.range[0]);
try {
let mediaMgr = Components.classes["@mozilla.org/mediaManagerService;1"]
.getService(Ci.nsIMediaManagerService);
mediaMgr.sanitizeDeviceIds(range && range[0]);
} catch (ex) {
seenException = ex;
}
// Clear plugin data.
TelemetryStopwatch.start("FX_SANITIZE_PLUGINS", refObj);
yield this.promiseClearPluginCookies();
TelemetryStopwatch.finish("FX_SANITIZE_PLUGINS", refObj);
try {
yield this.promiseClearPluginCookies(range);
} catch (ex) {
seenException = ex;
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_PLUGINS", refObj);
}
TelemetryStopwatch.finish("FX_SANITIZE_COOKIES", refObj);
if (seenException) {
throw seenException;
}
}),
promiseClearPluginCookies: Task.async(function*() {
promiseClearPluginCookies: Task.async(function* (range) {
const phInterface = Ci.nsIPluginHost;
const FLAG_CLEAR_ALL = phInterface.FLAG_CLEAR_ALL;
let ph = Cc["@mozilla.org/plugin/host;1"].getService(phInterface);
// Determine age range in seconds. (-1 means clear all.) We don't know
// that this.range[1] is actually now, so we compute age range based
// on the lower bound. If this.range results in a negative age, do
// nothing.
let age = this.range ? (Date.now() / 1000 - this.range[0] / 1000000) : -1;
if (!this.range || age >= 0) {
// that range[1] is actually now, so we compute age range based
// on the lower bound. If range results in a negative age, do nothing.
let age = range ? (Date.now() / 1000 - range[0] / 1000000) : -1;
if (!range || age >= 0) {
let tags = ph.getPluginTags();
for (let tag of tags) {
try {
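The comments above derive a plugin-data age from a sanitizer range whose bounds are PRTime values (microseconds since the epoch), while -1 means "clear everything". A small hypothetical helper showing just that unit conversion, under the same assumptions:

// Sketch only: turn a [beginUsec, endUsec] sanitizer range into an age in seconds.
function pluginDataAge(range) {
  if (!range) {
    return -1; // no range: clear all plugin data
  }
  // Date.now() is milliseconds; range[0] is PRTime microseconds.
  return Date.now() / 1000 - range[0] / 1000000;
}
// As in the code above, callers skip the clear entirely if the age is negative.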
@ -280,103 +316,130 @@ Sanitizer.prototype = {
},
offlineApps: {
clear: function ()
{
clear: Task.async(function* (range) {
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_OFFLINEAPPS", refObj);
Components.utils.import("resource:///modules/offlineAppCache.jsm");
OfflineAppCacheHelper.clear();
TelemetryStopwatch.finish("FX_SANITIZE_OFFLINEAPPS", refObj);
}
try {
Components.utils.import("resource:///modules/offlineAppCache.jsm");
// This doesn't wait for the cleanup to be complete.
OfflineAppCacheHelper.clear();
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_OFFLINEAPPS", refObj);
}
})
},
history: {
clear: Task.async(function* ()
{
clear: Task.async(function* (range) {
let seenException;
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_HISTORY", refObj);
try {
if (this.range) {
if (range) {
yield PlacesUtils.history.removeVisitsByFilter({
beginDate: new Date(this.range[0] / 1000),
endDate: new Date(this.range[1] / 1000)
beginDate: new Date(range[0] / 1000),
endDate: new Date(range[1] / 1000)
});
} else {
// Remove everything.
yield PlacesUtils.history.clear();
}
try {
let clearStartingTime = this.range ? String(this.range[0]) : "";
Services.obs.notifyObservers(null, "browser:purge-session-history", clearStartingTime);
} catch (e) { }
try {
let predictor = Components.classes["@mozilla.org/network/predictor;1"]
.getService(Components.interfaces.nsINetworkPredictor);
predictor.reset();
} catch (e) {
console.error("Error while resetting the predictor", e);
}
} catch (ex) {
seenException = ex;
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_HISTORY", refObj);
}
try {
let clearStartingTime = range ? String(range[0]) : "";
Services.obs.notifyObservers(null, "browser:purge-session-history", clearStartingTime);
} catch (ex) {
seenException = ex;
}
try {
let predictor = Components.classes["@mozilla.org/network/predictor;1"]
.getService(Components.interfaces.nsINetworkPredictor);
predictor.reset();
} catch (ex) {
seenException = ex;
}
if (seenException) {
throw seenException;
}
})
},
formdata: {
clear: function ()
{
clear: Task.async(function* (range) {
let seenException;
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_FORMDATA", refObj);
// Clear undo history of all searchBars
var windowManager = Components.classes['@mozilla.org/appshell/window-mediator;1']
.getService(Components.interfaces.nsIWindowMediator);
var windows = windowManager.getEnumerator("navigator:browser");
while (windows.hasMoreElements()) {
let currentWindow = windows.getNext();
let currentDocument = currentWindow.document;
let searchBar = currentDocument.getElementById("searchbar");
if (searchBar)
searchBar.textbox.reset();
let tabBrowser = currentWindow.gBrowser;
if (!tabBrowser) {
// No tab browser? This means that it's too early during startup (typically,
// Session Restore hasn't completed yet). Since we don't have find
// bars at that stage and since Session Restore will not restore
// find bars further down during startup, we have nothing to clear.
continue;
try {
// Clear undo history of all searchBars
let windows = Services.wm.getEnumerator("navigator:browser");
while (windows.hasMoreElements()) {
let currentWindow = windows.getNext();
let currentDocument = currentWindow.document;
let searchBar = currentDocument.getElementById("searchbar");
if (searchBar)
searchBar.textbox.reset();
let tabBrowser = currentWindow.gBrowser;
if (!tabBrowser) {
// No tab browser? This means that it's too early during startup (typically,
// Session Restore hasn't completed yet). Since we don't have find
// bars at that stage and since Session Restore will not restore
// find bars further down during startup, we have nothing to clear.
continue;
}
for (let tab of tabBrowser.tabs) {
if (tabBrowser.isFindBarInitialized(tab))
tabBrowser.getFindBar(tab).clear();
}
// Clear any saved find value
tabBrowser._lastFindValue = "";
}
for (let tab of tabBrowser.tabs) {
if (tabBrowser.isFindBarInitialized(tab))
tabBrowser.getFindBar(tab).clear();
}
// Clear any saved find value
tabBrowser._lastFindValue = "";
} catch (ex) {
seenException = ex;
}
let change = { op: "remove" };
if (this.range) {
[ change.firstUsedStart, change.firstUsedEnd ] = this.range;
try {
let change = { op: "remove" };
if (range) {
[ change.firstUsedStart, change.firstUsedEnd ] = range;
}
yield new Promise(resolve => {
FormHistory.update(change, {
handleError(e) {
seenException = new Error("Error " + e.result + ": " + e.message);
},
handleCompletion() {
resolve();
}
});
});
} catch (ex) {
seenException = ex;
}
FormHistory.update(change);
TelemetryStopwatch.finish("FX_SANITIZE_FORMDATA", refObj);
}
if (seenException) {
throw seenException;
}
})
},
downloads: {
clear: function ()
{
clear: Task.async(function* (range) {
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_DOWNLOADS", refObj);
Task.spawn(function*() {
try {
let filterByTime = null;
if (this.range) {
if (range) {
// Convert microseconds back to milliseconds for date comparisons.
let rangeBeginMs = this.range[0] / 1000;
let rangeEndMs = this.range[1] / 1000;
let rangeBeginMs = range[0] / 1000;
let rangeEndMs = range[1] / 1000;
filterByTime = download => download.startTime >= rangeBeginMs &&
download.startTime <= rangeEndMs;
}
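Several items above wrap callback-style APIs in a Promise so they can be yielded from Task.async generators; the formdata item does this for FormHistory.update. A generic sketch of that conversion, in a simplified variant that rejects on the first reported error instead of recording it (an assumption of this sketch, not what the patch does):

// Sketch only: promisify FormHistory.update so it can be `yield`ed.
function promiseFormHistoryUpdate(change) {
  return new Promise((resolve, reject) => {
    FormHistory.update(change, {
      handleError(error) {
        reject(new Error("Error " + error.result + ": " + error.message));
      },
      handleCompletion() {
        resolve();
      },
    });
  });
}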
@ -384,82 +447,92 @@ Sanitizer.prototype = {
// Clear all completed/cancelled downloads
let list = yield Downloads.getList(Downloads.ALL);
list.removeFinished(filterByTime);
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_DOWNLOADS", refObj);
}.bind(this)).then(null, error => {
TelemetryStopwatch.finish("FX_SANITIZE_DOWNLOADS", refObj);
Components.utils.reportError(error);
});
}
}
})
},
sessions: {
clear: function ()
{
clear: Task.async(function* (range) {
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_SESSIONS", refObj);
// clear all auth tokens
var sdr = Components.classes["@mozilla.org/security/sdr;1"]
.getService(Components.interfaces.nsISecretDecoderRing);
sdr.logoutAndTeardown();
try {
// clear all auth tokens
let sdr = Components.classes["@mozilla.org/security/sdr;1"]
.getService(Components.interfaces.nsISecretDecoderRing);
sdr.logoutAndTeardown();
// clear FTP and plain HTTP auth sessions
var os = Components.classes["@mozilla.org/observer-service;1"]
.getService(Components.interfaces.nsIObserverService);
os.notifyObservers(null, "net:clear-active-logins", null);
TelemetryStopwatch.finish("FX_SANITIZE_SESSIONS", refObj);
}
// clear FTP and plain HTTP auth sessions
Services.obs.notifyObservers(null, "net:clear-active-logins", null);
} finally {
TelemetryStopwatch.finish("FX_SANITIZE_SESSIONS", refObj);
}
})
},
siteSettings: {
clear: function ()
{
clear: Task.async(function* (range) {
let seenException;
let refObj = {};
TelemetryStopwatch.start("FX_SANITIZE_SITESETTINGS", refObj);
// Clear site-specific permissions like "Allow this site to open popups"
// we ignore the "end" range and hope it is now() - none of the
// interfaces used here support a true range anyway.
let startDateMS = this.range == null ? null : this.range[0] / 1000;
var pm = Components.classes["@mozilla.org/permissionmanager;1"]
.getService(Components.interfaces.nsIPermissionManager);
if (startDateMS == null) {
pm.removeAll();
} else {
pm.removeAllSince(startDateMS);
let startDateMS = range ? range[0] / 1000 : null;
try {
// Clear site-specific permissions like "Allow this site to open popups"
// we ignore the "end" range and hope it is now() - none of the
// interfaces used here support a true range anyway.
if (startDateMS == null) {
Services.perms.removeAll();
} else {
Services.perms.removeAllSince(startDateMS);
}
} catch (ex) {
seenException = ex;
}
// Clear site-specific settings like page-zoom level
var cps = Components.classes["@mozilla.org/content-pref/service;1"]
.getService(Components.interfaces.nsIContentPrefService2);
if (startDateMS == null) {
cps.removeAllDomains(null);
} else {
cps.removeAllDomainsSince(startDateMS, null);
try {
// Clear site-specific settings like page-zoom level
let cps = Components.classes["@mozilla.org/content-pref/service;1"]
.getService(Components.interfaces.nsIContentPrefService2);
if (startDateMS == null) {
cps.removeAllDomains(null);
} else {
cps.removeAllDomainsSince(startDateMS, null);
}
} catch (ex) {
seenException = ex;
}
// Clear "Never remember passwords for this site", which is not handled by
// the permission manager
// (Note the login manager doesn't support date ranges yet, and bug
// 1058438 is calling for loginSaving stuff to end up in the
// permission manager)
var pwmgr = Components.classes["@mozilla.org/login-manager;1"]
.getService(Components.interfaces.nsILoginManager);
var hosts = pwmgr.getAllDisabledHosts();
for (var host of hosts) {
pwmgr.setLoginSavingEnabled(host, true);
try {
// Clear "Never remember passwords for this site", which is not handled by
// the permission manager
// (Note the login manager doesn't support date ranges yet, and bug
// 1058438 is calling for loginSaving stuff to end up in the
// permission manager)
let hosts = Services.logins.getAllDisabledHosts();
for (let host of hosts) {
Services.logins.setLoginSavingEnabled(host, true);
}
} catch (ex) {
seenException = ex;
}
// Clear site security settings - no support for ranges in this
// interface either, so we clearAll().
var sss = Cc["@mozilla.org/ssservice;1"]
.getService(Ci.nsISiteSecurityService);
sss.clearAll();
try {
// Clear site security settings - no support for ranges in this
// interface either, so we clearAll().
let sss = Cc["@mozilla.org/ssservice;1"]
.getService(Ci.nsISiteSecurityService);
sss.clearAll();
} catch (ex) {
seenException = ex;
}
// Clear all push notification subscriptions
try {
var push = Cc["@mozilla.org/push/Service;1"]
let push = Cc["@mozilla.org/push/Service;1"]
.getService(Ci.nsIPushService);
push.clearForDomain("*", status => {
if (!Components.isSuccessCode(status)) {
@ -471,7 +544,10 @@ Sanitizer.prototype = {
}
TelemetryStopwatch.finish("FX_SANITIZE_SITESETTINGS", refObj);
}
if (seenException) {
throw seenException;
}
})
},
openWindows: {
@ -490,7 +566,7 @@ Sanitizer.prototype = {
win.skipNextCanClose = false;
}
},
clear: Task.async(function*() {
clear: Task.async(function* () {
// NB: this closes all *browser* windows, not other windows like the library, about window,
// browser console, etc.
@ -670,16 +746,14 @@ Sanitizer.__defineGetter__("prefs", function()
// Shows sanitization UI
Sanitizer.showUI = function(aParentWindow)
{
var ww = Components.classes["@mozilla.org/embedcomp/window-watcher;1"]
.getService(Components.interfaces.nsIWindowWatcher);
let win = AppConstants.platform == "macosx" ?
null: // make this an app-modal window on Mac
aParentWindow;
ww.openWindow(win,
"chrome://browser/content/sanitize.xul",
"Sanitize",
"chrome,titlebar,dialog,centerscreen,modal",
null);
Services.ww.openWindow(win,
"chrome://browser/content/sanitize.xul",
"Sanitize",
"chrome,titlebar,dialog,centerscreen,modal",
null);
};
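A pattern repeated throughout the new clear() implementations above is to attempt every cleanup step even if an earlier one fails, remember one of the exceptions, and rethrow it only at the end. An illustrative, stand-alone reduction of that `seenException` idea (a sketch; the real code keeps telemetry stopwatches and yields between steps):

// Sketch only: run all steps, report one failure afterwards instead of failing fast.
function runAllSteps(steps) {
  let seenException;
  for (let step of steps) {
    try {
      step();
    } catch (ex) {
      seenException = ex; // remember the failure, keep going
    }
  }
  if (seenException) {
    throw seenException;
  }
}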
/**
@ -42,16 +42,12 @@ function promiseDownloadRemoved(list) {
return deferred.promise;
}
function test() {
waitForExplicitFinish();
Task.spawn(function() {
yield setupDownloads();
yield setupFormHistory();
yield setupHistory();
yield onHistoryReady();
}).then(null, ex => ok(false, ex)).then(finish);
}
add_task(function* test() {
yield setupDownloads();
yield setupFormHistory();
yield setupHistory();
yield onHistoryReady();
});
function countEntries(name, message, check) {
let deferred = Promise.defer();
@ -77,7 +73,7 @@ function countEntries(name, message, check) {
return deferred.promise;
}
function onHistoryReady() {
function* onHistoryReady() {
var hoursSinceMidnight = new Date().getHours();
var minutesSinceMidnight = hoursSinceMidnight * 60 + new Date().getMinutes();
@ -100,13 +96,14 @@ function onHistoryReady() {
let publicList = yield Downloads.getList(Downloads.PUBLIC);
let downloadPromise = promiseDownloadRemoved(publicList);
let formHistoryPromise = promiseFormHistoryRemoved();
// Clear 10 minutes ago
s.range = [now_uSec - 10*60*1000000, now_uSec];
s.sanitize();
yield s.sanitize();
s.range = null;
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://10minutes.com"))),
@ -157,12 +154,13 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 1 hour
Sanitizer.prefs.setIntPref("timeSpan", 1);
s.sanitize();
yield s.sanitize();
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://1hour.com"))),
@ -206,13 +204,14 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 1 hour 10 minutes
s.range = [now_uSec - 70*60*1000000, now_uSec];
s.sanitize();
yield s.sanitize();
s.range = null;
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://1hour10minutes.com"))),
@ -251,12 +250,13 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 2 hours
Sanitizer.prefs.setIntPref("timeSpan", 2);
s.sanitize();
yield s.sanitize();
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://2hour.com"))),
@ -289,15 +289,16 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-4-hour-10-minutes")), "4 hour 10 minute download should still be present");
if (hoursSinceMidnight > 2)
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 2 hours 10 minutes
s.range = [now_uSec - 130*60*1000000, now_uSec];
s.sanitize();
yield s.sanitize();
s.range = null;
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://2hour10minutes.com"))),
@ -328,12 +329,13 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 4 hours
Sanitizer.prefs.setIntPref("timeSpan", 3);
s.sanitize();
yield s.sanitize();
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://4hour.com"))),
@ -360,13 +362,14 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-today")), "'Today' download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Clear 4 hours 10 minutes
s.range = [now_uSec - 250*60*1000000, now_uSec];
s.sanitize();
yield s.sanitize();
s.range = null;
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://4hour10minutes.com"))),
@ -395,12 +398,13 @@ function onHistoryReady() {
} else {
downloadPromise = Promise.resolve();
}
formHistoryPromise = promiseFormHistoryRemoved();
// Clear Today
Sanitizer.prefs.setIntPref("timeSpan", 4);
s.sanitize();
yield s.sanitize();
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
// Be careful. If we add our objects just before midnight, and sanitize
@ -423,12 +427,13 @@ function onHistoryReady() {
ok((yield downloadExists(publicList, "fakefile-old")), "Year old download should still be present");
downloadPromise = promiseDownloadRemoved(publicList);
formHistoryPromise = promiseFormHistoryRemoved();
// Choose everything
Sanitizer.prefs.setIntPref("timeSpan", 0);
s.sanitize();
yield s.sanitize();
yield promiseFormHistoryRemoved();
yield formHistoryPromise;
yield downloadPromise;
ok(!(yield promiseIsURIVisited(makeURI("http://before-today.com"))),
@ -472,7 +477,6 @@ function setupHistory() {
let lastYear = new Date();
lastYear.setFullYear(lastYear.getFullYear() - 1);
addPlace(makeURI("http://before-today.com/"), "Before Today", lastYear.getTime() * 1000);
PlacesUtils.asyncHistory.updatePlaces(places, {
handleError: () => ok(false, "Unexpected error in adding visit."),
handleResult: () => { },
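The test above switches from a hand-rolled Task.spawn inside test() to add_task with a generator, and from fire-and-forget s.sanitize() calls to yielding the returned promise so failures fail the test. A minimal sketch of that shape (hypothetical setup/check helpers and sanitizer instance, shown only to illustrate the structure):

// Sketch only: add_task-based timespan test skeleton.
add_task(function* testSanitizeLastTenMinutes() {
  yield setupFixtures();                       // hypothetical fixture setup
  let now_uSec = Date.now() * 1000;            // range bounds are microseconds
  s.range = [now_uSec - 10 * 60 * 1000000, now_uSec];
  yield s.sanitize();                          // sanitize() now returns a promise
  s.range = null;
  yield checkOnlyRecentDataRemoved();          // hypothetical assertions
});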
@ -118,11 +118,29 @@ extensions.registerSchemaAPI("tabs", null, (extension, context) => {
onCreated: new EventManager(context, "tabs.onCreated", fire => {
let listener = event => {
if (event.detail.adoptedTab) {
// This tab is being created to adopt a tab from another window. We
// map this event to an onAttached, rather than onCreated, event.
return;
}
// We need to delay sending this event until the next tick, since the
// tab does not have its final index when the TabOpen event is dispatched.
let tab = event.originalTarget;
fire(TabManager.convert(extension, tab));
Promise.resolve().then(() => {
fire(TabManager.convert(extension, tab));
});
};
let windowListener = window => {
if (window.arguments[0] instanceof window.XULElement) {
// If the first window argument is a XUL element, it means the
// window is about to adopt a tab from another window to replace its
// initial tab, which means we need to skip the onCreated event, and
// fire an onAttached event instead.
return;
}
for (let tab of window.gBrowser.tabs) {
fire(TabManager.convert(extension, tab));
}
@ -136,6 +154,145 @@ extensions.registerSchemaAPI("tabs", null, (extension, context) => {
};
}).api(),
onAttached: new EventManager(context, "tabs.onAttached", fire => {
let fireForTab = tab => {
let newWindowId = WindowManager.getId(tab.ownerDocument.defaultView);
fire(TabManager.getId(tab), {newWindowId, newPosition: tab._tPos});
};
let listener = event => {
if (event.detail.adoptedTab) {
// We need to delay sending this event until the next tick, since the
// tab does not have its final index when the TabOpen event is dispatched.
Promise.resolve().then(() => {
fireForTab(event.originalTarget);
});
}
};
let windowListener = window => {
if (window.arguments[0] instanceof window.XULElement) {
// If the first window argument is a XUL element, it means the
// window is about to adopt a tab from another window to replace its
// initial tab.
//
// Note that this event handler depends on running before the
// delayed startup code in browser.js, which is currently triggered
// by the first MozAfterPaint event. That code handles finally
// adopting the tab, and clears it from the arguments list in the
// process, so if we run later than it, we're too late.
let tab = window.arguments[0];
// We need to be sure to fire this event after the onDetached event
// for the original tab.
tab.addEventListener("TabClose", function listener(event) {
tab.removeEventListener("TabClose", listener);
Promise.resolve().then(() => {
fireForTab(event.detail.adoptedBy);
});
});
}
};
WindowListManager.addOpenListener(windowListener);
AllWindowEvents.addListener("TabOpen", listener);
return () => {
WindowListManager.removeOpenListener(windowListener);
AllWindowEvents.removeListener("TabOpen", listener);
};
}).api(),
onDetached: new EventManager(context, "tabs.onDetached", fire => {
let listener = event => {
if (event.detail.adoptedBy) {
let tab = event.originalTarget;
let oldWindowId = WindowManager.getId(tab.ownerDocument.defaultView);
fire(TabManager.getId(tab), {oldWindowId, oldPosition: tab._tPos});
}
};
AllWindowEvents.addListener("TabClose", listener);
return () => {
AllWindowEvents.removeListener("TabClose", listener);
};
}).api(),
onRemoved: new EventManager(context, "tabs.onRemoved", fire => {
let fireForTab = (tab, isWindowClosing) => {
let tabId = TabManager.getId(tab);
let windowId = WindowManager.getId(tab.ownerDocument.defaultView);
fire(tabId, {windowId, isWindowClosing});
};
let tabListener = event => {
// Only fire if this tab is not being moved to another window. If it
// is being adopted by another window, we fire an onDetached, rather
// than an onRemoved, event.
if (!event.detail.adoptedBy) {
fireForTab(event.originalTarget, false);
}
};
let windowListener = window => {
for (let tab of window.gBrowser.tabs) {
fireForTab(tab, true);
}
};
WindowListManager.addCloseListener(windowListener);
AllWindowEvents.addListener("TabClose", tabListener);
return () => {
WindowListManager.removeCloseListener(windowListener);
AllWindowEvents.removeListener("TabClose", tabListener);
};
}).api(),
onReplaced: ignoreEvent(context, "tabs.onReplaced"),
onMoved: new EventManager(context, "tabs.onMoved", fire => {
// There are certain circumstances where we need to ignore a move event.
//
// Namely, the first time the tab is moved after it's created, we need
// to report the final position as the initial position in the tab's
// onAttached or onCreated event. This is because most tabs are inserted
// in a temporary location and then moved after the TabOpen event fires,
// which generates a TabOpen event followed by a TabMove event, which
// does not match the contract of our API.
let ignoreNextMove = new WeakSet();
let openListener = event => {
ignoreNextMove.add(event.target);
// Remove the tab from the set on the next tick, since it will already
// have been moved by then.
Promise.resolve().then(() => {
ignoreNextMove.delete(event.target);
});
};
let moveListener = event => {
let tab = event.originalTarget;
if (ignoreNextMove.has(tab)) {
ignoreNextMove.delete(tab);
return;
}
fire(TabManager.getId(tab), {
windowId: WindowManager.getId(tab.ownerDocument.defaultView),
fromIndex: event.detail,
toIndex: tab._tPos,
});
};
AllWindowEvents.addListener("TabMove", moveListener);
AllWindowEvents.addListener("TabOpen", openListener);
return () => {
AllWindowEvents.removeListener("TabMove", moveListener);
AllWindowEvents.removeListener("TabOpen", openListener);
};
}).api(),
onUpdated: new EventManager(context, "tabs.onUpdated", fire => {
function sanitize(extension, changeInfo) {
let result = {};
@ -239,34 +396,6 @@ extensions.registerSchemaAPI("tabs", null, (extension, context) => {
};
}).api(),
onReplaced: ignoreEvent(context, "tabs.onReplaced"),
onRemoved: new EventManager(context, "tabs.onRemoved", fire => {
let tabListener = event => {
let tab = event.originalTarget;
let tabId = TabManager.getId(tab);
let windowId = WindowManager.getId(tab.ownerDocument.defaultView);
let removeInfo = {windowId, isWindowClosing: false};
fire(tabId, removeInfo);
};
let windowListener = window => {
for (let tab of window.gBrowser.tabs) {
let tabId = TabManager.getId(tab);
let windowId = WindowManager.getId(window);
let removeInfo = {windowId, isWindowClosing: true};
fire(tabId, removeInfo);
}
};
WindowListManager.addCloseListener(windowListener);
AllWindowEvents.addListener("TabClose", tabListener);
return () => {
WindowListManager.removeCloseListener(windowListener);
AllWindowEvents.removeListener("TabClose", tabListener);
};
}).api(),
create: function(createProperties) {
return new Promise(resolve => {
function createInWindow(window) {
@ -646,6 +775,25 @@ extensions.registerSchemaAPI("tabs", null, (extension, context) => {
return Promise.resolve(tabsMoved.map(tab => TabManager.convert(extension, tab)));
},
duplicate: function(tabId) {
let tab = TabManager.getTab(tabId);
if (!tab) {
return Promise.reject({message: `Invalid tab ID: ${tabId}`});
}
let gBrowser = tab.ownerDocument.defaultView.gBrowser;
let newTab = gBrowser.duplicateTab(tab);
gBrowser.moveTabTo(newTab, tab._tPos + 1);
gBrowser.selectTabAtIndex(newTab._tPos);
return new Promise(resolve => {
newTab.addEventListener("SSTabRestored", function listener() {
newTab.removeEventListener("SSTabRestored", listener);
return resolve(TabManager.convert(extension, newTab));
});
});
},
},
};
return self;
@ -471,11 +471,60 @@ ExtensionTabManager.prototype = {
global.TabManager = {
_tabs: new WeakMap(),
_nextId: 1,
_initialized: false,
// We begin listening for TabOpen and TabClose events once we've started
// assigning IDs to tabs, so that we can remap the IDs of tabs which are moved
// between windows.
initListener() {
if (this._initialized) {
return;
}
AllWindowEvents.addListener("TabOpen", this);
AllWindowEvents.addListener("TabClose", this);
WindowListManager.addOpenListener(this.handleWindowOpen.bind(this));
this._initialized = true;
},
handleEvent(event) {
if (event.type == "TabOpen") {
let {adoptedTab} = event.detail;
if (adoptedTab) {
// This tab is being created to adopt a tab from a different window.
// Copy the ID from the old tab to the new.
this._tabs.set(event.target, this.getId(adoptedTab));
}
} else if (event.type == "TabClose") {
let {adoptedBy} = event.detail;
if (adoptedBy) {
// This tab is being closed because it was adopted by a new window.
// Copy its ID to the new tab, in case it was created as the first tab
// of a new window, and did not have an `adoptedTab` detail when it was
// opened.
this._tabs.set(adoptedBy, this.getId(event.target));
}
}
},
handleWindowOpen(window) {
if (window.arguments[0] instanceof window.XULElement) {
// If the first window argument is a XUL element, it means the
// window is about to adopt a tab from another window to replace its
// initial tab.
let adoptedTab = window.arguments[0];
this._tabs.set(window.gBrowser.tabs[0], this.getId(adoptedTab));
}
},
getId(tab) {
if (this._tabs.has(tab)) {
return this._tabs.get(tab);
}
this.initListener();
let id = this._nextId++;
this._tabs.set(tab, id);
return id;
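TabManager above hands out monotonically increasing IDs from a WeakMap and copies an ID across when a tab is adopted by another window, so the extension-visible tab ID survives the move. A stripped-down, stand-alone sketch of that idea (illustrative only, not the module itself):

// Sketch only: WeakMap-backed ID map whose IDs survive tab adoption.
let idMap = {
  _tabs: new WeakMap(),
  _nextId: 1,
  getId(tab) {
    if (!this._tabs.has(tab)) {
      this._tabs.set(tab, this._nextId++);
    }
    return this._tabs.get(tab);
  },
  adopt(oldTab, newTab) {
    // Seed the replacement tab with the original tab's ID.
    this._tabs.set(newTab, this.getId(oldTab));
  },
};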
@ -5,6 +5,8 @@
XPCOMUtils.defineLazyServiceGetter(this, "aboutNewTabService",
"@mozilla.org/browser/aboutnewtab-service;1",
"nsIAboutNewTabService");
XPCOMUtils.defineLazyModuleGetter(this, "PrivateBrowsingUtils",
"resource://gre/modules/PrivateBrowsingUtils.jsm");
Cu.import("resource://gre/modules/ExtensionUtils.jsm");
var {
@ -68,7 +70,27 @@ extensions.registerSchemaAPI("windows", null, (extension, context) => {
}
let args = Cc["@mozilla.org/supports-array;1"].createInstance(Ci.nsISupportsArray);
if (createData.url !== null) {
if (createData.tabId !== null) {
if (createData.url !== null) {
return Promise.reject({message: "`tabId` may not be used in conjunction with `url`"});
}
let tab = TabManager.getTab(createData.tabId);
if (tab == null) {
return Promise.reject({message: `Invalid tab ID: ${createData.tabId}`});
}
// Private browsing tabs can only be moved to private browsing
// windows.
let incognito = PrivateBrowsingUtils.isBrowserPrivate(tab.linkedBrowser);
if (createData.incognito !== null && createData.incognito != incognito) {
return Promise.reject({message: "`incognito` property must match the incognito state of tab"});
}
createData.incognito = incognito;
args.AppendElement(tab);
} else if (createData.url !== null) {
if (Array.isArray(createData.url)) {
let array = Cc["@mozilla.org/supports-array;1"].createInstance(Ci.nsISupportsArray);
for (let url of createData.url) {
@ -24,6 +24,7 @@ support-files =
[browser_ext_runtime_setUninstallURL.js]
[browser_ext_tabs_audio.js]
[browser_ext_tabs_captureVisibleTab.js]
[browser_ext_tabs_events.js]
[browser_ext_tabs_executeScript.js]
[browser_ext_tabs_executeScript_good.js]
[browser_ext_tabs_executeScript_bad.js]
@ -31,11 +32,13 @@ support-files =
[browser_ext_tabs_query.js]
[browser_ext_tabs_getCurrent.js]
[browser_ext_tabs_create.js]
[browser_ext_tabs_duplicate.js]
[browser_ext_tabs_update.js]
[browser_ext_tabs_onUpdated.js]
[browser_ext_tabs_sendMessage.js]
[browser_ext_tabs_move.js]
[browser_ext_tabs_move_window.js]
[browser_ext_windows_create_tabId.js]
[browser_ext_windows_update.js]
[browser_ext_contentscript_connect.js]
[browser_ext_tab_runtimeConnect.js]
@ -0,0 +1,44 @@
/* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set sts=2 sw=2 et tw=80: */
"use strict";
add_task(function* testDuplicateTab() {
yield BrowserTestUtils.openNewForegroundTab(gBrowser, "http://example.net/");
let extension = ExtensionTestUtils.loadExtension({
manifest: {
"permissions": ["tabs"],
},
background: function() {
browser.tabs.query({
lastFocusedWindow: true,
}, function(tabs) {
let source = tabs[1];
// By moving it to index 0, we check that the new tab is created next
// to the existing one.
browser.tabs.move(source.id, {index: 0}, () => {
browser.tabs.duplicate(source.id, (tab) => {
browser.test.assertEq("http://example.net/", tab.url);
// Should be the second tab, next to the one duplicated.
browser.test.assertEq(1, tab.index);
// Should be selected by default.
browser.test.assertTrue(tab.selected);
browser.test.notifyPass("tabs.duplicate");
});
});
});
},
});
yield extension.startup();
yield extension.awaitFinish("tabs.duplicate");
yield extension.unload();
while (window.gBrowser.tabs.length > 1) {
let tab = window.gBrowser.tabs[0];
if (tab.linkedBrowser.currentURI.spec === "http://example.net/") {
yield BrowserTestUtils.removeTab(tab);
}
}
});
@ -0,0 +1,161 @@
/* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set sts=2 sw=2 et tw=80: */
"use strict";
add_task(function* testTabEvents() {
function background() {
let events = [];
browser.tabs.onCreated.addListener(tab => {
events.push({type: "onCreated", tab});
});
browser.tabs.onAttached.addListener((tabId, info) => {
events.push(Object.assign({type: "onAttached", tabId}, info));
});
browser.tabs.onDetached.addListener((tabId, info) => {
events.push(Object.assign({type: "onDetached", tabId}, info));
});
browser.tabs.onRemoved.addListener((tabId, info) => {
events.push(Object.assign({type: "onRemoved", tabId}, info));
});
browser.tabs.onMoved.addListener((tabId, info) => {
events.push(Object.assign({type: "onMoved", tabId}, info));
});
function expectEvents(names) {
browser.test.log(`Expecting events: ${names.join(", ")}`);
return new Promise(resolve => {
setTimeout(resolve, 0);
}).then(() => {
browser.test.assertEq(names.length, events.length, "Got expected number of events");
for (let [i, name] of names.entries()) {
browser.test.assertEq(name, i in events && events[i].type,
`Got expected ${name} event`);
}
return events.splice(0);
});
}
browser.test.log("Create second browser window");
let windowId;
Promise.all([
browser.windows.getCurrent(),
browser.windows.create({url: "about:blank"}),
]).then(windows => {
windowId = windows[0].id;
let otherWindowId = windows[1].id;
let initialTab;
return expectEvents(["onCreated"]).then(([created]) => {
initialTab = created.tab;
browser.test.log("Create tab in window 1");
return browser.tabs.create({windowId, index: 0, url: "about:blank"});
}).then(tab => {
let oldIndex = tab.index;
browser.test.assertEq(0, oldIndex, "Tab has the expected index");
return expectEvents(["onCreated"]).then(([created]) => {
browser.test.assertEq(tab.id, created.tab.id, "Got expected tab ID");
browser.test.assertEq(oldIndex, created.tab.index, "Got expected tab index");
browser.test.log("Move tab to window 2");
return browser.tabs.move([tab.id], {windowId: otherWindowId, index: 0});
}).then(() => {
return expectEvents(["onDetached", "onAttached"]);
}).then(([detached, attached]) => {
browser.test.assertEq(oldIndex, detached.oldPosition, "Expected old index");
browser.test.assertEq(windowId, detached.oldWindowId, "Expected old window ID");
browser.test.assertEq(0, attached.newPosition, "Expected new index");
browser.test.assertEq(otherWindowId, attached.newWindowId, "Expected new window ID");
browser.test.log("Move tab within the same window");
return browser.tabs.move([tab.id], {index: 1});
}).then(([moved]) => {
browser.test.assertEq(1, moved.index, "Expected new index");
return expectEvents(["onMoved"]);
}).then(([moved]) => {
browser.test.assertEq(tab.id, moved.tabId, "Expected tab ID");
browser.test.assertEq(0, moved.fromIndex, "Expected old index");
browser.test.assertEq(1, moved.toIndex, "Expected new index");
browser.test.assertEq(otherWindowId, moved.windowId, "Expected window ID");
browser.test.log("Remove tab");
return browser.tabs.remove(tab.id);
}).then(() => {
return expectEvents(["onRemoved"]);
}).then(([removed]) => {
browser.test.assertEq(tab.id, removed.tabId, "Expected removed tab ID");
browser.test.assertEq(otherWindowId, removed.windowId, "Expected removed tab window ID");
// Note: We want to test for the actual boolean value false here.
browser.test.assertEq(false, removed.isWindowClosing, "Expected isWindowClosing value");
browser.test.log("Close second window");
return browser.windows.remove(otherWindowId);
}).then(() => {
return expectEvents(["onRemoved"]);
}).then(([removed]) => {
browser.test.assertEq(initialTab.id, removed.tabId, "Expected removed tab ID");
browser.test.assertEq(otherWindowId, removed.windowId, "Expected removed tab window ID");
browser.test.assertEq(true, removed.isWindowClosing, "Expected isWindowClosing value");
});
});
}).then(() => {
browser.test.log("Create additional tab in window 1");
return browser.tabs.create({windowId, url: "about:blank"});
}).then(tab => {
return expectEvents(["onCreated"]).then(() => {
browser.test.log("Create a new window, adopting the new tab");
// We have to explicitly wait for the event here, since its timing is
// not predictable.
let promiseAttached = new Promise(resolve => {
browser.tabs.onAttached.addListener(function listener(tabId) {
browser.tabs.onAttached.removeListener(listener);
resolve();
});
});
return Promise.all([
browser.windows.create({tabId: tab.id}),
promiseAttached,
]);
}).then(([window]) => {
return expectEvents(["onDetached", "onAttached"]).then(([detached, attached]) => {
browser.test.assertEq(tab.id, detached.tabId, "Expected onDetached tab ID");
browser.test.assertEq(tab.id, attached.tabId, "Expected onAttached tab ID");
browser.test.assertEq(0, attached.newPosition, "Expected onAttached new index");
browser.test.assertEq(window.id, attached.newWindowId,
"Expected onAttached new window id");
browser.test.log("Close the new window");
return browser.windows.remove(window.id);
});
});
}).then(() => {
browser.test.notifyPass("tabs-events");
}).catch(e => {
browser.test.fail(`${e} :: ${e.stack}`);
browser.test.notifyFail("tabs-events");
});
}
let extension = ExtensionTestUtils.loadExtension({
manifest: {
"permissions": ["tabs"],
},
background,
});
yield extension.startup();
yield extension.awaitFinish("tabs-events");
yield extension.unload();
});
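expectEvents() above waits one setTimeout(0) turn so that any pending listeners have fired, then drains the collected event buffer before asserting. The same wait-then-drain idea in isolation (an illustrative sketch, not part of the test file):

// Sketch only: buffer events, then inspect them after a zero-timeout so
// late-firing listeners have had a chance to run.
let buffer = [];
function record(type) {
  return info => buffer.push(Object.assign({type}, info));
}
function drainAfterTick() {
  return new Promise(resolve => setTimeout(resolve, 0))
    .then(() => buffer.splice(0));
}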
@ -0,0 +1,108 @@
/* -*- Mode: indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim: set sts=2 sw=2 et tw=80: */
"use strict";
add_task(function* testWindowCreate() {
function background() {
let promiseTabAttached = () => {
return new Promise(resolve => {
browser.tabs.onAttached.addListener(function listener() {
browser.tabs.onAttached.removeListener(listener);
resolve();
});
});
};
let windowId;
browser.windows.getCurrent().then(window => {
windowId = window.id;
browser.test.log("Create additional tab in window 1");
return browser.tabs.create({windowId, url: "about:blank"});
}).then(tab => {
browser.test.log("Create a new window, adopting the new tab");
// Note that we want to check against actual boolean values for
// all of the `incognito` property tests.
browser.test.assertEq(false, tab.incognito, "Tab is not private");
return Promise.all([
promiseTabAttached(),
browser.windows.create({tabId: tab.id}),
]);
}).then(([, window]) => {
browser.test.assertEq(false, window.incognito, "New window is not private");
browser.test.log("Close the new window");
return browser.windows.remove(window.id);
}).then(() => {
browser.test.log("Create a new private window");
return browser.windows.create({incognito: true});
}).then(privateWindow => {
browser.test.assertEq(true, privateWindow.incognito, "Private window is private");
browser.test.log("Create additional tab in private window");
return browser.tabs.create({windowId: privateWindow.id}).then(privateTab => {
browser.test.assertEq(true, privateTab.incognito, "Private tab is private");
browser.test.log("Create a new window, adopting the new private tab");
return Promise.all([
promiseTabAttached(),
browser.windows.create({tabId: privateTab.id}),
]);
}).then(([, newWindow]) => {
browser.test.assertEq(true, newWindow.incognito, "New private window is private");
browser.test.log("Close the new private window");
return browser.windows.remove(newWindow.id);
}).then(() => {
browser.test.log("Close the private window");
return browser.windows.remove(privateWindow.id);
});
}).then(() => {
return browser.tabs.query({windowId, active: true});
}).then(([tab]) => {
browser.test.log("Try to create a window with both a tab and a URL");
return browser.windows.create({tabId: tab.id, url: "http://example.com/"}).then(
window => {
browser.test.fail("Create call should have failed");
},
error => {
browser.test.assertTrue(/`tabId` may not be used in conjunction with `url`/.test(error.message),
"Create call failed as expected");
}).then(() => {
browser.test.log("Try to create a window with both a tab and an invalid incognito setting");
return browser.windows.create({tabId: tab.id, incognito: true});
}).then(
window => {
browser.test.fail("Create call should have failed");
},
error => {
browser.test.assertTrue(/`incognito` property must match the incognito state of tab/.test(error.message),
"Create call failed as expected");
});
}).then(() => {
browser.test.notifyPass("window-create");
}).catch(e => {
browser.test.fail(`${e} :: ${e.stack}`);
browser.test.notifyFail("window-create");
});
}
let extension = ExtensionTestUtils.loadExtension({
manifest: {
"permissions": ["tabs"],
},
background,
});
yield extension.startup();
yield extension.awaitFinish("window-create");
yield extension.unload();
});
@ -30,6 +30,9 @@ XPCOMUtils.defineLazyGetter(this, "gPrincipal", function() {
return Services.scriptSecurityManager.getNoAppCodebasePrincipal(uri);
});
XPCOMUtils.defineLazyModuleGetter(this, "Task",
"resource://gre/modules/Task.jsm");
// The maximum number of results PlacesProvider retrieves from history.
const HISTORY_RESULTS_LIMIT = 100;
@ -68,46 +71,6 @@ let LinkChecker = {
}
};
/**
 * Singleton that provides utility functions for links.
 * A link is a plain object that looks like this:
 *
 * {
 *   url: "http://www.mozilla.org/",
 *   title: "Mozilla",
 *   frecency: 1337,
 *   lastVisitDate: 1394678824766431,
 * }
 */
const LinkUtils = {
_sortProperties: [
"frecency",
"lastVisitDate",
"url",
],
/**
 * Compares two links.
 *
 * @param {String} aLink1 The first link.
 * @param {String} aLink2 The second link.
 * @return {Number} A negative number if aLink1 is ordered before aLink2, zero if
 *         aLink1 and aLink2 have the same ordering, or a positive number if
 *         aLink1 is ordered after aLink2.
 *         Order is ascending.
 */
compareLinks: function LinkUtils_compareLinks(aLink1, aLink2) {
for (let prop of LinkUtils._sortProperties) {
if (!aLink1.hasOwnProperty(prop) || !aLink2.hasOwnProperty(prop)) {
throw new Error("Comparable link missing required property: " + prop);
}
}
return aLink2.frecency - aLink1.frecency ||
aLink2.lastVisitDate - aLink1.lastVisitDate ||
aLink1.url.localeCompare(aLink2.url);
},
};
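compareLinks() above defines a total order (frecency descending, then lastVisitDate descending, then URL ascending) that later code relies on when keeping the links list sorted. A quick usage sketch with made-up sample data:

// Sketch only: sorting a small set of link objects with LinkUtils.compareLinks.
let sample = [
  {url: "http://example.com/", title: "A", frecency: 10, lastVisitDate: 200},
  {url: "http://example.org/", title: "B", frecency: 42, lastVisitDate: 100},
];
sample.sort(LinkUtils.compareLinks);
// example.org sorts first (higher frecency); ties fall back to recency, then URL.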
|
||||
|
||||
/* Queries history to retrieve the most visited sites. Emits events when the
 * history changes.
 * Implements the EventEmitter interface.
@@ -192,71 +155,86 @@ Links.prototype = {
   *
   * @returns {Promise} Returns a promise with the array of links as payload.
   */
  getLinks: function PlacesProvider_getLinks() {
    let getLinksPromise = new Promise((resolve, reject) => {
      let options = PlacesUtils.history.getNewQueryOptions();
      options.maxResults = this.maxNumLinks;
  getLinks: Task.async(function*() {
    // Select a single page per host with highest frecency, highest recency.
    // Choose N top such pages. Note +rev_host, to turn off optimizer per :mak
    // suggestion.
    let sqlQuery = `SELECT url, title, frecency,
                          last_visit_date as lastVisitDate,
                          "history" as type
                   FROM moz_places
                   WHERE frecency in (
                     SELECT MAX(frecency) as frecency
                     FROM moz_places
                     WHERE hidden = 0 AND last_visit_date NOTNULL
                     GROUP BY +rev_host
                     ORDER BY frecency DESC
                     LIMIT :limit
                   )
                   GROUP BY rev_host HAVING MAX(lastVisitDate)
                   ORDER BY frecency DESC, lastVisitDate DESC, url`;

      // Sort by frecency, descending.
      options.sortingMode = Ci.nsINavHistoryQueryOptions
                              .SORT_BY_FRECENCY_DESCENDING;
    let links = yield this.executePlacesQuery(sqlQuery, {
      columns: ["url", "title", "lastVisitDate", "frecency", "type"],
      params: {limit: this.maxNumLinks}
    });

      let links = [];
    return links.filter(link => LinkChecker.checkLoadURI(link.url));
  }),

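As a rough illustration (not part of the patch), the per-host deduplication performed by the SQL above is close in spirit to the hypothetical dedupeByHost helper below; it only approximates the query and assumes row objects that already carry a host field.

// Hedged sketch: keep the most frecent, then most recent, row per host.
// `rows` are hypothetical objects of the form {host, url, frecency, lastVisitDate}.
function dedupeByHost(rows) {
  let bestPerHost = new Map();
  for (let row of rows) {
    let best = bestPerHost.get(row.host);
    if (!best ||
        row.frecency > best.frecency ||
        (row.frecency == best.frecency &&
         row.lastVisitDate > best.lastVisitDate)) {
      bestPerHost.set(row.host, row);
    }
  }
  // Same ordering the query asks for: frecency, then recency, then URL.
  return [...bestPerHost.values()].sort((a, b) =>
    b.frecency - a.frecency ||
    b.lastVisitDate - a.lastVisitDate ||
    a.url.localeCompare(b.url));
}
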
    let queryHandlers = {
      handleResult: function(aResultSet) {
        for (let row = aResultSet.getNextRow(); row; row = aResultSet.getNextRow()) {
          let url = row.getResultByIndex(1);
          if (LinkChecker.checkLoadURI(url)) {
            let link = {
              url: url,
              title: row.getResultByIndex(2),
              frecency: row.getResultByIndex(12),
              lastVisitDate: row.getResultByIndex(5),
              type: "history",
            };
            links.push(link);
          }
        }
      },

      handleError: function(aError) {
        reject(aError);
      },

      handleCompletion: function(aReason) { // jshint ignore:line
        // The Places query breaks ties in frecency by place ID descending, but
        // that's different from how Links.compareLinks breaks ties, because
        // compareLinks doesn't have access to place IDs. It's very important
        // that the initial list of links is sorted in the same order imposed by
        // compareLinks, because Links uses compareLinks to perform binary
        // searches on the list. So, ensure the list is so ordered.
        let i = 1;
        let outOfOrder = [];
        while (i < links.length) {
          if (LinkUtils.compareLinks(links[i - 1], links[i]) > 0) {
            outOfOrder.push(links.splice(i, 1)[0]);
          } else {
            i++;
          }
        }
        for (let link of outOfOrder) {
          i = BinarySearch.insertionIndexOf(LinkUtils.compareLinks, links, link);
          links.splice(i, 0, link);
        }

        resolve(links);
    /**
     * Executes arbitrary query against places database
     *
     * @param {String} aSql
     *        SQL query to execute
     * @param {Object} [optional] aOptions
     *        aOptions.columns - an array of column names. if supplied the returned
     *          items will consist of objects keyed on column names. Otherwise
     *          an array of raw values is returned in the select order
     *        aOptions.params - an object of SQL binding parameters
     *        aOptions.callback - a callback to handle query rows
     *
     * @returns {Promise} Returns a promise with the array of retrieved items
     */
    executePlacesQuery: Task.async(function*(aSql, aOptions={}) {
      let {columns, params, callback} = aOptions;
      let items = [];
      let queryError = null;
      let conn = yield PlacesUtils.promiseDBConnection();
      yield conn.executeCached(aSql, params, aRow => {
        try {
          // check if caller wants to handle query rows
          if (callback) {
            callback(aRow);
          }
    };

      // Execute the query.
      let query = PlacesUtils.history.getNewQuery();
      let db = PlacesUtils.history.QueryInterface(Ci.nsPIPlacesDatabase);
      db.asyncExecuteLegacyQueries([query], 1, options, queryHandlers);
          // otherwise fill in the item and add it to the items array
          else {
            let item = null;
            // if columns array is given construct an object
            if (columns && Array.isArray(columns)) {
              item = {};
              columns.forEach(column => {
                item[column] = aRow.getResultByName(column);
              });
            } else {
              // if no columns - make an array of raw values
              item = [];
              for (let i = 0; i < aRow.numEntries; i++) {
                item.push(aRow.getResultByIndex(i));
              }
            }
            items.push(item);
          }
        } catch (e) {
          queryError = e;
          throw StopIteration;
        }
      });

      return getLinksPromise;
    }
      if (queryError) {
        throw new Error(queryError);
      }
      return items;
    }),
  };

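For context (not part of the patch), a caller would use executePlacesQuery roughly as follows; the SQL, the :limit value, and wrapping it in Task.spawn are illustrative assumptions, while PlacesProvider.links is the object the patch actually exposes.

// Hedged example: fetch the ten most frecent pages as {url, title} objects.
Task.spawn(function*() {
  let rows = yield PlacesProvider.links.executePlacesQuery(
    "SELECT url, title FROM moz_places ORDER BY frecency DESC LIMIT :limit",
    {columns: ["url", "title"], params: {limit: 10}});
  // Each row is keyed on the requested column names.
  return rows.map(row => row.url);
});
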
/**
|
||||
@ -266,6 +244,5 @@ const gLinks = new Links(); // jshint ignore:line
|
||||
|
||||
let PlacesProvider = {
|
||||
LinkChecker: LinkChecker,
|
||||
LinkUtils: LinkUtils,
|
||||
links: gLinks,
|
||||
};
|
||||
|
@@ -31,6 +31,30 @@ function run_test() {
  run_next_test();
}

// url prefix for test history population
const TEST_URL = "https://mozilla.com/";
// time when the test starts execution
const TIME_NOW = (new Date()).getTime();

// utility function to compute a past timestamp
function timeDaysAgo(numDays) {
  return TIME_NOW - (numDays * 24 * 60 * 60 * 1000);
}

// utility function to make a visit for insertion into the places db
function makeVisit(index, daysAgo, isTyped, domain=TEST_URL) {
  let {
    TRANSITION_TYPED,
    TRANSITION_LINK
  } = PlacesUtils.history;

  return {
    uri: NetUtil.newURI(`${domain}${index}`),
    visitDate: timeDaysAgo(daysAgo),
    transition: (isTyped) ? TRANSITION_TYPED : TRANSITION_LINK,
  };
}

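A small aside (not part of the patch): the helpers above are combined in the tasks below roughly like this; the task name example_seed_history and the example hosts are made up for illustration.

// Hedged sketch: seed history with two typed visits on different hosts,
// then ask the provider for its links.
add_task(function* example_seed_history() {
  yield PlacesTestUtils.clearHistory();
  yield PlacesTestUtils.addVisits([
    makeVisit(0, 0, true, "http://one.example.com/"),
    makeVisit(1, 2, true, "http://two.example.com/"),
  ]);
  let links = yield PlacesProvider.links.getLinks();
  equal(links.length, 2, "one deduplicated link per host");
});
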
/** Test LinkChecker **/
|
||||
|
||||
add_task(function test_LinkChecker_securityCheck() {
|
||||
@ -50,97 +74,6 @@ add_task(function test_LinkChecker_securityCheck() {
|
||||
}
|
||||
});
|
||||
|
||||
/** Test LinkUtils **/
|
||||
|
||||
add_task(function test_LinkUtils_compareLinks() {
|
||||
|
||||
let fixtures = {
|
||||
firstOlder: {
|
||||
url: "http://www.mozilla.org/firstolder",
|
||||
title: "Mozilla",
|
||||
frecency: 1337,
|
||||
lastVisitDate: 1394678824766431,
|
||||
},
|
||||
older: {
|
||||
url: "http://www.mozilla.org/older",
|
||||
title: "Mozilla",
|
||||
frecency: 1337,
|
||||
lastVisitDate: 1394678824766431,
|
||||
},
|
||||
newer: {
|
||||
url: "http://www.mozilla.org/newer",
|
||||
title: "Mozilla",
|
||||
frecency: 1337,
|
||||
lastVisitDate: 1494678824766431,
|
||||
},
|
||||
moreFrecent: {
|
||||
url: "http://www.mozilla.org/moreFrecent",
|
||||
title: "Mozilla",
|
||||
frecency: 1337357,
|
||||
lastVisitDate: 1394678824766431,
|
||||
}
|
||||
};
|
||||
|
||||
let links = [
|
||||
// tests string ordering, f is before o
|
||||
{link1: fixtures.firstOlder, link2: fixtures.older, expected: false},
|
||||
|
||||
// test identity
|
||||
{link1: fixtures.older, link2: fixtures.older, expected: false},
|
||||
|
||||
// test ordering by date
|
||||
{link1: fixtures.older, link2: fixtures.newer, expected: true},
|
||||
{link1: fixtures.newer, link2: fixtures.older, expected: false},
|
||||
|
||||
// test frecency
|
||||
{link1: fixtures.moreFrecent, link2: fixtures.older, expected: false},
|
||||
];
|
||||
|
||||
for (let {link1, link2, expected} of links) {
|
||||
let observed = PlacesProvider.LinkUtils.compareLinks(link1, link2) > 0;
|
||||
equal(observed , expected, `comparing ${link1.url} and ${link2.url}`);
|
||||
}
|
||||
|
||||
// test error scenarios
|
||||
|
||||
let errorFixtures = {
|
||||
missingFrecency: {
|
||||
url: "http://www.mozilla.org/firstolder",
|
||||
title: "Mozilla",
|
||||
lastVisitDate: 1394678824766431,
|
||||
},
|
||||
missingVisitDate: {
|
||||
url: "http://www.mozilla.org/firstolder",
|
||||
title: "Mozilla",
|
||||
frecency: 1337,
|
||||
},
|
||||
missingURL: {
|
||||
title: "Mozilla",
|
||||
frecency: 1337,
|
||||
lastVisitDate: 1394678824766431,
|
||||
}
|
||||
};
|
||||
|
||||
let errorLinks = [
|
||||
{link1: fixtures.older, link2: errorFixtures.missingFrecency},
|
||||
{link2: fixtures.older, link1: errorFixtures.missingFrecency},
|
||||
{link1: fixtures.older, link2: errorFixtures.missingVisitDate},
|
||||
{link1: fixtures.older, link2: errorFixtures.missingURL},
|
||||
{link1: errorFixtures.missingFrecency, link2: errorFixtures.missingVisitDate}
|
||||
];
|
||||
|
||||
let errorCount = 0;
|
||||
for (let {link1, link2} of errorLinks) {
|
||||
try {
|
||||
let observed = PlacesProvider.LinkUtils.compareLinks(link1, link2) > 0; // jshint ignore:line
|
||||
} catch (e) {
|
||||
ok(true, `exception for comparison of ${link1.url} and ${link2.url}`);
|
||||
errorCount += 1;
|
||||
}
|
||||
}
|
||||
equal(errorCount, errorLinks.length);
|
||||
});
|
||||
|
||||
/** Test Provider **/

add_task(function* test_Links_getLinks() {
@@ -162,28 +95,13 @@ add_task(function* test_Links_getLinks() {
add_task(function* test_Links_getLinks_Order() {
  yield PlacesTestUtils.clearHistory();
  let provider = PlacesProvider.links;
  let {
    TRANSITION_TYPED,
    TRANSITION_LINK
  } = PlacesUtils.history;

  function timeDaysAgo(numDays) {
    let now = new Date();
    return now.getTime() - (numDays * 24 * 60 * 60 * 1000);
  }

  let timeEarlier = timeDaysAgo(0);
  let timeLater = timeDaysAgo(2);

  // all four visits must come from different domains to avoid deduplication
  let visits = [
    // frecency 200
    {uri: NetUtil.newURI("https://mozilla.com/0"), visitDate: timeEarlier, transition: TRANSITION_TYPED},
    // sort by url, frecency 200
    {uri: NetUtil.newURI("https://mozilla.com/1"), visitDate: timeEarlier, transition: TRANSITION_TYPED},
    // sort by last visit date, frecency 200
    {uri: NetUtil.newURI("https://mozilla.com/2"), visitDate: timeLater, transition: TRANSITION_TYPED},
    // sort by frecency, frecency 10
    {uri: NetUtil.newURI("https://mozilla.com/3"), visitDate: timeLater, transition: TRANSITION_LINK},
    makeVisit(0, 0, true, "http://bar.com/"), // frecency 200, today
    makeVisit(1, 0, true, "http://foo.com/"), // frecency 200, today
    makeVisit(2, 2, true, "http://buz.com/"), // frecency 200, 2 days ago
    makeVisit(3, 2, false, "http://aaa.com/"), // frecency 10, 2 days ago, transition
  ];

  let links = yield provider.getLinks();
@@ -197,6 +115,28 @@ add_task(function* test_Links_getLinks_Order() {
  }
});

add_task(function* test_Links_getLinks_Deduplication() {
  yield PlacesTestUtils.clearHistory();
  let provider = PlacesProvider.links;

  // two visits per domain (bar.com and foo.com), so deduplication kicks in
  let visits = [
    makeVisit(0, 2, true, "http://bar.com/"), // frecency 200, 2 days ago
    makeVisit(1, 0, true, "http://bar.com/"), // frecency 200, today
    makeVisit(2, 0, false, "http://foo.com/"), // frecency 10, today
    makeVisit(3, 0, true, "http://foo.com/"), // frecency 200, today
  ];

  let links = yield provider.getLinks();
  equal(links.length, 0, "empty history yields empty links");
  yield PlacesTestUtils.addVisits(visits);

  links = yield provider.getLinks();
  equal(links.length, 2, "only two links must be left after deduplication");
  equal(links[0].url, visits[1].uri.spec, "earliest link is present");
  equal(links[1].url, visits[3].uri.spec, "most frecent link is present");
});

add_task(function* test_Links_onLinkChanged() {
|
||||
let provider = PlacesProvider.links;
|
||||
provider.init();
|
||||
@ -305,3 +245,122 @@ add_task(function* test_Links_onManyLinksChanged() {
|
||||
yield promise;
|
||||
provider.destroy();
|
||||
});
|
||||
|
||||
add_task(function* test_Links_execute_query() {
|
||||
yield PlacesTestUtils.clearHistory();
|
||||
let provider = PlacesProvider.links;
|
||||
|
||||
let visits = [
|
||||
makeVisit(0, 0, true), // frecency 200, today
|
||||
makeVisit(1, 0, true), // frecency 200, today
|
||||
makeVisit(2, 2, true), // frecency 200, 2 days ago
|
||||
makeVisit(3, 2, false), // frecency 10, 2 days ago, transition
|
||||
];
|
||||
|
||||
yield PlacesTestUtils.addVisits(visits);
|
||||
|
||||
function testItemValue(results, index, value) {
|
||||
equal(results[index][0], `${TEST_URL}${value}`, "raw url");
|
||||
equal(results[index][1], `test visit for ${TEST_URL}${value}`, "raw title");
|
||||
}
|
||||
|
||||
function testItemObject(results, index, columnValues) {
|
||||
Object.keys(columnValues).forEach(name => {
|
||||
equal(results[index][name], columnValues[name], "object name " + name);
|
||||
});
|
||||
}
|
||||
|
||||
// select all 4 records
|
||||
let results = yield provider.executePlacesQuery("select url, title from moz_places");
|
||||
equal(results.length, 4, "expect 4 items");
|
||||
// check for insert order sequence
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
testItemValue(results, i, i);
|
||||
}
|
||||
|
||||
// test parameter passing
|
||||
results = yield provider.executePlacesQuery(
|
||||
"select url, title from moz_places limit :limit",
|
||||
{params: {limit: 2}}
|
||||
);
|
||||
equal(results.length, 2, "expect 2 items");
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
testItemValue(results, i, i);
|
||||
}
|
||||
|
||||
// test extracting items by name
|
||||
results = yield provider.executePlacesQuery(
|
||||
"select url, title from moz_places limit :limit",
|
||||
{columns: ["url", "title"], params: {limit: 4}}
|
||||
);
|
||||
equal(results.length, 4, "expect 4 items");
|
||||
for (let i = 0; i < results.length; i++) {
|
||||
testItemObject(results, i, {
|
||||
"url": `${TEST_URL}${i}`,
|
||||
"title": `test visit for ${TEST_URL}${i}`,
|
||||
});
|
||||
}
|
||||
|
||||
// test ordering
|
||||
results = yield provider.executePlacesQuery(
|
||||
"select url, title, last_visit_date, frecency from moz_places " +
|
||||
"order by frecency DESC, last_visit_date DESC, url DESC limit :limit",
|
||||
{columns: ["url", "title", "last_visit_date", "frecency"], params: {limit: 4}}
|
||||
);
|
||||
equal(results.length, 4, "expect 4 items");
|
||||
testItemObject(results, 0, {url: `${TEST_URL}1`});
|
||||
testItemObject(results, 1, {url: `${TEST_URL}0`});
|
||||
testItemObject(results, 2, {url: `${TEST_URL}2`});
|
||||
testItemObject(results, 3, {url: `${TEST_URL}3`});
|
||||
|
||||
// test callback passing
|
||||
results = [];
|
||||
function handleRow(aRow) {
|
||||
results.push({
|
||||
url: aRow.getResultByName("url"),
|
||||
title: aRow.getResultByName("title"),
|
||||
last_visit_date: aRow.getResultByName("last_visit_date"),
|
||||
frecency: aRow.getResultByName("frecency")
|
||||
});
|
||||
}
|
||||
yield provider.executePlacesQuery(
|
||||
"select url, title, last_visit_date, frecency from moz_places " +
|
||||
"order by frecency DESC, last_visit_date DESC, url DESC",
|
||||
{callback: handleRow}
|
||||
);
|
||||
equal(results.length, 4, "expect 4 items");
|
||||
testItemObject(results, 0, {url: `${TEST_URL}1`});
|
||||
testItemObject(results, 1, {url: `${TEST_URL}0`});
|
||||
testItemObject(results, 2, {url: `${TEST_URL}2`});
|
||||
testItemObject(results, 3, {url: `${TEST_URL}3`});
|
||||
|
||||
  // negative test cases
  // bad sql
  try {
    yield provider.executePlacesQuery("select from moz");
    do_throw("bad sql should've thrown");
  }
  catch (e) {
    do_check_true("expected failure - bad sql");
  }
  // missing bindings
  try {
    yield provider.executePlacesQuery("select * from moz_places limit :limit");
    do_throw("missing binding should've thrown");
  }
  catch (e) {
    do_check_true("expected failure - missing binding");
  }
  // non-existent column name
  try {
    yield provider.executePlacesQuery("select * from moz_places limit :limit",
      {columns: ["no-such-column"], params: {limit: 4}});
    do_throw("bad column name should've thrown");
  }
  catch (e) {
    do_check_true("expected failure - wrong column name");
  }

  // cleanup
  yield PlacesTestUtils.clearHistory();
});

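The three try/catch blocks above share the same shape; a small hypothetical shouldReject helper (not in the patch) could express that intent more directly, using only the do_throw and ok assertions already present in this test file.

// Hedged sketch: assert that a promise-returning call rejects.
function* shouldReject(promise, message) {
  try {
    yield promise;
    do_throw(message + ": expected a rejection");
  } catch (e) {
    ok(true, message);
  }
}
// usage inside a task:
//   yield* shouldReject(provider.executePlacesQuery("select from moz"), "bad sql");
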
@ -35,6 +35,8 @@ const FTP_URL = "ftp://localhost/clearHistoryOnShutdown/";
|
||||
var formHistoryStartup = Cc["@mozilla.org/satchel/form-history-startup;1"].
|
||||
getService(Ci.nsIObserver);
|
||||
formHistoryStartup.observe(null, "profile-after-change", null);
|
||||
XPCOMUtils.defineLazyModuleGetter(this, "FormHistory",
|
||||
"resource://gre/modules/FormHistory.jsm");
|
||||
|
||||
var timeInMicroseconds = Date.now() * 1000;
|
||||
|
||||
@ -73,10 +75,15 @@ add_task(function* test_execute() {
|
||||
}
|
||||
do_print("Add cache.");
|
||||
yield storeCache(FTP_URL, "testData");
|
||||
do_print("Add form history.");
|
||||
yield addFormHistory();
|
||||
Assert.equal((yield getFormHistoryCount()), 1, "Added form history");
|
||||
|
||||
do_print("Simulate and wait shutdown.");
|
||||
yield shutdownPlaces();
|
||||
|
||||
Assert.equal((yield getFormHistoryCount()), 0, "Form history cleared");
|
||||
|
||||
let stmt = DBConn(true).createStatement(
|
||||
"SELECT id FROM moz_places WHERE url = :page_url "
|
||||
);
|
||||
@ -96,6 +103,30 @@ add_task(function* test_execute() {
|
||||
yield checkCache(FTP_URL);
|
||||
});
|
||||
|
||||
function addFormHistory() {
  return new Promise(resolve => {
    let now = Date.now() * 1000;
    FormHistory.update({ op: "add",
                         fieldname: "testfield",
                         value: "test",
                         timesUsed: 1,
                         firstUsed: now,
                         lastUsed: now
                       },
                       { handleCompletion(reason) { resolve(); } });
  });
}

function getFormHistoryCount() {
  return new Promise((resolve, reject) => {
    let count = -1;
    FormHistory.count({ fieldname: "testfield" },
                      { handleResult(result) { count = result; },
                        handleCompletion(reason) { resolve(count); }
                      });
  });
}

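As a quick usage note (not part of the patch), the two promise wrappers above compose inside a task as sketched below; the task name example_form_history_roundtrip is hypothetical.

// Hedged example: add one form-history entry, then verify it is counted.
add_task(function* example_form_history_roundtrip() {
  yield addFormHistory();
  Assert.equal((yield getFormHistoryCount()), 1, "entry was recorded");
});
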
function storeCache(aURL, aContent) {
|
||||
let cache = Services.cache2;
|
||||
let storage = cache.diskCacheStorage(LoadContextInfo.default, false);
|
||||
|
@ -352,14 +352,14 @@ function prompt(aBrowser, aRequest) {
|
||||
let chromeDoc = this.browser.ownerDocument;
|
||||
|
||||
if (aTopic == "shown") {
|
||||
let PopupNotifications = chromeDoc.defaultView.PopupNotifications;
|
||||
let popupId = "Devices";
|
||||
if (requestTypes.length == 1 && (requestTypes[0] == "Microphone" ||
|
||||
requestTypes[0] == "AudioCapture"))
|
||||
popupId = "Microphone";
|
||||
if (requestTypes.indexOf("Screen") != -1)
|
||||
popupId = "Screen";
|
||||
PopupNotifications.panel.firstChild.setAttribute("popupid", "webRTC-share" + popupId);
|
||||
chromeDoc.getElementById("webRTC-shareDevices-notification")
|
||||
.setAttribute("popupid", "webRTC-share" + popupId);
|
||||
}
|
||||
|
||||
if (aTopic != "showing")
|
||||
@ -911,9 +911,10 @@ function updateBrowserSpecificIndicator(aBrowser, aState) {
|
||||
dismissed: true,
|
||||
eventCallback: function(aTopic, aNewBrowser) {
|
||||
if (aTopic == "shown") {
|
||||
let PopupNotifications = this.browser.ownerDocument.defaultView.PopupNotifications;
|
||||
let popupId = captureState == "Microphone" ? "Microphone" : "Devices";
|
||||
PopupNotifications.panel.firstChild.setAttribute("popupid", "webRTC-sharing" + popupId);
|
||||
this.browser.ownerDocument
|
||||
.getElementById("webRTC-sharingDevices-notification")
|
||||
.setAttribute("popupid", "webRTC-sharing" + popupId);
|
||||
}
|
||||
|
||||
if (aTopic == "swapping") {
|
||||
@ -950,8 +951,9 @@ function updateBrowserSpecificIndicator(aBrowser, aState) {
|
||||
dismissed: true,
|
||||
eventCallback: function(aTopic, aNewBrowser) {
|
||||
if (aTopic == "shown") {
|
||||
let PopupNotifications = this.browser.ownerDocument.defaultView.PopupNotifications;
|
||||
PopupNotifications.panel.firstChild.setAttribute("popupid", "webRTC-sharingScreen");
|
||||
this.browser.ownerDocument
|
||||
.getElementById("webRTC-sharingScreen-notification")
|
||||
.setAttribute("popupid", "webRTC-sharingScreen");
|
||||
}
|
||||
|
||||
if (aTopic == "swapping") {
|
||||
|
@ -25,7 +25,7 @@ buildscript {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
classpath 'com.android.tools.build:gradle:1.3.0'
|
||||
classpath 'com.android.tools.build:gradle:1.5.0'
|
||||
classpath('com.stanfy.spoon:spoon-gradle-plugin:1.0.4') {
|
||||
// Without these, we get errors linting.
|
||||
exclude module: 'guava'
|
||||
|
@ -92,7 +92,7 @@
|
||||
// rule is a better rule to check this.
|
||||
"max-depth": 0,
|
||||
// Maximum length of a line.
|
||||
"max-len": [1, 80, 2, {"ignoreUrls": true, "ignorePattern": "\\s*require\\s*\\(|^\\s*loader\\.lazy"}],
|
||||
"max-len": [1, 80, 2, {"ignoreUrls": true, "ignorePattern": "\\s*require\\s*\\(|^\\s*loader\\.lazy|-\\*-"}],
|
||||
// Maximum depth callbacks can be nested.
|
||||
"max-nested-callbacks": [2, 3],
|
||||
// Don't limit the number of parameters that can be used in a function.
|
||||
|
@ -23,12 +23,11 @@
|
||||
require.config({
|
||||
baseUrl: ".",
|
||||
paths: {
|
||||
"devtools/client/shared": "resource://devtools/client/shared",
|
||||
"devtools/client/shared/vendor/react": [
|
||||
"resource://devtools/client/shared/vendor/react-dev",
|
||||
"resource://devtools/client/shared/vendor/react"
|
||||
],
|
||||
"devtools/client/shared/vendor/react-dom":
|
||||
"resource://devtools/client/shared/vendor/react-dom"
|
||||
}
|
||||
});
|
||||
|
||||
|
@@ -110,11 +110,22 @@ function BrowserLoader(baseURI, window) {
    // Allow modules to use the window's console to ensure logs appear in a
    // tab toolbox, if one exists, instead of just the browser console.
    console: window.console,
    // Make sure 'define' function exists. This allows reusing AMD modules.
    define: function(callback) {
      callback(this.require, this.exports, this.module);
      return this.exports;
    }
    // Make sure `define` function exists. This allows defining some modules
    // in AMD format while retaining CommonJS compatibility through this hook.
    // JSON Viewer needs modules in AMD format, as it currently uses RequireJS
    // from a content document and can't access our usual loaders. So, any
    // modules shared with the JSON Viewer should include a define wrapper:
    //
    //   // Make this available to both AMD and CJS environments
    //   define(function(require, exports, module) {
    //     ... code ...
    //   });
    //
    // Bug 1248830 will work out a better plan here for our content module
    // loading needs, especially as we head towards devtools.html.
    define(factory) {
      factory(this.require, this.exports, this.module);
    },
  }
};

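To make the comment in the hunk above concrete (not part of the patch), a module shared with the JSON Viewer would look roughly like this; the greet function and its export are made up for illustration.

// Hedged sketch of an AMD/CJS dual-format module, as described above.
// Make this available to both AMD and CJS environments
define(function(require, exports, module) {
  // `require` resolves the same way under RequireJS (JSON Viewer) and under
  // the DevTools loader, thanks to the `define` hook shown in the hunk above.
  function greet(name) {
    return "hello " + name;
  }

  module.exports = { greet };
});
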
|
@ -3,7 +3,7 @@
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
"use strict";
|
||||
|
||||
const {Cc, Ci, Cu} = require("chrome");
|
||||
const {Ci, Cu} = require("chrome");
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
loader.lazyImporter(this, "setNamedTimeout",
|
||||
"resource://devtools/client/shared/widgets/ViewHelpers.jsm");
|
||||
@ -14,15 +14,16 @@ const XUL_NS = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
|
||||
const HTML_NS = "http://www.w3.org/1999/xhtml";
|
||||
const AFTER_SCROLL_DELAY = 100;
|
||||
|
||||
// Different types of events emitted by the Various components of the TableWidget
|
||||
// Different types of events emitted by the Various components of the
|
||||
// TableWidget.
|
||||
const EVENTS = {
|
||||
TABLE_CLEARED: "table-cleared",
|
||||
CELL_EDIT: "cell-edit",
|
||||
COLUMN_SORTED: "column-sorted",
|
||||
COLUMN_TOGGLED: "column-toggled",
|
||||
ROW_SELECTED: "row-selected",
|
||||
ROW_UPDATED: "row-updated",
|
||||
HEADER_CONTEXT_MENU: "header-context-menu",
|
||||
ROW_CONTEXT_MENU: "row-context-menu",
|
||||
ROW_SELECTED: "row-selected",
|
||||
ROW_UPDATED: "row-updated",
|
||||
SCROLL_END: "scroll-end"
|
||||
};
|
||||
Object.defineProperty(this, "EVENTS", {
|
||||
@ -31,8 +32,8 @@ Object.defineProperty(this, "EVENTS", {
|
||||
writable: false
|
||||
});
|
||||
|
||||
// Maximum number of character visible in any cell in the table. This is to avoid
|
||||
// making the cell take up all the space in a row.
|
||||
// Maximum number of character visible in any cell in the table. This is to
|
||||
// avoid making the cell take up all the space in a row.
|
||||
const MAX_VISIBLE_STRING_SIZE = 100;
|
||||
|
||||
/**
|
||||
@ -52,7 +53,7 @@ const MAX_VISIBLE_STRING_SIZE = 100;
|
||||
* the context menu in the headers will not appear.
|
||||
* - firstColumn: key of the first column that should appear.
|
||||
*/
|
||||
function TableWidget(node, options={}) {
|
||||
function TableWidget(node, options = {}) {
|
||||
EventEmitter.decorate(this);
|
||||
|
||||
this.document = node.ownerDocument;
|
||||
@ -83,9 +84,10 @@ function TableWidget(node, options={}) {
|
||||
this.items = new Map();
|
||||
this.columns = new Map();
|
||||
|
||||
// Setup the column headers context menu to allow users to hide columns at will
|
||||
// Setup the column headers context menu to allow users to hide columns at
|
||||
// will.
|
||||
if (this.removableColumns) {
|
||||
this.onPopupCommand = this.onPopupCommand.bind(this)
|
||||
this.onPopupCommand = this.onPopupCommand.bind(this);
|
||||
this.setupHeadersContextMenu();
|
||||
}
|
||||
|
||||
@ -99,7 +101,7 @@ function TableWidget(node, options={}) {
|
||||
this.selectedRow = id;
|
||||
};
|
||||
this.on(EVENTS.ROW_SELECTED, this.bindSelectedRow);
|
||||
};
|
||||
}
|
||||
|
||||
TableWidget.prototype = {
|
||||
|
||||
@ -162,9 +164,9 @@ TableWidget.prototype = {
|
||||
},
|
||||
|
||||
/**
|
||||
* Prepares the context menu for the headers of the table columns. This context
|
||||
* menu allows users to toggle various columns, only with an exception of the
|
||||
* unique columns and when only two columns are visible in the table.
|
||||
* Prepares the context menu for the headers of the table columns. This
|
||||
* context menu allows users to toggle various columns, only with an exception
|
||||
* of the unique columns and when only two columns are visible in the table.
|
||||
*/
|
||||
setupHeadersContextMenu: function() {
|
||||
let popupset = this.document.getElementsByTagName("popupset")[0];
|
||||
@ -308,7 +310,8 @@ TableWidget.prototype = {
|
||||
},
|
||||
|
||||
/**
|
||||
* Selects the previous row. Cycles over to the last row if first row is selected
|
||||
* Selects the previous row. Cycles over to the last row if first row is
|
||||
* selected.
|
||||
*/
|
||||
selectPreviousRow: function() {
|
||||
for (let column of this.columns.values()) {
|
||||
@ -430,9 +433,9 @@ TableWidget.prototype = {
|
||||
}
|
||||
|
||||
let sortedItems = this.columns.get(column).sort([...this.items.values()]);
|
||||
for (let [id, column] of this.columns) {
|
||||
if (id != column) {
|
||||
column.sort(sortedItems);
|
||||
for (let [id, col] of this.columns) {
|
||||
if (id != col) {
|
||||
col.sort(sortedItems);
|
||||
}
|
||||
}
|
||||
},
|
||||
@ -673,8 +676,8 @@ Column.prototype = {
|
||||
|
||||
/**
|
||||
* Pushes the `item` object into the column. If this column is sorted on,
|
||||
* then inserts the object at the right position based on the column's id key's
|
||||
* value.
|
||||
* then inserts the object at the right position based on the column's id
|
||||
* key's value.
|
||||
*
|
||||
* @returns {number}
|
||||
* The index of the currently pushed item.
|
||||
@ -856,7 +859,7 @@ Column.prototype = {
|
||||
}
|
||||
|
||||
if (event.button == 0 && event.originalTarget == this.header) {
|
||||
return this.table.sortBy(this.id);
|
||||
this.table.sortBy(this.id);
|
||||
}
|
||||
},
|
||||
|
||||
@ -970,7 +973,7 @@ Cell.prototype = {
|
||||
|
||||
if (!(value instanceof Ci.nsIDOMNode) &&
|
||||
value.length > MAX_VISIBLE_STRING_SIZE) {
|
||||
value = value .substr(0, MAX_VISIBLE_STRING_SIZE) + "\u2026"; // …
|
||||
value = value .substr(0, MAX_VISIBLE_STRING_SIZE) + "\u2026";
|
||||
}
|
||||
|
||||
if (value instanceof Ci.nsIDOMNode) {
|
||||
@ -1001,7 +1004,7 @@ Cell.prototype = {
|
||||
flash: function() {
|
||||
this.label.classList.remove("flash-out");
|
||||
// Cause a reflow so that the animation retriggers on adding back the class
|
||||
let a = this.label.parentNode.offsetWidth;
|
||||
let a = this.label.parentNode.offsetWidth; // eslint-disable-line
|
||||
this.label.classList.add("flash-out");
|
||||
},
|
||||
|
||||
@ -1013,4 +1016,4 @@ Cell.prototype = {
|
||||
this.label.remove();
|
||||
this.label = null;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -5,7 +5,6 @@
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
"use strict";
|
||||
|
||||
const Services = require("Services")
|
||||
const HTML_NS = "http://www.w3.org/1999/xhtml";
|
||||
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
@ -17,11 +16,12 @@ const EventEmitter = require("devtools/shared/event-emitter");
|
||||
* The container element for the tree widget.
|
||||
* @param {Object} options
|
||||
* - emptyText {string}: text to display when no entries in the table.
|
||||
* - defaultType {string}: The default type of the tree items. For ex. 'js'
|
||||
* - defaultType {string}: The default type of the tree items. For ex.
|
||||
* 'js'
|
||||
* - sorted {boolean}: Defaults to true. If true, tree items are kept in
|
||||
* lexical order. If false, items will be kept in insertion order.
|
||||
*/
|
||||
function TreeWidget(node, options={}) {
|
||||
function TreeWidget(node, options = {}) {
|
||||
EventEmitter.decorate(this);
|
||||
|
||||
this.document = node.ownerDocument;
|
||||
@ -43,7 +43,7 @@ function TreeWidget(node, options={}) {
|
||||
}
|
||||
// A map to hold all the passed attachment to each leaf in the tree.
|
||||
this.attachments = new Map();
|
||||
};
|
||||
}
|
||||
|
||||
TreeWidget.prototype = {
|
||||
|
||||
@ -220,7 +220,8 @@ TreeWidget.prototype = {
|
||||
|
||||
/**
|
||||
* Adds an item in the tree. The item can be added as a child to any node in
|
||||
* the tree. The method will also create any subnode not present in the process.
|
||||
* the tree. The method will also create any subnode not present in the
|
||||
* process.
|
||||
*
|
||||
* @param {[string|object]} items
|
||||
* An array of either string or objects where each increasing index
|
||||
@ -266,7 +267,7 @@ TreeWidget.prototype = {
|
||||
* The array of ids leading up to the item.
|
||||
*/
|
||||
remove: function(item) {
|
||||
this.root.remove(item)
|
||||
this.root.remove(item);
|
||||
this.attachments.delete(JSON.stringify(item));
|
||||
// Display the empty tree text
|
||||
if (this.root.items.size == 0 && this.emptyText) {
|
||||
@ -337,7 +338,7 @@ TreeWidget.prototype = {
|
||||
*/
|
||||
onKeypress: function(event) {
|
||||
let currentSelected = this._selectedLabel;
|
||||
switch(event.keyCode) {
|
||||
switch (event.keyCode) {
|
||||
case event.DOM_VK_UP:
|
||||
this.selectPreviousItem();
|
||||
break;
|
||||
@ -404,7 +405,7 @@ module.exports.TreeWidget = TreeWidget;
|
||||
* The type of the current node. For ex. "js"
|
||||
*/
|
||||
function TreeItem(document, parent, label, type) {
|
||||
this.document = document
|
||||
this.document = document;
|
||||
this.node = this.document.createElementNS(HTML_NS, "li");
|
||||
this.node.setAttribute("tabindex", "0");
|
||||
this.isRoot = !parent;
|
||||
@ -412,7 +413,7 @@ function TreeItem(document, parent, label, type) {
|
||||
if (this.parent) {
|
||||
this.level = this.parent.level + 1;
|
||||
}
|
||||
if (!!label) {
|
||||
if (label) {
|
||||
this.label = this.document.createElementNS(HTML_NS, "div");
|
||||
this.label.setAttribute("empty", "true");
|
||||
this.label.setAttribute("level", this.level);
|
||||
@ -421,7 +422,7 @@ function TreeItem(document, parent, label, type) {
|
||||
this.label.setAttribute("type", type);
|
||||
}
|
||||
if (typeof label == "string") {
|
||||
this.label.textContent = label
|
||||
this.label.textContent = label;
|
||||
} else {
|
||||
this.label.appendChild(label);
|
||||
}
|
||||
@ -454,8 +455,8 @@ TreeItem.prototype = {
|
||||
level: 0,
|
||||
|
||||
/**
|
||||
* Adds the item to the sub tree contained by this node. The item to be inserted
|
||||
* can be a direct child of this node, or further down the tree.
|
||||
* Adds the item to the sub tree contained by this node. The item to be
|
||||
* inserted can be a direct child of this node, or further down the tree.
|
||||
*
|
||||
* @param {array} items
|
||||
* Same as TreeWidget.add method's argument
|
||||
@ -473,16 +474,18 @@ TreeItem.prototype = {
|
||||
// Get the id and label corresponding to this level inside the tree.
|
||||
let id = items[this.level].id || items[this.level];
|
||||
if (this.items.has(id)) {
|
||||
// An item with same id already exists, thus calling the add method of that
|
||||
// child to add the passed node at correct position.
|
||||
// An item with same id already exists, thus calling the add method of
|
||||
// that child to add the passed node at correct position.
|
||||
this.items.get(id).add(items, defaultType, sorted);
|
||||
return;
|
||||
}
|
||||
// No item with the id `id` exists, so we create one and call the add
|
||||
// method of that item.
|
||||
// The display string of the item can be the label, the id, or the item itself
|
||||
// if its a plain string.
|
||||
let label = items[this.level].label || items[this.level].id || items[this.level];
|
||||
// The display string of the item can be the label, the id, or the item
|
||||
// itself if its a plain string.
|
||||
let label = items[this.level].label ||
|
||||
items[this.level].id ||
|
||||
items[this.level];
|
||||
let node = items[this.level].node;
|
||||
if (node) {
|
||||
// The item is supposed to be a DOMNode, so we fetch the textContent in
|
||||
|
@ -6,7 +6,6 @@
|
||||
|
||||
"use strict";
|
||||
|
||||
const {Cu} = require("chrome");
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
|
||||
loader.lazyRequireGetter(this, "StorageFront",
|
||||
|
@ -62,8 +62,8 @@ const testCases = [
|
||||
[["indexedDB", "https://sectest1.example.org", "idb-s2", "obj-s2"],
|
||||
[16]],
|
||||
[["Cache", "http://test1.example.org", "plop"],
|
||||
[MAIN_DOMAIN + "404_cached_file.js", MAIN_DOMAIN + "browser_storage_basic.js"]],
|
||||
|
||||
[MAIN_DOMAIN + "404_cached_file.js",
|
||||
MAIN_DOMAIN + "browser_storage_basic.js"]],
|
||||
];
|
||||
|
||||
/**
|
||||
|
@ -4,7 +4,8 @@
|
||||
|
||||
"use strict";
|
||||
|
||||
var { console } = Cu.import("resource://gre/modules/Console.jsm", {});
|
||||
/* eslint no-unused-vars: [2, {"vars": "local"}] */
|
||||
|
||||
var { require } = Cu.import("resource://devtools/shared/Loader.jsm", {});
|
||||
var { TargetFactory } = require("devtools/client/framework/target");
|
||||
var promise = require("promise");
|
||||
@ -92,13 +93,13 @@ function* openTabAndSetupStorage(url) {
|
||||
|
||||
// Setup the async storages in main window and for all its iframes
|
||||
let callSetup = function*(win) {
|
||||
if (typeof(win.setup) == "function") {
|
||||
if (typeof (win.setup) == "function") {
|
||||
yield win.setup();
|
||||
}
|
||||
for(var i = 0; i < win.frames.length; i++) {
|
||||
for (let i = 0; i < win.frames.length; i++) {
|
||||
yield callSetup(win.frames[i]);
|
||||
}
|
||||
}
|
||||
};
|
||||
yield callSetup(gWindow);
|
||||
|
||||
// open storage inspector
|
||||
@ -254,7 +255,7 @@ function* click(node) {
|
||||
/**
|
||||
* Recursively expand the variables view up to a given property.
|
||||
*
|
||||
* @param aOptions
|
||||
* @param options
|
||||
* Options for view expansion:
|
||||
* - rootVariable: start from the given scope/variable/property.
|
||||
* - expandTo: string made up of property names you want to expand.
|
||||
@ -266,14 +267,14 @@ function* click(node) {
|
||||
* last property - |nextSibling| in the example above. Rejection is
|
||||
* always the last property that was found.
|
||||
*/
|
||||
function variablesViewExpandTo(aOptions) {
|
||||
let root = aOptions.rootVariable;
|
||||
let expandTo = aOptions.expandTo.split(".");
|
||||
function variablesViewExpandTo(options) {
|
||||
let root = options.rootVariable;
|
||||
let expandTo = options.expandTo.split(".");
|
||||
let lastDeferred = promise.defer();
|
||||
|
||||
function getNext(aProp) {
|
||||
function getNext(prop) {
|
||||
let name = expandTo.shift();
|
||||
let newProp = aProp.get(name);
|
||||
let newProp = prop.get(name);
|
||||
|
||||
if (expandTo.length > 0) {
|
||||
ok(newProp, "found property " + name);
|
||||
@ -281,19 +282,15 @@ function variablesViewExpandTo(aOptions) {
|
||||
newProp.expand();
|
||||
getNext(newProp);
|
||||
} else {
|
||||
lastDeferred.reject(aProp);
|
||||
lastDeferred.reject(prop);
|
||||
}
|
||||
} else if (newProp) {
|
||||
lastDeferred.resolve(newProp);
|
||||
} else {
|
||||
lastDeferred.reject(aProp);
|
||||
lastDeferred.reject(prop);
|
||||
}
|
||||
}
|
||||
|
||||
function fetchError(aProp) {
|
||||
lastDeferred.reject(aProp);
|
||||
}
|
||||
|
||||
if (root && root.expand) {
|
||||
root.expand();
|
||||
getNext(root);
|
||||
@ -307,12 +304,12 @@ function variablesViewExpandTo(aOptions) {
|
||||
/**
|
||||
* Find variables or properties in a VariablesView instance.
|
||||
*
|
||||
* @param array aRules
|
||||
* @param array ruleArray
|
||||
* The array of rules you want to match. Each rule is an object with:
|
||||
* - name (string|regexp): property name to match.
|
||||
* - value (string|regexp): property value to match.
|
||||
* - dontMatch (boolean): make sure the rule doesn't match any property.
|
||||
* @param boolean aParsed
|
||||
* @param boolean parsed
|
||||
* true if we want to test the rules in the parse value section of the
|
||||
* storage sidebar
|
||||
* @return object
|
||||
@ -323,23 +320,23 @@ function variablesViewExpandTo(aOptions) {
|
||||
* VariablesView. If the rule did not match, then |matchedProp| is
|
||||
* undefined.
|
||||
*/
|
||||
function findVariableViewProperties(aRules, aParsed) {
|
||||
function findVariableViewProperties(ruleArray, parsed) {
|
||||
// Initialize the search.
|
||||
function init() {
|
||||
// If aParsed is true, we are checking rules in the parsed value section of
|
||||
// If parsed is true, we are checking rules in the parsed value section of
|
||||
// the storage sidebar. That scope uses a blank variable as a placeholder
|
||||
// Thus, adding a blank parent to each name
|
||||
if (aParsed) {
|
||||
aRules = aRules.map(({name, value, dontMatch}) => {
|
||||
if (parsed) {
|
||||
ruleArray = ruleArray.map(({name, value, dontMatch}) => {
|
||||
return {name: "." + name, value, dontMatch};
|
||||
});
|
||||
}
|
||||
// Separate out the rules that require expanding properties throughout the
|
||||
// view.
|
||||
let expandRules = [];
|
||||
let rules = aRules.filter((aRule) => {
|
||||
if (typeof aRule.name == "string" && aRule.name.indexOf(".") > -1) {
|
||||
expandRules.push(aRule);
|
||||
let rules = ruleArray.filter(rule => {
|
||||
if (typeof rule.name == "string" && rule.name.indexOf(".") > -1) {
|
||||
expandRules.push(rule);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
@ -355,24 +352,24 @@ function findVariableViewProperties(aRules, aParsed) {
|
||||
// Process the rules that need to expand properties.
|
||||
let lastStep = processExpandRules.bind(null, expandRules);
|
||||
|
||||
// Return the results - a promise resolved to hold the updated aRules array.
|
||||
let returnResults = onAllRulesMatched.bind(null, aRules);
|
||||
// Return the results - a promise resolved to hold the updated ruleArray.
|
||||
let returnResults = onAllRulesMatched.bind(null, ruleArray);
|
||||
|
||||
return promise.all(outstanding).then(lastStep).then(returnResults);
|
||||
}
|
||||
|
||||
function onMatch(aProp, aRule, aMatched) {
|
||||
if (aMatched && !aRule.matchedProp) {
|
||||
aRule.matchedProp = aProp;
|
||||
function onMatch(prop, rule, matched) {
|
||||
if (matched && !rule.matchedProp) {
|
||||
rule.matchedProp = prop;
|
||||
}
|
||||
}
|
||||
|
||||
function finder(rules, aView, aPromises) {
|
||||
for (let scope of aView) {
|
||||
function finder(rules, view, promises) {
|
||||
for (let scope of view) {
|
||||
for (let [, prop] of scope) {
|
||||
for (let rule of rules) {
|
||||
let matcher = matchVariablesViewProperty(prop, rule);
|
||||
aPromises.push(matcher.then(onMatch.bind(null, prop, rule)));
|
||||
promises.push(matcher.then(onMatch.bind(null, prop, rule)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -386,17 +383,17 @@ function findVariableViewProperties(aRules, aParsed) {
|
||||
|
||||
let deferred = promise.defer();
|
||||
let expandOptions = {
|
||||
rootVariable: gUI.view.getScopeAtIndex(aParsed ? 1 : 0),
|
||||
rootVariable: gUI.view.getScopeAtIndex(parsed ? 1 : 0),
|
||||
expandTo: rule.name
|
||||
};
|
||||
|
||||
variablesViewExpandTo(expandOptions).then(function onSuccess(aProp) {
|
||||
variablesViewExpandTo(expandOptions).then(function onSuccess(prop) {
|
||||
let name = rule.name;
|
||||
let lastName = name.split(".").pop();
|
||||
rule.name = lastName;
|
||||
|
||||
let matched = matchVariablesViewProperty(aProp, rule);
|
||||
return matched.then(onMatch.bind(null, aProp, rule)).then(function() {
|
||||
let matched = matchVariablesViewProperty(prop, rule);
|
||||
return matched.then(onMatch.bind(null, prop, rule)).then(function() {
|
||||
rule.name = name;
|
||||
});
|
||||
}, function onFailure() {
|
||||
@ -430,9 +427,9 @@ function findVariableViewProperties(aRules, aParsed) {
|
||||
* Check if a given Property object from the variables view matches the given
|
||||
* rule.
|
||||
*
|
||||
* @param object aProp
|
||||
* @param object prop
|
||||
* The variable's view Property instance.
|
||||
* @param object aRule
|
||||
* @param object rule
|
||||
* Rules for matching the property. See findVariableViewProperties() for
|
||||
* details.
|
||||
* @return object
|
||||
@ -440,36 +437,36 @@ function findVariableViewProperties(aRules, aParsed) {
|
||||
* result is a boolean that tells your promise callback the match
|
||||
* result: true or false.
|
||||
*/
|
||||
function matchVariablesViewProperty(aProp, aRule) {
|
||||
function resolve(aResult) {
|
||||
return promise.resolve(aResult);
|
||||
function matchVariablesViewProperty(prop, rule) {
|
||||
function resolve(result) {
|
||||
return promise.resolve(result);
|
||||
}
|
||||
|
||||
if (!aProp) {
|
||||
if (!prop) {
|
||||
return resolve(false);
|
||||
}
|
||||
|
||||
if (aRule.name) {
|
||||
let match = aRule.name instanceof RegExp ?
|
||||
aRule.name.test(aProp.name) :
|
||||
aProp.name == aRule.name;
|
||||
if (rule.name) {
|
||||
let match = rule.name instanceof RegExp ?
|
||||
rule.name.test(prop.name) :
|
||||
prop.name == rule.name;
|
||||
if (!match) {
|
||||
return resolve(false);
|
||||
}
|
||||
}
|
||||
|
||||
if ("value" in aRule) {
|
||||
let displayValue = aProp.displayValue;
|
||||
if (aProp.displayValueClassName == "token-string") {
|
||||
if ("value" in rule) {
|
||||
let displayValue = prop.displayValue;
|
||||
if (prop.displayValueClassName == "token-string") {
|
||||
displayValue = displayValue.substring(1, displayValue.length - 1);
|
||||
}
|
||||
|
||||
let match = aRule.value instanceof RegExp ?
|
||||
aRule.value.test(displayValue) :
|
||||
displayValue == aRule.value;
|
||||
let match = rule.value instanceof RegExp ?
|
||||
rule.value.test(displayValue) :
|
||||
displayValue == rule.value;
|
||||
if (!match) {
|
||||
info("rule " + aRule.name + " did not match value, expected '" +
|
||||
aRule.value + "', found '" + displayValue + "'");
|
||||
info("rule " + rule.name + " did not match value, expected '" +
|
||||
rule.value + "', found '" + displayValue + "'");
|
||||
return resolve(false);
|
||||
}
|
||||
}
|
||||
@ -521,7 +518,7 @@ function* selectTableItem(id) {
|
||||
* @param {Boolean} [useCapture] for addEventListener/removeEventListener
|
||||
* @return A promise that resolves when the event has been handled
|
||||
*/
|
||||
function once(target, eventName, useCapture=false) {
|
||||
function once(target, eventName, useCapture = false) {
|
||||
info("Waiting for event: '" + eventName + "' on " + target + ".");
|
||||
|
||||
let deferred = promise.defer();
|
||||
|
@ -69,6 +69,7 @@ var StorageUI = this.StorageUI = function StorageUI(front, target, panelWin) {
|
||||
emptyText: L10N.getStr("table.emptyText"),
|
||||
highlightUpdated: true,
|
||||
});
|
||||
|
||||
this.displayObjectSidebar = this.displayObjectSidebar.bind(this);
|
||||
this.table.on(TableWidget.EVENTS.ROW_SELECTED, this.displayObjectSidebar);
|
||||
|
||||
@ -150,7 +151,7 @@ StorageUI.prototype = {
|
||||
* An object containing which storage types were cleared
|
||||
*/
|
||||
onCleared: function(response) {
|
||||
let [type, host, db, objectStore] = this.tree.selectedItem;
|
||||
let [type, host] = this.tree.selectedItem;
|
||||
if (response.hasOwnProperty(type) && response[type].indexOf(host) > -1) {
|
||||
this.table.clear();
|
||||
this.hideSidebar();
|
||||
@ -219,7 +220,7 @@ StorageUI.prototype = {
|
||||
this.tree.selectedItem = [type, host, name[0], name[1]];
|
||||
this.fetchStorageObjects(type, host, [JSON.stringify(name)], 1);
|
||||
}
|
||||
} catch(ex) {
|
||||
} catch (ex) {
|
||||
// Do nothing
|
||||
}
|
||||
}
|
||||
@ -317,7 +318,9 @@ StorageUI.prototype = {
|
||||
fetchStorageObjects: function(type, host, names, reason) {
|
||||
let fetchOpts = reason === 3 ? {offset: this.itemOffset}
|
||||
: {};
|
||||
this.storageTypes[type].getStoreObjects(host, names, fetchOpts).then(({data}) => {
|
||||
let storageType = this.storageTypes[type];
|
||||
|
||||
storageType.getStoreObjects(host, names, fetchOpts).then(({data}) => {
|
||||
if (!data.length) {
|
||||
this.emit("store-objects-updated");
|
||||
return;
|
||||
@ -341,14 +344,15 @@ StorageUI.prototype = {
|
||||
populateStorageTree: function(storageTypes) {
|
||||
this.storageTypes = {};
|
||||
for (let type in storageTypes) {
|
||||
// Ignore `from` field, which is just a protocol.js implementation artifact
|
||||
// Ignore `from` field, which is just a protocol.js implementation
|
||||
// artifact.
|
||||
if (type === "from") {
|
||||
continue;
|
||||
}
|
||||
let typeLabel = type;
|
||||
try {
|
||||
typeLabel = L10N.getStr("tree.labels." + type);
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
console.error("Unable to localize tree label type:" + type);
|
||||
}
|
||||
this.tree.add([{id: type, label: typeLabel, type: "store"}]);
|
||||
@ -366,7 +370,7 @@ StorageUI.prototype = {
|
||||
this.tree.selectedItem = [type, host, names[0], names[1]];
|
||||
this.fetchStorageObjects(type, host, [name], 0);
|
||||
}
|
||||
} catch(ex) {
|
||||
} catch (ex) {
|
||||
// Do Nothing
|
||||
}
|
||||
}
|
||||
@ -576,8 +580,9 @@ StorageUI.prototype = {
|
||||
columns[key] = key;
|
||||
try {
|
||||
columns[key] = L10N.getStr("table.headers." + type + "." + key);
|
||||
} catch(e) {
|
||||
console.error("Unable to localize table header type:" + type + " key:" + key);
|
||||
} catch (e) {
|
||||
console.error("Unable to localize table header type:" + type +
|
||||
" key:" + key);
|
||||
}
|
||||
}
|
||||
this.table.setColumns(columns, null, HIDDEN_COLUMNS);
|
||||
@ -643,12 +648,14 @@ StorageUI.prototype = {
|
||||
* Handles endless scrolling for the table
|
||||
*/
|
||||
handleScrollEnd: function() {
|
||||
if (!this.shouldLoadMoreItems) return;
|
||||
if (!this.shouldLoadMoreItems) {
|
||||
return;
|
||||
}
|
||||
this.shouldLoadMoreItems = false;
|
||||
this.itemOffset += 50;
|
||||
|
||||
let item = this.tree.selectedItem;
|
||||
let [type, host, db, objectStore] = item;
|
||||
let [type, host] = item;
|
||||
let names = null;
|
||||
if (item.length > 2) {
|
||||
names = [JSON.stringify(item.slice(2))];
|
||||
|
@ -7,8 +7,11 @@
|
||||
margin: 0;
|
||||
display : flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
/* Bug 1243598 - Reduce the container height by the tab height to make room
|
||||
for the tabs above. */
|
||||
height: calc(100% - 24px);
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
#browser-style-checkbox {
|
||||
@ -20,7 +23,6 @@
|
||||
#propertyContainer {
|
||||
-moz-user-select: text;
|
||||
overflow: auto;
|
||||
height: 0px;
|
||||
flex: auto;
|
||||
}
|
||||
|
||||
|
@ -3,18 +3,20 @@
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#sidebar-panel-fontinspector {
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
padding-bottom: 20px;
|
||||
width: 100%;
|
||||
/* Bug 1243598 - Reduce the container height by the tab height to make room
|
||||
for the tabs above. */
|
||||
height: calc(100% - 24px);
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
#font-container {
|
||||
overflow: auto;
|
||||
flex: auto;
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
#all-fonts {
|
||||
|
@ -374,11 +374,19 @@ html, body, #app, #memory-tool {
|
||||
.heap-tree-item-total-bytes {
|
||||
width: 10%;
|
||||
/*
|
||||
* Provision for up to :
|
||||
* - 12 characters for the number part (10s of GB and spaces every 3 digits)
|
||||
* - 4 chars for the percent part (the maximum length string is "100%")
|
||||
* Provision for up to 19 characters:
|
||||
*
|
||||
* GG_MMM_KKK_BBB_100%
|
||||
* | ||| |
|
||||
* '------------'|'--'
|
||||
* 14 ch for 10s | 4 ch for the largest % we will
|
||||
* of GB and | normally see: "100%"
|
||||
* spaces every |
|
||||
* 3 digits |
|
||||
* |
|
||||
* A space between the number and percent
|
||||
*/
|
||||
min-width: 16ch;
|
||||
min-width: 19ch;
|
||||
}
|
||||
|
||||
.heap-tree-item-name {
|
||||
|
@ -17,15 +17,17 @@
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
/* Bug 1243598 - Reduce the container height by the tab height to make room
|
||||
for the tabs above. */
|
||||
height: calc(100% - 24px);
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
#ruleview-container {
|
||||
-moz-user-select: text;
|
||||
overflow: auto;
|
||||
flex: auto;
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
#ruleview-container.non-interactive {
|
||||
|
@ -27,7 +27,7 @@ const XUL_NS = "http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul";
|
||||
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
|
||||
|
||||
const WebConsoleUtils = require("devtools/shared/webconsole/utils").Utils;
|
||||
const l10n = new WebConsoleUtils.l10n(STRINGS_URI);
|
||||
const l10n = new WebConsoleUtils.L10n(STRINGS_URI);
|
||||
|
||||
const MAX_STRING_GRIP_LENGTH = 36;
|
||||
const ELLIPSIS = Services.prefs.getComplexValue("intl.ellipsis", Ci.nsIPrefLocalizedString).data;
|
||||
|
@ -25,7 +25,7 @@ loader.lazyRequireGetter(this, "showDoorhanger", "devtools/client/shared/doorhan
|
||||
loader.lazyRequireGetter(this, "viewSource", "devtools/client/shared/view-source");
|
||||
|
||||
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
|
||||
var l10n = new WebConsoleUtils.l10n(STRINGS_URI);
|
||||
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
|
||||
|
||||
const BROWSER_CONSOLE_WINDOW_FEATURES = "chrome,titlebar,toolbar,centerscreen,resizable,dialog=no";
|
||||
|
||||
|
@ -38,7 +38,7 @@ const GROUP_INDENT = 12;
|
||||
|
||||
const WEBCONSOLE_STRINGS_URI = "chrome://devtools/locale/" +
|
||||
"webconsole.properties";
|
||||
var WCUL10n = new WebConsoleUtils.l10n(WEBCONSOLE_STRINGS_URI);
|
||||
var WCUL10n = new WebConsoleUtils.L10n(WEBCONSOLE_STRINGS_URI);
|
||||
|
||||
DevToolsUtils.testing = true;
|
||||
|
||||
|
@ -33,7 +33,7 @@ loader.lazyRequireGetter(this, "gDevTools", "devtools/client/framework/devtools"
|
||||
loader.lazyImporter(this, "PluralForm", "resource://gre/modules/PluralForm.jsm");
|
||||
|
||||
const STRINGS_URI = "chrome://devtools/locale/webconsole.properties";
|
||||
var l10n = new WebConsoleUtils.l10n(STRINGS_URI);
|
||||
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
|
||||
|
||||
const XHTML_NS = "http://www.w3.org/1999/xhtml";
|
||||
|
||||
@ -597,7 +597,7 @@ WebConsoleFrame.prototype = {
|
||||
* We need this because it makes the layout a lot faster than
|
||||
* using -moz-box-flex and 100% width. See Bug 1237368.
|
||||
*/
|
||||
resize: function(e) {
|
||||
resize: function() {
|
||||
this.outputNode.style.width = this.outputWrapper.clientWidth + "px";
|
||||
},
|
||||
|
||||
@ -606,7 +606,7 @@ WebConsoleFrame.prototype = {
|
||||
* selected or when there is a split console present.
|
||||
* @private
|
||||
*/
|
||||
_onPanelSelected: function(evt, id) {
|
||||
_onPanelSelected: function() {
|
||||
this.jsterm.inputNode.focus();
|
||||
},
|
||||
|
||||
@ -711,7 +711,7 @@ WebConsoleFrame.prototype = {
|
||||
let categories = this.document
|
||||
.querySelectorAll(".webconsole-filter-button[category]");
|
||||
Array.forEach(categories, function(button) {
|
||||
button.addEventListener("contextmenu", (event) => {
|
||||
button.addEventListener("contextmenu", () => {
|
||||
button.open = true;
|
||||
}, false);
|
||||
button.addEventListener("click", this._toggleFilter, false);
|
||||
@ -1024,7 +1024,7 @@ WebConsoleFrame.prototype = {
|
||||
// "filtered-by-type" class, which turns on or off the display.
|
||||
|
||||
let attribute = WORKERTYPES_PREFKEYS.indexOf(prefKey) == -1
|
||||
? 'filter' : 'workerType';
|
||||
? "filter" : "workerType";
|
||||
|
||||
let xpath = ".//*[contains(@class, 'message') and " +
|
||||
"@" + attribute + "='" + prefKey + "']";
|
||||
@ -1107,22 +1107,19 @@ WebConsoleFrame.prototype = {
|
||||
},
|
||||
|
||||
/**
|
||||
* Merge the attributes of the two nodes that are about to be filtered.
|
||||
* Increment the number of repeats of original.
|
||||
* Merge the attributes of repeated nodes.
|
||||
*
|
||||
* @param nsIDOMNode original
|
||||
* The Original Node. The one being merged into.
|
||||
* @param nsIDOMNode filtered
|
||||
* The node being filtered out because it is repeated.
|
||||
*/
|
||||
mergeFilteredMessageNode: function(original, filtered) {
|
||||
mergeFilteredMessageNode: function(original) {
|
||||
let repeatNode = original.getElementsByClassName("message-repeats")[0];
|
||||
if (!repeatNode) {
|
||||
// no repeat node, return early.
|
||||
return;
|
||||
}
|
||||
|
||||
let occurrences = parseInt(repeatNode.getAttribute("value")) + 1;
|
||||
let occurrences = parseInt(repeatNode.getAttribute("value"), 10) + 1;
|
||||
repeatNode.setAttribute("value", occurrences);
|
||||
repeatNode.textContent = occurrences;
|
||||
let str = l10n.getStr("messageRepeats.tooltip2");
|
||||
@ -1172,7 +1169,7 @@ WebConsoleFrame.prototype = {
|
||||
}
|
||||
|
||||
if (dupeNode) {
|
||||
this.mergeFilteredMessageNode(dupeNode, node);
|
||||
this.mergeFilteredMessageNode(dupeNode);
|
||||
return dupeNode;
|
||||
}
|
||||
|
||||
@ -1378,7 +1375,7 @@ WebConsoleFrame.prototype = {
|
||||
let workerTypeID = CONSOLE_WORKER_IDS.indexOf(message.workerType);
|
||||
if (workerTypeID != -1) {
|
||||
node.workerType = WORKERTYPES_PREFKEYS[workerTypeID];
|
||||
node.setAttribute('workerType', WORKERTYPES_PREFKEYS[workerTypeID]);
|
||||
node.setAttribute("workerType", WORKERTYPES_PREFKEYS[workerTypeID]);
|
||||
}
|
||||
|
||||
return node;
|
||||
@ -1406,19 +1403,19 @@ WebConsoleFrame.prototype = {
|
||||
reportPageError: function(category, scriptError) {
|
||||
// Warnings and legacy strict errors become warnings; other types become
|
||||
// errors.
|
||||
let severity = 'error';
|
||||
let severity = "error";
|
||||
if (scriptError.warning || scriptError.strict) {
|
||||
severity = 'warning';
|
||||
severity = "warning";
|
||||
} else if (scriptError.info) {
|
||||
severity = 'log';
|
||||
severity = "log";
|
||||
}
|
||||
|
||||
switch (category) {
|
||||
case CATEGORY_CSS:
|
||||
category = 'css';
|
||||
category = "css";
|
||||
break;
|
||||
case CATEGORY_SECURITY:
|
||||
category = 'security';
|
||||
category = "security";
|
||||
break;
|
||||
default:
|
||||
category = "js";
|
||||
@ -1837,7 +1834,7 @@ WebConsoleFrame.prototype = {
|
||||
_updateNetMessage: function(actorId) {
|
||||
let networkInfo = this.webConsoleClient.getNetworkRequest(actorId);
|
||||
if (!networkInfo || !networkInfo.node) {
|
||||
return;
|
||||
return false;
|
||||
}
|
||||
|
||||
let messageNode = networkInfo.node;
|
||||
@ -1846,7 +1843,7 @@ WebConsoleFrame.prototype = {
|
||||
let hasResponseStart = updates.indexOf("responseStart") > -1;
|
||||
let request = networkInfo.request;
|
||||
let methodText = (networkInfo.isXHR) ?
|
||||
request.method + ' XHR' : request.method;
|
||||
request.method + " XHR" : request.method;
|
||||
let response = networkInfo.response;
|
||||
let updated = false;
|
||||
|
||||
@ -2133,7 +2130,7 @@ WebConsoleFrame.prototype = {
|
||||
* - visible: boolean that tells if the message is visible.
|
||||
*/
|
||||
_outputMessageFromQueue: function(hudIdSupportsString, item) {
|
||||
let [category, methodOrNode, args] = item;
|
||||
let [, methodOrNode, args] = item;
|
||||
|
||||
// The last object in the args array should be message
|
||||
// object or response packet received from the server.
|
||||
@ -2532,16 +2529,16 @@ WebConsoleFrame.prototype = {
|
||||
|
||||
// Make the location clickable.
|
||||
let onClick = () => {
|
||||
let target = locationNode.target;
|
||||
if (target == "scratchpad" || isScratchpad) {
|
||||
let nodeTarget = locationNode.target;
|
||||
if (nodeTarget == "scratchpad" || isScratchpad) {
|
||||
this.owner.viewSourceInScratchpad(url, line);
|
||||
return;
|
||||
}
|
||||
|
||||
let category = locationNode.parentNode.category;
|
||||
if (target == "styleeditor" || category == CATEGORY_CSS) {
|
||||
if (nodeTarget == "styleeditor" || category == CATEGORY_CSS) {
|
||||
this.owner.viewSourceInStyleEditor(fullURL, line);
|
||||
} else if (target == "jsdebugger" ||
|
||||
} else if (nodeTarget == "jsdebugger" ||
|
||||
category == CATEGORY_JS || category == CATEGORY_WEBDEV) {
|
||||
this.owner.viewSourceInDebugger(fullURL, line);
|
||||
} else {
|
||||
@ -2686,8 +2683,8 @@ WebConsoleFrame.prototype = {
|
||||
*
|
||||
* @param object options
|
||||
* - linkOnly:
|
||||
* An optional flag to copy only URL without timestamp and
|
||||
* other meta-information. Default is false.
|
||||
* An optional flag to copy only URL without other meta-information.
|
||||
* Default is false.
|
||||
* - contextmenu:
|
||||
* An optional flag to copy the last clicked item which brought
|
||||
* up the context menu if nothing is selected. Default is false.
|
||||
@ -2707,7 +2704,6 @@ WebConsoleFrame.prototype = {
|
||||
// Ensure the selected item hasn't been filtered by type or string.
|
||||
if (!item.classList.contains("filtered-by-type") &&
|
||||
!item.classList.contains("filtered-by-string")) {
|
||||
let timestampString = l10n.timestampString(item.timestamp);
|
||||
if (options.linkOnly) {
|
||||
strings.push(item.url);
|
||||
} else {
|
||||
@ -2839,14 +2835,14 @@ WebConsoleFrame.prototype = {
|
||||
*/
|
||||
function simpleValueEvalMacro(item, currentString) {
|
||||
return VariablesView.simpleValueEvalMacro(item, currentString, "_self");
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @see VariablesView.overrideValueEvalMacro
|
||||
*/
|
||||
function overrideValueEvalMacro(item, currentString) {
|
||||
return VariablesView.overrideValueEvalMacro(item, currentString, "_self");
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @see VariablesView.getterOrSetterEvalMacro
|
||||
@ -3210,7 +3206,7 @@ JSTerm.prototype = {
|
||||
if (callback) {
|
||||
callback(msg);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// attempt to execute the content of the inputNode
|
||||
executeString = executeString || this.getInputValue();
|
||||
@ -3384,7 +3380,7 @@ JSTerm.prototype = {
|
||||
let document = options.targetElement.ownerDocument;
|
||||
let iframe = document.createElementNS(XHTML_NS, "iframe");
|
||||
|
||||
iframe.addEventListener("load", function onIframeLoad(event) {
|
||||
iframe.addEventListener("load", function onIframeLoad() {
|
||||
iframe.removeEventListener("load", onIframeLoad, true);
|
||||
iframe.style.visibility = "visible";
|
||||
deferred.resolve(iframe.contentWindow);
|
||||
@ -4387,8 +4383,8 @@ JSTerm.prototype = {
|
||||
|
||||
let currentItem = this.autocompletePopup.selectedItem;
|
||||
if (currentItem && this.lastCompletion.value) {
|
||||
let suffix = currentItem.label.substring(this.lastCompletion.
|
||||
matchProp.length);
|
||||
let suffix =
|
||||
currentItem.label.substring(this.lastCompletion.matchProp.length);
|
||||
this.updateCompleteNode(suffix);
|
||||
} else {
|
||||
this.updateCompleteNode("");
|
||||
@ -4421,8 +4417,8 @@ JSTerm.prototype = {
|
||||
|
||||
let currentItem = this.autocompletePopup.selectedItem;
|
||||
if (currentItem && this.lastCompletion.value) {
|
||||
let suffix = currentItem.label.substring(this.lastCompletion.
|
||||
matchProp.length);
|
||||
let suffix =
|
||||
currentItem.label.substring(this.lastCompletion.matchProp.length);
|
||||
let cursor = this.inputNode.selectionStart;
|
||||
let value = this.getInputValue();
|
||||
this.setInputValue(value.substr(0, cursor) +
|
||||
@ -4585,7 +4581,9 @@ var Utils = {
|
||||
let prefName = CATEGORY_CLASS_FRAGMENTS[category];
|
||||
logLimit = Services.prefs.getIntPref("devtools.hud.loglimit." + prefName);
|
||||
logLimit = Math.max(logLimit, 1);
|
||||
} catch (e) { }
|
||||
} catch (e) {
|
||||
// Ignore any exceptions
|
||||
}
|
||||
|
||||
return logLimit;
|
||||
},
|
||||
|
@ -219,7 +219,7 @@ StorageActors.defaults = function(typeName, observationTopic, storeObjectType) {
|
||||
events.off(this.storageActor, "window-ready", this.onWindowReady);
|
||||
events.off(this.storageActor, "window-destroyed", this.onWindowDestroyed);
|
||||
|
||||
this.hostVsStores = null;
|
||||
this.hostVsStores.clear();
|
||||
this.storageActor = null;
|
||||
},
|
||||
|
||||
@ -474,7 +474,7 @@ StorageActors.createActor({
|
||||
},
|
||||
|
||||
destroy: function() {
|
||||
this.hostVsStores = null;
|
||||
this.hostVsStores.clear();
|
||||
|
||||
// We need to remove the cookie listeners early in E10S mode so we need to
|
||||
// use a conditional here to ensure that we only attempt to remove them in
|
||||
@ -716,7 +716,7 @@ var cookieHelpers = {
|
||||
case "cookie-changed":
|
||||
let cookie = subject.QueryInterface(Ci.nsICookie2);
|
||||
cookieHelpers.onCookieChanged(cookie, topic, data);
|
||||
break;
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
@ -726,7 +726,7 @@ var cookieHelpers = {
|
||||
let [cookie, topic, data] = msg.data.args;
|
||||
cookie = JSON.parse(cookie);
|
||||
cookieHelpers.onCookieChanged(cookie, topic, data);
|
||||
break;
|
||||
break;
|
||||
}
|
||||
},
|
||||
|
||||
@ -764,9 +764,9 @@ exports.setupParentProcessForCookies = function({mm, prefix}) {
|
||||
|
||||
gTrackedMessageManager.set("cookies", mm);
|
||||
|
||||
function handleMessageManagerDisconnected(evt, { mm: disconnected_mm }) {
|
||||
function handleMessageManagerDisconnected(evt, { mm: disconnectedMm }) {
|
||||
// filter out not subscribed message managers
|
||||
if (disconnected_mm !== mm || !gTrackedMessageManager.has("cookies")) {
|
||||
if (disconnectedMm !== mm || !gTrackedMessageManager.has("cookies")) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -794,7 +794,7 @@ exports.setupParentProcessForCookies = function({mm, prefix}) {
|
||||
method: methodName,
|
||||
args: args
|
||||
});
|
||||
} catch(e) {
|
||||
} catch (e) {
|
||||
// We may receive a NS_ERROR_NOT_INITIALIZED if the target window has
|
||||
// been closed. This can legitimately happen in between test runs.
|
||||
}
|
||||
@ -837,7 +837,7 @@ function getObjectForLocalOrSessionStorage(type) {
|
||||
populateStoresForHost: function(host, window) {
|
||||
try {
|
||||
this.hostVsStores.set(host, window[type]);
|
||||
} catch(ex) {
|
||||
} catch (ex) {
|
||||
// Exceptions happen when local or session storage is inaccessible
|
||||
}
|
||||
return null;
|
||||
@ -850,7 +850,7 @@ function getObjectForLocalOrSessionStorage(type) {
|
||||
this.hostVsStores.set(this.getHostName(window.location),
|
||||
window[type]);
|
||||
}
|
||||
} catch(ex) {
|
||||
} catch (ex) {
|
||||
// Exceptions happen when local or session storage is inaccessible
|
||||
}
|
||||
return null;
|
||||
@ -922,11 +922,6 @@ StorageActors.createActor({
|
||||
storeObjectType: "storagestoreobject"
|
||||
}, getObjectForLocalOrSessionStorage("sessionStorage"));
|
||||
|
||||
|
||||
let CacheAttributes = [
|
||||
"url",
|
||||
"status",
|
||||
];
|
||||
types.addDictType("cacheobject", {
|
||||
"url": "string",
|
||||
"status": "string"
|
||||
@ -945,13 +940,15 @@ StorageActors.createActor({
|
||||
}, {
|
||||
getCachesForHost: Task.async(function*(host) {
|
||||
let uri = Services.io.newURI(host, null, null);
|
||||
let principal = Services.scriptSecurityManager.getNoAppCodebasePrincipal(uri);
|
||||
let principal =
|
||||
Services.scriptSecurityManager.getNoAppCodebasePrincipal(uri);
|
||||
|
||||
// The first argument tells if you want to get |content| cache or |chrome| cache.
|
||||
// The first argument tells if you want to get |content| cache or |chrome|
|
||||
// cache.
|
||||
// The |content| cache is the cache explicitely named by the web content
|
||||
// (service worker or web page).
|
||||
// The |chrome| cache is the cache implicitely cached by the platform, hosting the
|
||||
// source file of the service worker.
|
||||
// The |chrome| cache is the cache implicitely cached by the platform,
|
||||
// hosting the source file of the service worker.
|
||||
let { CacheStorage } = this.storageActor.window;
|
||||
let cache = new CacheStorage("content", principal);
|
||||
return cache;
|
||||
@ -981,18 +978,22 @@ StorageActors.createActor({

  getNamesForHost: function(host) {
    // UI code expect each name to be a JSON string of an array :/
    return [...this.hostVsStores.get(host).keys()].map(a => JSON.stringify([a]));
    return [...this.hostVsStores.get(host).keys()].map(a => {
      return JSON.stringify([a]);
    });
  },
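A minimal sketch of the JSON round trip these two actor methods rely on (not from the patch; the cache name "v1" is hypothetical): getNamesForHost hands the UI a JSON-stringified single-element array per name, and getValuesForHost parses it back.

// JSON.stringify(["v1"])   -> '["v1"]'  (what getNamesForHost returns per name)
// JSON.parse('["v1"]')[0]  -> "v1"      (what getValuesForHost recovers from `name`)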
|
||||
|
||||
getValuesForHost: Task.async(function*(host, name) {
|
||||
if (!name) return [];
|
||||
if (!name) {
|
||||
return [];
|
||||
}
|
||||
// UI is weird and expect a JSON stringified array... and pass it back :/
|
||||
name = JSON.parse(name)[0];
|
||||
|
||||
let cache = this.hostVsStores.get(host).get(name);
|
||||
let requests = yield cache.keys();
|
||||
let results = [];
|
||||
for(let request of requests) {
|
||||
for (let request of requests) {
|
||||
let response = yield cache.match(request);
|
||||
// Unwrap the response to get access to all its properties if the
|
||||
// response happen to be 'opaque', when it is a Cross Origin Request.
|
||||
@ -1016,7 +1017,7 @@ StorageActors.createActor({
|
||||
return location.protocol + "//" + location.host;
|
||||
},
|
||||
|
||||
populateStoresForHost: Task.async(function*(host, window) {
|
||||
populateStoresForHost: Task.async(function*(host) {
|
||||
let storeMap = new Map();
|
||||
let caches = yield this.getCachesForHost(host);
|
||||
for (let name of (yield caches.keys())) {
|
||||
@ -1180,7 +1181,7 @@ StorageActors.createActor({
|
||||
},
|
||||
|
||||
destroy: function() {
|
||||
this.hostVsStores = null;
|
||||
this.hostVsStores.clear();
|
||||
this.objectsSize = null;
|
||||
|
||||
events.off(this.storageActor, "window-ready", this.onWindowReady);
|
||||
@ -1381,7 +1382,7 @@ StorageActors.createActor({
|
||||
unresolvedPromises.delete(func);
|
||||
deferred.resolve(msg.json.args[0]);
|
||||
}
|
||||
break;
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
@ -1449,7 +1450,8 @@ var indexedDBHelpers = {
|
||||
principal = Services.scriptSecurityManager.getSystemPrincipal();
|
||||
} else {
|
||||
let uri = Services.io.newURI(host, null, null);
|
||||
principal = Services.scriptSecurityManager.createCodebasePrincipal(uri, {});
|
||||
principal = Services.scriptSecurityManager
|
||||
.createCodebasePrincipal(uri, {});
|
||||
}
|
||||
|
||||
return require("indexedDB").openForPrincipal(principal, name);
|
||||
@ -1733,9 +1735,9 @@ exports.setupParentProcessForIndexedDB = function({mm, prefix}) {
|
||||
|
||||
gTrackedMessageManager.set("indexedDB", mm);
|
||||
|
||||
function handleMessageManagerDisconnected(evt, { mm: disconnected_mm }) {
|
||||
function handleMessageManagerDisconnected(evt, { mm: disconnectedMm }) {
|
||||
// filter out not subscribed message managers
|
||||
if (disconnected_mm !== mm || !gTrackedMessageManager.has("indexedDB")) {
|
||||
if (disconnectedMm !== mm || !gTrackedMessageManager.has("indexedDB")) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -1802,8 +1804,8 @@ var StorageActor = exports.StorageActor = protocol.ActorClass({
|
||||
this.fetchChildWindows(this.parentActor.docShell);
|
||||
|
||||
// Initialize the registered store types
|
||||
for (let [store, actor] of storageTypePool) {
|
||||
this.childActorPool.set(store, new actor(this));
|
||||
for (let [store, ActorConstructor] of storageTypePool) {
|
||||
this.childActorPool.set(store, new ActorConstructor(this));
|
||||
}
|
||||
|
||||
// Notifications that help us keep track of newly added windows and windows
|
||||
@ -2089,7 +2091,7 @@ var StorageActor = exports.StorageActor = protocol.ActorClass({
|
||||
/**
|
||||
* Front for the Storage Actor.
|
||||
*/
|
||||
var StorageFront = exports.StorageFront = protocol.FrontClass(StorageActor, {
|
||||
exports.StorageFront = protocol.FrontClass(StorageActor, {
|
||||
initialize: function(client, tabForm) {
|
||||
protocol.Front.prototype.initialize.call(this, client);
|
||||
this.actorID = tabForm.storageActor;
|
||||
|
@ -5,12 +5,12 @@
|
||||
"use strict";
|
||||
|
||||
const { Ci, Cu } = require("chrome");
|
||||
const Services = require("Services");
|
||||
const DevToolsUtils = require("devtools/shared/DevToolsUtils");
|
||||
const { assert, fetch } = DevToolsUtils;
|
||||
const EventEmitter = require("devtools/shared/event-emitter");
|
||||
const { OriginalLocation, GeneratedLocation } = require("devtools/server/actors/common");
|
||||
const { resolve } = require("promise");
|
||||
const URL = require("URL");
|
||||
|
||||
loader.lazyRequireGetter(this, "SourceActor", "devtools/server/actors/script", true);
|
||||
loader.lazyRequireGetter(this, "isEvalSource", "devtools/server/actors/script", true);
|
||||
@ -258,9 +258,9 @@ TabSources.prototype = {
|
||||
*/
|
||||
_isMinifiedURL: function (aURL) {
|
||||
try {
|
||||
let url = Services.io.newURI(aURL, null, null)
|
||||
.QueryInterface(Ci.nsIURL);
|
||||
return MINIFIED_SOURCE_REGEXP.test(url.fileName);
|
||||
let url = new URL(aURL);
|
||||
let pathname = url.pathname;
|
||||
return MINIFIED_SOURCE_REGEXP.test(pathname.slice(pathname.lastIndexOf("/") + 1));
|
||||
} catch (e) {
|
||||
// Not a valid URL so don't try to parse out the filename, just test the
|
||||
// whole thing with the minified source regexp.
|
||||
@ -301,22 +301,28 @@ TabSources.prototype = {
|
||||
spec.isInlineSource = true;
|
||||
} else {
|
||||
if (url) {
|
||||
try {
|
||||
let urlInfo = Services.io.newURI(url, null, null).QueryInterface(Ci.nsIURL);
|
||||
if (urlInfo.fileExtension === "xml") {
|
||||
// XUL inline scripts may not correctly have the
|
||||
// `source.element` property, so do a blunt check here if
|
||||
// it's an xml page.
|
||||
spec.isInlineSource = true;
|
||||
}
|
||||
else if (urlInfo.fileExtension === "js") {
|
||||
spec.contentType = "text/javascript";
|
||||
}
|
||||
} catch(ex) {
|
||||
// There are a few special URLs that we know are JavaScript:
|
||||
// inline `javascript:` and code coming from the console
|
||||
if (url.indexOf("javascript:") === 0 || url === 'debugger eval code') {
|
||||
spec.contentType = "text/javascript";
|
||||
// There are a few special URLs that we know are JavaScript:
|
||||
// inline `javascript:` and code coming from the console
|
||||
if (url.indexOf("javascript:") === 0 || url === 'debugger eval code') {
|
||||
spec.contentType = "text/javascript";
|
||||
} else {
|
||||
try {
|
||||
let pathname = new URL(url).pathname;
|
||||
let filename = pathname.slice(pathname.lastIndexOf("/") + 1);
|
||||
let index = filename.lastIndexOf(".");
|
||||
let extension = index >= 0 ? filename.slice(index + 1) : "";
|
||||
if (extension === "xml") {
|
||||
// XUL inline scripts may not correctly have the
|
||||
// `source.element` property, so do a blunt check here if
|
||||
// it's an xml page.
|
||||
spec.isInlineSource = true;
|
||||
}
|
||||
else if (extension === "js") {
|
||||
spec.contentType = "text/javascript";
|
||||
}
|
||||
} catch (e) {
|
||||
// This only needs to be here because URL is not yet exposed to
|
||||
// workers.
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -478,8 +484,9 @@ TabSources.prototype = {
  },

  _dirname: function (aPath) {
    return Services.io.newURI(
      ".", null, Services.io.newURI(aPath, null, null)).spec;
    let url = new URL(aPath);
    let href = url.href;
    return href.slice(0, href.lastIndexOf("/"));
  },
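A quick worked example of the new slicing (the URL is made up for illustration):

// new URL("http://example.com/a/b/c.js").href === "http://example.com/a/b/c.js"
// href.slice(0, href.lastIndexOf("/"))        === "http://example.com/a/b"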
|
||||
|
||||
/**
|
||||
@ -778,12 +785,12 @@ TabSources.prototype = {
   */
  _normalize: function (...aURLs) {
    assert(aURLs.length > 1, "Should have more than 1 URL");
    let base = Services.io.newURI(aURLs.pop(), null, null);
    let base = new URL(aURLs.pop());
    let url;
    while ((url = aURLs.pop())) {
      base = Services.io.newURI(url, null, base);
      base = new URL(url, base);
    }
    return base.spec;
    return base.href;
  },
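For illustration only (the URLs are hypothetical), the loop resolves right to left exactly like chained URL construction:

// _normalize("../c.js", "/a/b/", "http://example.com/x/")
//   new URL("http://example.com/x/")                     -> base
//   new URL("/a/b/", base)                               -> http://example.com/a/b/
//   new URL("../c.js", "http://example.com/a/b/").href   -> "http://example.com/a/c.js"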
|
||||
|
||||
iter: function () {
|
||||
|
@ -84,7 +84,7 @@ function WebConsoleActor(aConnection, aParentActor)
|
||||
};
|
||||
}
|
||||
|
||||
WebConsoleActor.l10n = new WebConsoleUtils.l10n("chrome://global/locale/console.properties");
|
||||
WebConsoleActor.l10n = new WebConsoleUtils.L10n("chrome://global/locale/console.properties");
|
||||
|
||||
WebConsoleActor.prototype =
|
||||
{
|
||||
|
@ -71,6 +71,10 @@ XPCOMUtils.defineLazyGetter(loaderModules, "CSS", () => {
|
||||
return Cu.Sandbox(this, {wantGlobalProperties: ["CSS"]}).CSS;
|
||||
});
|
||||
|
||||
XPCOMUtils.defineLazyGetter(loaderModules, "URL", () => {
|
||||
return Cu.Sandbox(this, {wantGlobalProperties: ["URL"]}).URL;
|
||||
});
|
||||
|
||||
var sharedGlobalBlocklist = ["sdk/indexed-db"];
|
||||
|
||||
/**
|
||||
@ -394,6 +398,22 @@ DevToolsLoader.prototype = {
        id: this.id,
        main: this.main
      },
      // Make sure `define` function exists. This allows defining some modules
      // in AMD format while retaining CommonJS compatibility through this hook.
      // JSON Viewer needs modules in AMD format, as it currently uses RequireJS
      // from a content document and can't access our usual loaders. So, any
      // modules shared with the JSON Viewer should include a define wrapper:
      //
      // // Make this available to both AMD and CJS environments
      // define(function(require, exports, module) {
      //   ... code ...
      // });
      //
      // Bug 1248830 will work out a better plan here for our content module
      // loading needs, especially as we head towards devtools.html.
      define(factory) {
        factory(this.require, this.exports, this.module);
      },
    };
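As a concrete sketch of the wrapper the comment above asks for (module name and body are illustrative only, not part of the patch), a module shared with the JSON Viewer could look like this; the body is ordinary CommonJS code:

// shared-example.js
// Make this available to both AMD and CJS environments
define(function (require, exports, module) {
  // Regular CommonJS-style code inside the wrapper.
  exports.greet = function (name) {
    return "Hello, " + name + "!";
  };
});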
|
||||
// Lazy define console in order to load Console.jsm only when it is used
|
||||
XPCOMUtils.defineLazyGetter(this._provider.globals, "console", () => {
|
||||
|
@ -3,11 +3,13 @@
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
"use strict";
|
||||
|
||||
const { Visitor, walk } = require("resource://devtools/shared/heapsnapshot/CensusUtils.js");
|
||||
const { immutableUpdate } = require("resource://devtools/shared/ThreadSafeDevToolsUtils.js");
|
||||
const { Visitor, walk } = require("resource://devtools/shared/heapsnapshot/CensusUtils.js");
|
||||
const { deduplicatePaths } = require("resource://devtools/shared/heapsnapshot/shortest-paths");
|
||||
|
||||
const DEFAULT_MAX_DEPTH = 4;
|
||||
const DEFAULT_MAX_SIBLINGS = 15;
|
||||
const DEFAULT_MAX_NUM_PATHS = 5;
|
||||
|
||||
/**
|
||||
* A single node in a dominator tree.
|
||||
@ -34,6 +36,10 @@ function DominatorTreeNode(nodeId, label, shallowSize, retainedSize) {
|
||||
// An array of immediately dominated child `DominatorTreeNode`s, or undefined.
|
||||
this.children = undefined;
|
||||
|
||||
// An object of the form returned by `deduplicatePaths`, encoding the set of
|
||||
// the N shortest retaining paths for this node as a graph.
|
||||
this.shortestPaths = undefined;
|
||||
|
||||
// True iff the `children` property does not contain every immediately
|
||||
// dominated node.
|
||||
//
|
||||
@ -289,3 +295,42 @@ DominatorTreeNode.getNodeByIdAlongPath = function (id, tree, path) {
|
||||
|
||||
return find(tree, 0);
|
||||
};
|
||||
|
||||
/**
 * Find the shortest retaining paths for the given set of DominatorTreeNodes,
 * and populate each node's `shortestPaths` property with them in place.
 *
 * @param {HeapSnapshot} snapshot
 * @param {Object} breakdown
 * @param {NodeId} start
 * @param {Array<DominatorTreeNode>} treeNodes
 * @param {Number} maxNumPaths
 */
DominatorTreeNode.attachShortestPaths = function (snapshot,
                                                  breakdown,
                                                  start,
                                                  treeNodes,
                                                  maxNumPaths = DEFAULT_MAX_NUM_PATHS) {
  const idToTreeNode = new Map();
  const targets = [];
  for (let node of treeNodes) {
    const id = node.nodeId;
    idToTreeNode.set(id, node);
    targets.push(id);
  }

  const shortestPaths = snapshot.computeShortestPaths(start,
                                                      targets,
                                                      maxNumPaths);

  for (let [target, paths] of shortestPaths) {
    const deduped = deduplicatePaths(target, paths);
    deduped.nodes = deduped.nodes.map(id => {
      const { label } =
        DominatorTreeNode.getLabelAndShallowSize(id, snapshot, breakdown);
      return { id, label };
    });

    idToTreeNode.get(target).shortestPaths = deduped;
  }
};
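A minimal usage sketch, assuming a HeapSnapshot `snapshot`, a census `breakdown`, a start node id `rootNodeId`, and an array of already-built DominatorTreeNodes `nodes` (this mirrors how the worker task below calls it):

const DominatorTreeNode = require("devtools/shared/heapsnapshot/DominatorTreeNode");

// Attach at most two retaining paths per node, searching from `rootNodeId`.
DominatorTreeNode.attachShortestPaths(snapshot, breakdown, rootNodeId, nodes, 2);
// Afterwards every node in `nodes` carries
// node.shortestPaths === { nodes: [{ id, label }, ...], edges: [{ from, to, name }, ...] }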
|
||||
|
@ -226,6 +226,8 @@ HeapAnalysesClient.prototype.getDominatorTree = function (opts) {
|
||||
* by greatest to least retained size.
|
||||
* - {Number} maxCount
|
||||
* The maximum number of children to return.
|
||||
* - {Number} maxRetainingPaths
|
||||
* The maximum number of retaining paths to find for each node.
|
||||
*
|
||||
* @returns {Promise<Object>}
|
||||
* A promise of an object with the following properties:
|
||||
|
@ -157,7 +157,8 @@ workerHelper.createTask(self, "getDominatorTree", request => {
|
||||
dominatorTreeId,
|
||||
breakdown,
|
||||
maxDepth,
|
||||
maxSiblings
|
||||
maxSiblings,
|
||||
maxRetainingPaths,
|
||||
} = request;
|
||||
|
||||
if (!(0 <= dominatorTreeId && dominatorTreeId < dominatorTrees.length)) {
|
||||
@ -168,11 +169,29 @@ workerHelper.createTask(self, "getDominatorTree", request => {
  const dominatorTree = dominatorTrees[dominatorTreeId];
  const snapshot = dominatorTreeSnapshots[dominatorTreeId];

  return DominatorTreeNode.partialTraversal(dominatorTree,
                                            snapshot,
                                            breakdown,
                                            maxDepth,
                                            maxSiblings);
  const tree = DominatorTreeNode.partialTraversal(dominatorTree,
                                                  snapshot,
                                                  breakdown,
                                                  maxDepth,
                                                  maxSiblings);

  const nodes = [];
  (function getNodes(node) {
    nodes.push(node);
    if (node.children) {
      for (let i = 0, length = node.children.length; i < length; i++) {
        getNodes(node.children[i]);
      }
    }
  }(tree));

  DominatorTreeNode.attachShortestPaths(snapshot,
                                        breakdown,
                                        dominatorTree.root,
                                        nodes,
                                        maxRetainingPaths);

  return tree;
});
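For reference, a sketch of the request this task now consumes. The field names come from the destructuring above, the breakdown shape from the tests further down, and the concrete numbers simply mirror the DEFAULT_MAX_DEPTH, DEFAULT_MAX_SIBLINGS, and DEFAULT_MAX_NUM_PATHS constants in DominatorTreeNode.js; an actual client may send different values.

// Hypothetical example payload for the "getDominatorTree" task.
const request = {
  dominatorTreeId: 0,
  breakdown: { by: "internalType", then: { by: "count", count: true, bytes: true } },
  maxDepth: 4,
  maxSiblings: 15,
  maxRetainingPaths: 5,
};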
|
||||
|
||||
/**
|
||||
@ -184,7 +203,8 @@ workerHelper.createTask(self, "getImmediatelyDominated", request => {
|
||||
nodeId,
|
||||
breakdown,
|
||||
startIndex,
|
||||
maxCount
|
||||
maxCount,
|
||||
maxRetainingPaths,
|
||||
} = request;
|
||||
|
||||
if (!(0 <= dominatorTreeId && dominatorTreeId < dominatorTrees.length)) {
|
||||
@ -228,5 +248,11 @@ workerHelper.createTask(self, "getImmediatelyDominated", request => {
|
||||
|
||||
const moreChildrenAvailable = childIds.length > end;
|
||||
|
||||
DominatorTreeNode.attachShortestPaths(snapshot,
|
||||
breakdown,
|
||||
dominatorTree.root,
|
||||
nodes,
|
||||
maxRetainingPaths);
|
||||
|
||||
return { nodes, moreChildrenAvailable, path };
|
||||
});
|
||||
|
@ -55,4 +55,5 @@ DevToolsModules(
|
||||
'HeapAnalysesClient.js',
|
||||
'HeapAnalysesWorker.js',
|
||||
'HeapSnapshotFileUtils.js',
|
||||
'shortest-paths.js',
|
||||
)
|
||||
|
79
devtools/shared/heapsnapshot/shortest-paths.js
Normal file
@ -0,0 +1,79 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";

/**
 * Compress a set of paths leading to `target` into a single graph, returned as
 * a set of nodes and a set of edges.
 *
 * @param {NodeId} target
 *        The target node passed to `HeapSnapshot.computeShortestPaths`.
 *
 * @param {Array<Path>} paths
 *        An array of paths to `target`, as returned by
 *        `HeapSnapshot.computeShortestPaths`.
 *
 * @returns {Object}
 *          An object with two properties:
 *          - edges: An array of unique objects of the form:
 *            {
 *              from: <node ID>,
 *              to: <node ID>,
 *              name: <string or null>
 *            }
 *          - nodes: An array of unique node IDs. Every `from` and `to` id is
 *            guaranteed to be in this array exactly once.
 */
exports.deduplicatePaths = function (target, paths) {
  // Use this structure to de-duplicate edges among many retaining paths from
  // start to target.
  //
  // Map<FromNodeId, Map<ToNodeId, Set<EdgeName>>>
  const deduped = new Map();

  function insert(from, to, name) {
    let toMap = deduped.get(from);
    if (!toMap) {
      toMap = new Map();
      deduped.set(from, toMap);
    }

    let nameSet = toMap.get(to);
    if (!nameSet) {
      nameSet = new Set();
      toMap.set(to, nameSet);
    }

    nameSet.add(name);
  }

  for (let path of paths) {
    const pathLength = path.length;
    for (let i = 0; i < pathLength - 1; i++) {
      insert(path[i].predecessor, path[i + 1].predecessor, path[i].edge);
    }

    insert(path[pathLength - 1].predecessor, target, path[pathLength - 1].edge);
  }

  const nodes = [target];
  const edges = [];

  for (let [from, toMap] of deduped) {
    // If the second/third/etc shortest path contains the `target` anywhere
    // other than the very last node, we could accidentally put the `target` in
    // `nodes` more than once.
    if (from !== target) {
      nodes.push(from);
    }

    for (let [to, edgeNameSet] of toMap) {
      for (let name of edgeNameSet) {
        edges.push({ from, to, name });
      }
    }
  }

  return { nodes, edges };
};
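A small worked example (node ids and edge names are made up): two retaining paths into target 3 collapse into three unique nodes and three deduplicated edges.

const { deduplicatePaths } =
  require("devtools/shared/heapsnapshot/shortest-paths");

const target = 3;
const paths = [
  [{ predecessor: 1, edge: "a" }, { predecessor: 2, edge: "b" }],
  [{ predecessor: 1, edge: "c" }],
];

const { nodes, edges } = deduplicatePaths(target, paths);
// nodes -> [3, 1, 2]
// edges -> [{ from: 1, to: 2, name: "a" },
//           { from: 1, to: 3, name: "c" },
//           { from: 2, to: 3, name: "b" }]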
|
@ -23,6 +23,7 @@ const Services = require("Services");
|
||||
const { censusReportToCensusTreeNode } = require("devtools/shared/heapsnapshot/census-tree-node");
|
||||
const CensusUtils = require("devtools/shared/heapsnapshot/CensusUtils");
|
||||
const DominatorTreeNode = require("devtools/shared/heapsnapshot/DominatorTreeNode");
|
||||
const { deduplicatePaths } = require("devtools/shared/heapsnapshot/shortest-paths");
|
||||
const { LabelAndShallowSizeVisitor } = DominatorTreeNode;
|
||||
|
||||
|
||||
@ -375,3 +376,51 @@ function assertDominatorTreeNodeInsertion(tree, path, newChildren, moreChildrenA
|
||||
|
||||
assertStructurallyEquivalent(actual, expected);
|
||||
}
|
||||
|
||||
function assertDeduplicatedPaths({ target, paths, expectedNodes, expectedEdges }) {
|
||||
dumpn("Deduplicating paths:");
|
||||
dumpn("target = " + target);
|
||||
dumpn("paths = " + JSON.stringify(paths, null, 2));
|
||||
dumpn("expectedNodes = " + expectedNodes);
|
||||
dumpn("expectedEdges = " + JSON.stringify(expectedEdges, null, 2));
|
||||
|
||||
const { nodes, edges } = deduplicatePaths(target, paths);
|
||||
|
||||
dumpn("Actual nodes = " + nodes);
|
||||
dumpn("Actual edges = " + JSON.stringify(edges, null, 2));
|
||||
|
||||
equal(nodes.length, expectedNodes.length,
|
||||
"actual number of nodes is equal to the expected number of nodes");
|
||||
|
||||
equal(edges.length, expectedEdges.length,
|
||||
"actual number of edges is equal to the expected number of edges");
|
||||
|
||||
const expectedNodeSet = new Set(expectedNodes);
|
||||
const nodeSet = new Set(nodes);
|
||||
ok(nodeSet.size === nodes.length,
|
||||
"each returned node should be unique");
|
||||
|
||||
for (let node of nodes) {
|
||||
ok(expectedNodeSet.has(node), `the ${node} node was expected`);
|
||||
}
|
||||
|
||||
for (let expectedEdge of expectedEdges) {
|
||||
let count = 0;
|
||||
for (let edge of edges) {
|
||||
if (edge.from === expectedEdge.from &&
|
||||
edge.to === expectedEdge.to &&
|
||||
edge.name === expectedEdge.name) {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
equal(count, 1,
|
||||
"should have exactly one matching edge for the expected edge = " + JSON.stringify(edge));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a mock path entry for the given predecessor and edge.
|
||||
*/
|
||||
function pathEntry(predecessor, edge) {
|
||||
return { predecessor, edge };
|
||||
}
|
||||
|
@ -0,0 +1,132 @@
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
"use strict";
|
||||
|
||||
// Test that the DominatorTreeNode.attachShortestPaths function can correctly
|
||||
// attach the deduplicated shortest retaining paths for each node it is given.
|
||||
|
||||
const startNodeId = 9999;
|
||||
const maxNumPaths = 2;
|
||||
|
||||
// Mock data mapping node id to shortest paths to that node id.
|
||||
const shortestPaths = new Map([
|
||||
[1000, [
|
||||
[pathEntry(1100, "a"), pathEntry(1200, "b")],
|
||||
[pathEntry(1100, "c"), pathEntry(1300, "d")],
|
||||
]],
|
||||
[2000, [
|
||||
[pathEntry(2100, "e"), pathEntry(2200, "f"), pathEntry(2300, "g")]
|
||||
]],
|
||||
[3000, [
|
||||
[pathEntry(3100, "h")],
|
||||
[pathEntry(3100, "i")],
|
||||
[pathEntry(3100, "j")],
|
||||
[pathEntry(3200, "k")],
|
||||
[pathEntry(3300, "l")],
|
||||
[pathEntry(3400, "m")],
|
||||
]],
|
||||
]);
|
||||
|
||||
const actual = [
|
||||
makeTestDominatorTreeNode({ nodeId: 1000 }),
|
||||
makeTestDominatorTreeNode({ nodeId: 2000 }),
|
||||
makeTestDominatorTreeNode({ nodeId: 3000 }),
|
||||
];
|
||||
|
||||
const expected = [
|
||||
makeTestDominatorTreeNode({
|
||||
nodeId: 1000,
|
||||
shortestPaths: {
|
||||
nodes: [
|
||||
{ id: 1000, label: ["SomeType-1000"] },
|
||||
{ id: 1100, label: ["SomeType-1100"] },
|
||||
{ id: 1200, label: ["SomeType-1200"] },
|
||||
{ id: 1300, label: ["SomeType-1300"] },
|
||||
],
|
||||
edges: [
|
||||
{ from: 1100, to: 1200, name: "a" },
|
||||
{ from: 1100, to: 1300, name: "c" },
|
||||
{ from: 1200, to: 1000, name: "b" },
|
||||
{ from: 1300, to: 1000, name: "d" },
|
||||
]
|
||||
}
|
||||
}),
|
||||
|
||||
makeTestDominatorTreeNode({
|
||||
nodeId: 2000,
|
||||
shortestPaths: {
|
||||
nodes: [
|
||||
{ id: 2000, label: ["SomeType-2000"] },
|
||||
{ id: 2100, label: ["SomeType-2100"] },
|
||||
{ id: 2200, label: ["SomeType-2200"] },
|
||||
{ id: 2300, label: ["SomeType-2300"] },
|
||||
],
|
||||
edges: [
|
||||
{ from: 2100, to: 2200, name: "e" },
|
||||
{ from: 2200, to: 2300, name: "f" },
|
||||
{ from: 2300, to: 2000, name: "g" },
|
||||
]
|
||||
}
|
||||
}),
|
||||
|
||||
makeTestDominatorTreeNode({ nodeId: 3000,
|
||||
shortestPaths: {
|
||||
nodes: [
|
||||
{ id: 3000, label: ["SomeType-3000"] },
|
||||
{ id: 3100, label: ["SomeType-3100"] },
|
||||
{ id: 3200, label: ["SomeType-3200"] },
|
||||
{ id: 3300, label: ["SomeType-3300"] },
|
||||
{ id: 3400, label: ["SomeType-3400"] },
|
||||
],
|
||||
edges: [
|
||||
{ from: 3100, to: 3000, name: "h" },
|
||||
{ from: 3100, to: 3000, name: "i" },
|
||||
{ from: 3100, to: 3000, name: "j" },
|
||||
{ from: 3200, to: 3000, name: "k" },
|
||||
{ from: 3300, to: 3000, name: "l" },
|
||||
{ from: 3400, to: 3000, name: "m" },
|
||||
]
|
||||
}
|
||||
}),
|
||||
];
|
||||
|
||||
const breakdown = {
|
||||
by: "internalType",
|
||||
then: { by: "count", count: true, bytes: true }
|
||||
};
|
||||
|
||||
const mockSnapshot = {
|
||||
computeShortestPaths: (start, nodeIds, max) => {
|
||||
equal(start, startNodeId);
|
||||
equal(max, maxNumPaths);
|
||||
|
||||
return new Map(nodeIds.map(nodeId => {
|
||||
const paths = shortestPaths.get(nodeId);
|
||||
ok(paths, "Expected computeShortestPaths call for node id = " + nodeId);
|
||||
return [nodeId, paths];
|
||||
}));
|
||||
},
|
||||
|
||||
describeNode: (bd, nodeId) => {
|
||||
equal(bd, breakdown);
|
||||
return {
|
||||
["SomeType-" + nodeId]: {
|
||||
count: 1,
|
||||
bytes: 10,
|
||||
}
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
function run_test() {
|
||||
DominatorTreeNode.attachShortestPaths(mockSnapshot,
|
||||
breakdown,
|
||||
startNodeId,
|
||||
actual,
|
||||
maxNumPaths);
|
||||
|
||||
dumpn("Expected = " + JSON.stringify(expected, null, 2));
|
||||
dumpn("Actual = " + JSON.stringify(actual, null, 2));
|
||||
|
||||
assertStructurallyEquivalent(expected, actual);
|
||||
}
|
@ -60,6 +60,7 @@ const expected = {
|
||||
],
|
||||
shallowSize: 10,
|
||||
retainedSize: 10,
|
||||
shortestPaths: undefined,
|
||||
children: [
|
||||
{
|
||||
nodeId: 200,
|
||||
@ -70,6 +71,7 @@ const expected = {
|
||||
shallowSize: 10,
|
||||
retainedSize: 10,
|
||||
parentId: 100,
|
||||
shortestPaths: undefined,
|
||||
children: [
|
||||
{
|
||||
nodeId: 500,
|
||||
@ -81,6 +83,7 @@ const expected = {
|
||||
retainedSize: 10,
|
||||
parentId: 200,
|
||||
moreChildrenAvailable: false,
|
||||
shortestPaths: undefined,
|
||||
children: undefined
|
||||
},
|
||||
{
|
||||
@ -93,6 +96,7 @@ const expected = {
|
||||
retainedSize: 10,
|
||||
parentId: 200,
|
||||
moreChildrenAvailable: false,
|
||||
shortestPaths: undefined,
|
||||
children: undefined
|
||||
}
|
||||
],
|
||||
@ -107,6 +111,7 @@ const expected = {
|
||||
shallowSize: 10,
|
||||
retainedSize: 10,
|
||||
parentId: 100,
|
||||
shortestPaths: undefined,
|
||||
children: [
|
||||
{
|
||||
nodeId: 800,
|
||||
@ -118,6 +123,7 @@ const expected = {
|
||||
retainedSize: 10,
|
||||
parentId: 300,
|
||||
moreChildrenAvailable: false,
|
||||
shortestPaths: undefined,
|
||||
children: undefined
|
||||
},
|
||||
{
|
||||
@ -130,6 +136,7 @@ const expected = {
|
||||
retainedSize: 10,
|
||||
parentId: 300,
|
||||
moreChildrenAvailable: false,
|
||||
shortestPaths: undefined,
|
||||
children: undefined
|
||||
}
|
||||
],
|
||||
|
@ -51,6 +51,13 @@ add_task(function* () {
|
||||
equal(typeof node.moreChildrenAvailable, "boolean",
|
||||
"each node should indicate if there are more children available or not");
|
||||
|
||||
equal(typeof node.shortestPaths, "object",
|
||||
"Should have shortest paths");
|
||||
equal(typeof node.shortestPaths.nodes, "object",
|
||||
"Should have shortest paths' nodes");
|
||||
equal(typeof node.shortestPaths.edges, "object",
|
||||
"Should have shortest paths' edges");
|
||||
|
||||
if (node.children) {
|
||||
node.children.forEach(checkTree);
|
||||
}
|
||||
|
@ -44,6 +44,15 @@ add_task(function* () {
|
||||
equal(response.path.length, 1);
|
||||
equal(response.path[0], partialTree.nodeId);
|
||||
|
||||
for (let node of response.nodes) {
|
||||
equal(typeof node.shortestPaths, "object",
|
||||
"Should have shortest paths");
|
||||
equal(typeof node.shortestPaths.nodes, "object",
|
||||
"Should have shortest paths' nodes");
|
||||
equal(typeof node.shortestPaths.edges, "object",
|
||||
"Should have shortest paths' edges");
|
||||
}
|
||||
|
||||
// Next, test getting a subset of children available.
|
||||
const secondResponse = yield client.getImmediatelyDominated({
|
||||
dominatorTreeId,
|
||||
@ -59,5 +68,14 @@ add_task(function* () {
|
||||
equal(secondResponse.path.length, 1);
|
||||
equal(secondResponse.path[0], partialTree.nodeId);
|
||||
|
||||
for (let node of secondResponse.nodes) {
|
||||
equal(typeof node.shortestPaths, "object",
|
||||
"Should have shortest paths");
|
||||
equal(typeof node.shortestPaths.nodes, "object",
|
||||
"Should have shortest paths' nodes");
|
||||
equal(typeof node.shortestPaths.edges, "object",
|
||||
"Should have shortest paths' edges");
|
||||
}
|
||||
|
||||
client.destroy();
|
||||
});
|
||||
|
@ -0,0 +1,115 @@
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
"use strict";
|
||||
|
||||
// Test the behavior of the deduplicatePaths utility function.
|
||||
|
||||
function edge(from, to, name) {
|
||||
return { from, to, name };
|
||||
}
|
||||
|
||||
function run_test() {
|
||||
const a = 1;
|
||||
const b = 2;
|
||||
const c = 3;
|
||||
const d = 4;
|
||||
const e = 5;
|
||||
const f = 6;
|
||||
const g = 7;
|
||||
|
||||
dumpn("Single long path");
|
||||
assertDeduplicatedPaths({
|
||||
target: g,
|
||||
paths: [
|
||||
[
|
||||
pathEntry(a, "e1"),
|
||||
pathEntry(b, "e2"),
|
||||
pathEntry(c, "e3"),
|
||||
pathEntry(d, "e4"),
|
||||
pathEntry(e, "e5"),
|
||||
pathEntry(f, "e6"),
|
||||
]
|
||||
],
|
||||
expectedNodes: [a, b, c, d, e, f, g],
|
||||
expectedEdges: [
|
||||
edge(a, b, "e1"),
|
||||
edge(b, c, "e2"),
|
||||
edge(c, d, "e3"),
|
||||
edge(d, e, "e4"),
|
||||
edge(e, f, "e5"),
|
||||
edge(f, g, "e6"),
|
||||
]
|
||||
});
|
||||
|
||||
dumpn("Multiple edges from and to the same nodes");
|
||||
assertDeduplicatedPaths({
|
||||
target: a,
|
||||
paths: [
|
||||
[pathEntry(b, "x")],
|
||||
[pathEntry(b, "y")],
|
||||
[pathEntry(b, "z")],
|
||||
],
|
||||
expectedNodes: [a, b],
|
||||
expectedEdges: [
|
||||
edge(b, a, "x"),
|
||||
edge(b, a, "y"),
|
||||
edge(b, a, "z"),
|
||||
]
|
||||
});
|
||||
|
||||
dumpn("Multiple paths sharing some nodes and edges");
|
||||
assertDeduplicatedPaths({
|
||||
target: g,
|
||||
paths: [
|
||||
[
|
||||
pathEntry(a, "a->b"),
|
||||
pathEntry(b, "b->c"),
|
||||
pathEntry(c, "foo"),
|
||||
],
|
||||
[
|
||||
pathEntry(a, "a->b"),
|
||||
pathEntry(b, "b->d"),
|
||||
pathEntry(d, "bar"),
|
||||
],
|
||||
[
|
||||
pathEntry(a, "a->b"),
|
||||
pathEntry(b, "b->e"),
|
||||
pathEntry(e, "baz"),
|
||||
],
|
||||
],
|
||||
expectedNodes: [a, b, c, d, e, g],
|
||||
expectedEdges: [
|
||||
edge(a, b, "a->b"),
|
||||
edge(b, c, "b->c"),
|
||||
edge(b, d, "b->d"),
|
||||
edge(b, e, "b->e"),
|
||||
edge(c, g, "foo"),
|
||||
edge(d, g, "bar"),
|
||||
edge(e, g, "baz"),
|
||||
]
|
||||
});
|
||||
|
||||
dumpn("Second shortest path contains target itself");
|
||||
assertDeduplicatedPaths({
|
||||
target: g,
|
||||
paths: [
|
||||
[
|
||||
pathEntry(a, "a->b"),
|
||||
pathEntry(b, "b->g"),
|
||||
],
|
||||
[
|
||||
pathEntry(a, "a->b"),
|
||||
pathEntry(b, "b->g"),
|
||||
pathEntry(g, "g->f"),
|
||||
pathEntry(f, "f->g"),
|
||||
],
|
||||
],
|
||||
expectedNodes: [a, b, f, g],
|
||||
expectedEdges: [
|
||||
edge(a, b, "a->b"),
|
||||
edge(b, g, "b->g"),
|
||||
edge(g, f, "g->f"),
|
||||
edge(f, g, "f->g"),
|
||||
]
|
||||
});
|
||||
}
|
@ -29,12 +29,14 @@ support-files =
|
||||
[test_census-tree-node-06.js]
|
||||
[test_census-tree-node-07.js]
|
||||
[test_census-tree-node-08.js]
|
||||
[test_deduplicatePaths_01.js]
|
||||
[test_DominatorTree_01.js]
|
||||
[test_DominatorTree_02.js]
|
||||
[test_DominatorTree_03.js]
|
||||
[test_DominatorTree_04.js]
|
||||
[test_DominatorTree_05.js]
|
||||
[test_DominatorTree_06.js]
|
||||
[test_DominatorTreeNode_attachShortestPaths_01.js]
|
||||
[test_DominatorTreeNode_getNodeByIdAlongPath_01.js]
|
||||
[test_DominatorTreeNode_insert_01.js]
|
||||
[test_DominatorTreeNode_insert_02.js]
|
||||
|
@ -618,12 +618,11 @@ exports.Utils = WebConsoleUtils;
|
||||
// Localization
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
|
||||
WebConsoleUtils.l10n = function WCU_l10n(aBundleURI)
|
||||
{
|
||||
this._bundleUri = aBundleURI;
|
||||
WebConsoleUtils.L10n = function(bundleURI) {
|
||||
this._bundleUri = bundleURI;
|
||||
};
|
||||
|
||||
WebConsoleUtils.l10n.prototype = {
|
||||
WebConsoleUtils.L10n.prototype = {
|
||||
_stringBundle: null,
|
||||
|
||||
get stringBundle()
|
||||
|
@ -10,7 +10,7 @@
|
||||
// is stubbed out to prevent errors, and will need to implemented
|
||||
// for Bug 1209353.
|
||||
|
||||
exports.Utils = { l10n: function() {} };
|
||||
exports.Utils = { L10n: function() {} };
|
||||
exports.ConsoleServiceListener = function() {};
|
||||
exports.ConsoleAPIListener = function() {};
|
||||
exports.addWebConsoleCommands = function() {};
|
||||
|
@ -494,6 +494,7 @@ this.worker = new WorkerDebuggerLoader({
|
||||
"Debugger": Debugger,
|
||||
"PromiseDebugging": PromiseDebugging,
|
||||
"Services": Object.create(null),
|
||||
"URL": null,
|
||||
"chrome": chrome,
|
||||
"xpcInspector": xpcInspector
|
||||
},
|
||||
|
@ -1,149 +0,0 @@
|
||||
buildDir "${topobjdir}/gradle/build/mobile/android/base"
|
||||
|
||||
apply plugin: 'com.android.library'
|
||||
|
||||
android {
|
||||
compileSdkVersion 23
|
||||
buildToolsVersion "23.0.1"
|
||||
|
||||
defaultConfig {
|
||||
targetSdkVersion 23
|
||||
minSdkVersion 15
|
||||
// Used by Robolectric based tests; see TestRunner.
|
||||
buildConfigField 'String', 'BUILD_DIR', "\"${project.buildDir}\""
|
||||
}
|
||||
|
||||
compileOptions {
|
||||
sourceCompatibility JavaVersion.VERSION_1_7
|
||||
targetCompatibility JavaVersion.VERSION_1_7
|
||||
}
|
||||
|
||||
lintOptions {
|
||||
abortOnError false
|
||||
}
|
||||
|
||||
sourceSets {
|
||||
main {
|
||||
manifest.srcFile "${topsrcdir}/mobile/android/base/AndroidManifest.xml"
|
||||
|
||||
java {
|
||||
srcDir "${topsrcdir}/mobile/android/base/java"
|
||||
srcDir "${topsrcdir}/mobile/android/search/java"
|
||||
srcDir "${topsrcdir}/mobile/android/javaaddons/java"
|
||||
srcDir "${topsrcdir}/mobile/android/services/src/main/java"
|
||||
|
||||
if (mozconfig.substs.MOZ_ANDROID_MLS_STUMBLER) {
|
||||
srcDir "${topsrcdir}/mobile/android/stumbler/java"
|
||||
}
|
||||
|
||||
if (!mozconfig.substs.MOZ_CRASHREPORTER) {
|
||||
exclude 'org/mozilla/gecko/CrashReporter.java'
|
||||
}
|
||||
|
||||
if (!mozconfig.substs.MOZ_NATIVE_DEVICES) {
|
||||
exclude 'org/mozilla/gecko/ChromeCast.java'
|
||||
exclude 'org/mozilla/gecko/GeckoMediaPlayer.java'
|
||||
exclude 'org/mozilla/gecko/MediaPlayerManager.java'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_WEBRTC) {
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src"
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src"
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/video_render/android/java/src"
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_INSTALL_TRACKING) {
|
||||
exclude 'org/mozilla/gecko/adjust/StubAdjustHelper.java'
|
||||
} else {
|
||||
exclude 'org/mozilla/gecko/adjust/AdjustHelper.java'
|
||||
}
|
||||
|
||||
srcDir "${project.buildDir}/generated/source/preprocessed_code" // See syncPreprocessedCode.
|
||||
}
|
||||
|
||||
res {
|
||||
srcDir "${topsrcdir}/${mozconfig.substs.MOZ_BRANDING_DIRECTORY}/res"
|
||||
srcDir "${project.buildDir}/generated/source/preprocessed_resources" // See syncPreprocessedResources.
|
||||
srcDir "${topsrcdir}/mobile/android/base/resources"
|
||||
srcDir "${topsrcdir}/mobile/android/services/src/main/res"
|
||||
if (mozconfig.substs.MOZ_CRASHREPORTER) {
|
||||
srcDir "${topsrcdir}/mobile/android/base/crashreporter/res"
|
||||
}
|
||||
}
|
||||
|
||||
assets {
|
||||
srcDir "${topsrcdir}/mobile/android/app/assets"
|
||||
}
|
||||
}
|
||||
|
||||
test {
|
||||
java {
|
||||
srcDir "${topsrcdir}/mobile/android/tests/background/junit4/src"
|
||||
}
|
||||
|
||||
resources {
|
||||
srcDir "${topsrcdir}/mobile/android/tests/background/junit4/resources"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
task syncPreprocessedCode(type: Sync, dependsOn: rootProject.generateCodeAndResources) {
|
||||
into("${project.buildDir}/generated/source/preprocessed_code")
|
||||
from("${topobjdir}/mobile/android/base/generated/preprocessed")
|
||||
}
|
||||
|
||||
task syncPreprocessedResources(type: Sync, dependsOn: rootProject.generateCodeAndResources) {
|
||||
into("${project.buildDir}/generated/source/preprocessed_resources")
|
||||
from("${topobjdir}/mobile/android/base/res")
|
||||
}
|
||||
|
||||
android.libraryVariants.all { variant ->
|
||||
variant.preBuild.dependsOn syncPreprocessedCode
|
||||
variant.preBuild.dependsOn syncPreprocessedResources
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile 'com.android.support:support-v4:23.0.1'
|
||||
compile 'com.android.support:appcompat-v7:23.0.1'
|
||||
compile 'com.android.support:recyclerview-v7:23.0.1'
|
||||
compile 'com.android.support:design:23.0.1'
|
||||
|
||||
if (mozconfig.substs.MOZ_NATIVE_DEVICES) {
|
||||
compile 'com.android.support:mediarouter-v7:23.0.1'
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-base:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-cast:8.1.0'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_INSTALL_TRACKING) {
|
||||
compile 'com.google.android.gms:play-services-ads:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-analytics:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-appindexing:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_ANDROID_GCM) {
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-base:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-gcm:8.1.0'
|
||||
}
|
||||
|
||||
// Gradle based builds include LeakCanary. Mach based builds only include the no-op version of
|
||||
// this library.
|
||||
compile 'com.squareup.leakcanary:leakcanary-android:1.4-beta1'
|
||||
|
||||
compile project(':thirdparty')
|
||||
|
||||
testCompile 'junit:junit:4.12'
|
||||
testCompile 'org.robolectric:robolectric:3.0'
|
||||
testCompile 'org.simpleframework:simple-http:6.0.1'
|
||||
testCompile 'org.mockito:mockito-core:1.10.19'
|
||||
}
|
||||
|
||||
apply plugin: 'idea'
|
||||
|
||||
idea {
|
||||
module {
|
||||
}
|
||||
}
|
@ -17,6 +17,8 @@ android {
|
||||
MOZ_ANDROID_MIN_SDK_VERSION: mozconfig.substs.MOZ_ANDROID_MIN_SDK_VERSION,
|
||||
MOZ_ANDROID_SHARED_ID: "${mozconfig.substs.ANDROID_PACKAGE_NAME}.sharedID",
|
||||
]
|
||||
// Used by Robolectric based tests; see TestRunner.
|
||||
buildConfigField 'String', 'BUILD_DIR', "\"${project.buildDir}\""
|
||||
}
|
||||
|
||||
compileOptions {
|
||||
@ -64,13 +66,66 @@ android {
|
||||
|
||||
sourceSets {
|
||||
main {
|
||||
manifest.srcFile "${topobjdir}/mobile/android/base/AndroidManifest.xml"
|
||||
manifest.srcFile "${project.buildDir}/generated/source/preprocessed_manifest/AndroidManifest.xml"
|
||||
|
||||
java {
|
||||
srcDir "${topsrcdir}/mobile/android/base/java"
|
||||
srcDir "${topsrcdir}/mobile/android/search/java"
|
||||
srcDir "${topsrcdir}/mobile/android/javaaddons/java"
|
||||
srcDir "${topsrcdir}/mobile/android/services/src/main/java"
|
||||
|
||||
if (mozconfig.substs.MOZ_ANDROID_MLS_STUMBLER) {
|
||||
srcDir "${topsrcdir}/mobile/android/stumbler/java"
|
||||
}
|
||||
|
||||
if (!mozconfig.substs.MOZ_CRASHREPORTER) {
|
||||
exclude 'org/mozilla/gecko/CrashReporter.java'
|
||||
}
|
||||
|
||||
if (!mozconfig.substs.MOZ_NATIVE_DEVICES) {
|
||||
exclude 'org/mozilla/gecko/ChromeCast.java'
|
||||
exclude 'org/mozilla/gecko/GeckoMediaPlayer.java'
|
||||
exclude 'org/mozilla/gecko/MediaPlayerManager.java'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_WEBRTC) {
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/audio_device/android/java/src"
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src"
|
||||
srcDir "${topsrcdir}/media/webrtc/trunk/webrtc/modules/video_render/android/java/src"
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_INSTALL_TRACKING) {
|
||||
exclude 'org/mozilla/gecko/adjust/StubAdjustHelper.java'
|
||||
} else {
|
||||
exclude 'org/mozilla/gecko/adjust/AdjustHelper.java'
|
||||
}
|
||||
|
||||
srcDir "${project.buildDir}/generated/source/preprocessed_code" // See syncPreprocessedCode.
|
||||
}
|
||||
|
||||
res {
|
||||
srcDir "${topsrcdir}/${mozconfig.substs.MOZ_BRANDING_DIRECTORY}/res"
|
||||
srcDir "${project.buildDir}/generated/source/preprocessed_resources" // See syncPreprocessedResources.
|
||||
srcDir "${topsrcdir}/mobile/android/base/resources"
|
||||
srcDir "${topsrcdir}/mobile/android/services/src/main/res"
|
||||
if (mozconfig.substs.MOZ_CRASHREPORTER) {
|
||||
srcDir "${topsrcdir}/mobile/android/base/crashreporter/res"
|
||||
}
|
||||
}
|
||||
|
||||
assets {
|
||||
if (mozconfig.substs.MOZ_ANDROID_DISTRIBUTION_DIRECTORY && !mozconfig.substs.MOZ_ANDROID_PACKAGE_INSTALL_BOUNCER) {
|
||||
// If we are packaging the bouncer, it will have the distribution, so don't put
|
||||
// it in the main APK as well.
|
||||
srcDir "${mozconfig.substs.MOZ_ANDROID_DISTRIBUTION_DIRECTORY}/assets"
|
||||
}
|
||||
srcDir "${topsrcdir}/mobile/android/app/assets"
|
||||
}
|
||||
}
|
||||
|
||||
test {
|
||||
java {
|
||||
srcDir "${topsrcdir}/mobile/android/tests/background/junit4/src"
|
||||
}
|
||||
}
|
||||
|
||||
@ -89,10 +144,55 @@ android {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
testOptions {
|
||||
unitTests.all {
|
||||
// We'd like to use (Runtime.runtime.availableProcessors()/2), but
|
||||
// we have tests that start test servers and the bound ports
|
||||
// collide. We'll fix this soon to have much faster test cycles.
|
||||
maxParallelForks 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compile project(':base')
|
||||
compile 'com.android.support:support-v4:23.0.1'
|
||||
compile 'com.android.support:appcompat-v7:23.0.1'
|
||||
compile 'com.android.support:recyclerview-v7:23.0.1'
|
||||
compile 'com.android.support:design:23.0.1'
|
||||
|
||||
if (mozconfig.substs.MOZ_NATIVE_DEVICES) {
|
||||
compile 'com.android.support:mediarouter-v7:23.0.1'
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-base:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-cast:8.1.0'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_INSTALL_TRACKING) {
|
||||
compile 'com.google.android.gms:play-services-ads:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-analytics:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-appindexing:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
}
|
||||
|
||||
if (mozconfig.substs.MOZ_ANDROID_GCM) {
|
||||
compile 'com.google.android.gms:play-services-basement:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-base:8.1.0'
|
||||
compile 'com.google.android.gms:play-services-gcm:8.1.0'
|
||||
}
|
||||
|
||||
// Gradle based builds include LeakCanary. Gradle based tests include the no-op version. Mach
|
||||
// based builds only include the no-op version of this library.
|
||||
compile 'com.squareup.leakcanary:leakcanary-android:1.4-beta1'
|
||||
testCompile 'com.squareup.leakcanary:leakcanary-android-no-op:1.4-beta1'
|
||||
|
||||
compile project(':thirdparty')
|
||||
|
||||
testCompile 'junit:junit:4.12'
|
||||
testCompile 'org.robolectric:robolectric:3.0'
|
||||
testCompile 'org.simpleframework:simple-http:6.0.1'
|
||||
testCompile 'org.mockito:mockito-core:1.10.19'
|
||||
|
||||
// Including the Robotium JAR directly can cause issues with dexing.
|
||||
androidTestCompile 'com.jayway.android.robotium:robotium-solo:4.3.1'
|
||||
}
|
||||
@ -128,6 +228,32 @@ task syncAssetsFromDistDir(type: Sync, dependsOn: checkAssetsExistInDistDir) {
|
||||
}
|
||||
}
|
||||
|
||||
task syncPreprocessedCode(type: Sync, dependsOn: rootProject.generateCodeAndResources) {
|
||||
into("${project.buildDir}/generated/source/preprocessed_code")
|
||||
from("${topobjdir}/mobile/android/base/generated/preprocessed")
|
||||
}
|
||||
|
||||
// The localization system uses the moz.build preprocessor to interpolate a .dtd
|
||||
// file of XML entity definitions into an XML file of elements referencing those
|
||||
// entities. (Each locale produces its own .dtd file, backstopped by the en-US
|
||||
// .dtd file in tree.) Android Studio (and IntelliJ) don't handle these inline
|
||||
// entities smoothly. This filter merely expands the entities in place, making
|
||||
// them appear properly throughout the IDE.
|
||||
class ExpandXMLEntitiesFilter extends FilterReader {
|
||||
ExpandXMLEntitiesFilter(Reader input) {
|
||||
// Extremely inefficient, but whatever.
|
||||
super(new StringReader(groovy.xml.XmlUtil.serialize(new XmlParser(false, false, true).parse(input))))
|
||||
}
|
||||
}
|
||||
|
||||
task syncPreprocessedResources(type: Sync, dependsOn: rootProject.generateCodeAndResources) {
|
||||
into("${project.buildDir}/generated/source/preprocessed_resources")
|
||||
from("${topobjdir}/mobile/android/base/res")
|
||||
filesMatching('**/strings.xml') {
|
||||
filter(ExpandXMLEntitiesFilter)
|
||||
}
|
||||
}
|
||||
|
||||
// The omnijar inputs are listed as resource directory inputs to a dummy JAR.
|
||||
// That arrangement labels them nicely in IntelliJ. See the comment in the
|
||||
// :omnijar project for more context.
|
||||
@ -160,7 +286,21 @@ task buildOmnijar(type:Exec) {
|
||||
}
|
||||
}
|
||||
|
||||
// It's not easy -- see the backout in Bug 1242213 -- to change the <manifest>
|
||||
// package for Fennec. Gradle has grown a mechanism to achieve what we want for
|
||||
// Fennec, however, with applicationId. To use the same manifest as moz.build,
|
||||
// we replace the package with org.mozilla.gecko (the eventual package) here.
|
||||
task rewriteManifestPackage(type: Copy, dependsOn: rootProject.generateCodeAndResources) {
|
||||
into("${project.buildDir}/generated/source/preprocessed_manifest")
|
||||
from("${topobjdir}/mobile/android/base/AndroidManifest.xml")
|
||||
filter { it.replaceFirst(/package=".*?"/, 'package="org.mozilla.gecko"') }
|
||||
}
|
||||
|
||||
android.applicationVariants.all { variant ->
|
||||
variant.preBuild.dependsOn rewriteManifestPackage
|
||||
variant.preBuild.dependsOn syncPreprocessedCode
|
||||
variant.preBuild.dependsOn syncPreprocessedResources
|
||||
|
||||
// Like 'local' or 'localOld'.
|
||||
def productFlavor = variant.productFlavors[0].name
|
||||
// Like 'debug' or 'release'.
|
||||
|
@ -0,0 +1,27 @@
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
package org.mozilla.gecko;
|
||||
|
||||
import android.app.Application;
|
||||
|
||||
import org.robolectric.TestLifecycleApplication;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
/**
|
||||
* GeckoApplication isn't test-lifecycle friendly: onCreate is called multiple times, which
|
||||
* re-registers Gecko event listeners, which fails. This class is magically named so that
|
||||
* Robolectric uses it instead of the application defined in the Android manifest. See
|
||||
* http://robolectric.blogspot.ca/2013/04/the-test-lifecycle-in-20.html.
|
||||
*/
|
||||
public class TestGeckoApplication extends Application implements TestLifecycleApplication {
|
||||
@Override public void beforeTest(Method method) {
|
||||
}
|
||||
|
||||
@Override public void prepareTest(Object test) {
|
||||
}
|
||||
|
||||
@Override public void afterTest(Method method) {
|
||||
}
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="org.mozilla.gecko">
|
||||
<!-- THIS IS NOT THE REAL MANIFEST! This is for Gradle only. See
|
||||
AndroidManifest.xml.in. -->
|
||||
|
||||
</manifest>
|
@ -912,6 +912,7 @@ sync_java_files = [TOPSRCDIR + '/mobile/android/services/src/main/java/org/mozil
|
||||
'sync/net/BearerAuthHeaderProvider.java',
|
||||
'sync/net/BrowserIDAuthHeaderProvider.java',
|
||||
'sync/net/ConnectionMonitorThread.java',
|
||||
'sync/net/GzipNonChunkedCompressingEntity.java',
|
||||
'sync/net/HandleProgressException.java',
|
||||
'sync/net/HawkAuthHeaderProvider.java',
|
||||
'sync/net/HMACAuthHeaderProvider.java',
|
||||
|
@ -589,7 +589,7 @@ public class BrowserApp extends GeckoApp
|
||||
|
||||
if (AppConstants.MOZ_SWITCHBOARD) {
|
||||
// Initializes the default URLs the first time.
|
||||
SwitchBoard.initDefaultServerUrls("https://switchboard-server.dev.mozaws.net/urls", "https://switchboard-server.dev.mozaws.net/v1", true);
|
||||
SwitchBoard.initDefaultServerUrls("https://switchboard.services.mozilla.com/urls", "https://switchboard.services.mozilla.com/v1", true);
|
||||
|
||||
final String switchboardUUID = ContextUtils.getStringExtra(intent, INTENT_KEY_SWITCHBOARD_UUID);
|
||||
SwitchBoard.setUUIDFromExtra(switchboardUUID);
|
||||
|
@ -200,6 +200,8 @@ public class TelemetryUploadService extends BackgroundService {
|
||||
|
||||
delegate.setResource(resource);
|
||||
resource.delegate = delegate;
|
||||
resource.setShouldCompressUploadedEntity(true);
|
||||
resource.setShouldChunkUploadsHint(false); // Telemetry servers don't support chunking.
|
||||
|
||||
// We're in a background thread so we don't have any reason to do this asynchronously.
|
||||
// If we tried, onStartCommand would return and IntentService might stop itself before we finish.
|
||||
|
@ -27,8 +27,8 @@ import org.mozilla.gecko.widget.themed.ThemedLinearLayout;
|
||||
import org.mozilla.gecko.widget.themed.ThemedTextView;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.Resources;
|
||||
import android.os.SystemClock;
|
||||
import android.support.annotation.Nullable;
|
||||
import android.text.Spannable;
|
||||
import android.text.SpannableStringBuilder;
|
||||
import android.text.TextUtils;
|
||||
@ -37,9 +37,6 @@ import android.util.AttributeSet;
|
||||
import android.util.Log;
|
||||
import android.view.LayoutInflater;
|
||||
import android.view.View;
|
||||
import android.view.animation.AlphaAnimation;
|
||||
import android.view.animation.Animation;
|
||||
import android.view.animation.TranslateAnimation;
|
||||
import android.widget.Button;
|
||||
import android.widget.ImageButton;
|
||||
|
||||
@ -84,11 +81,11 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
}
|
||||
|
||||
interface OnStopListener {
|
||||
public Tab onStop();
|
||||
Tab onStop();
|
||||
}
|
||||
|
||||
interface OnTitleChangeListener {
|
||||
public void onTitleChange(CharSequence title);
|
||||
void onTitleChange(CharSequence title);
|
||||
}
|
||||
|
||||
private final BrowserApp mActivity;
|
||||
@ -115,18 +112,15 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
// Security level constants, which map to the icons / levels defined in:
|
||||
// http://mxr.mozilla.org/mozilla-central/source/mobile/android/base/java/org/mozilla/gecko/resources/drawable/site_security_level.xml
|
||||
// Default level (unverified pages) - globe icon:
|
||||
private final int LEVEL_DEFAULT_GLOBE = 0;
|
||||
private static final int LEVEL_DEFAULT_GLOBE = 0;
|
||||
// Levels for displaying Mixed Content state icons.
|
||||
private final int LEVEL_WARNING_MINOR = 3;
|
||||
private final int LEVEL_LOCK_DISABLED = 4;
|
||||
private static final int LEVEL_WARNING_MINOR = 3;
|
||||
private static final int LEVEL_LOCK_DISABLED = 4;
|
||||
// Levels for displaying Tracking Protection state icons.
|
||||
private final int LEVEL_SHIELD_ENABLED = 5;
|
||||
private final int LEVEL_SHIELD_DISABLED = 6;
|
||||
private static final int LEVEL_SHIELD_ENABLED = 5;
|
||||
private static final int LEVEL_SHIELD_DISABLED = 6;
|
||||
|
||||
private final ForegroundColorSpan mUrlColor;
|
||||
private final ForegroundColorSpan mBlockedColor;
|
||||
private final ForegroundColorSpan mDomainColor;
|
||||
private final ForegroundColorSpan mPrivateDomainColor;
|
||||
|
||||
public ToolbarDisplayLayout(Context context, AttributeSet attrs) {
|
||||
super(context, attrs);
|
||||
@ -139,12 +133,7 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
mTitle = (ThemedTextView) findViewById(R.id.url_bar_title);
|
||||
mTitlePadding = mTitle.getPaddingRight();
|
||||
|
||||
final Resources res = getResources();
|
||||
|
||||
mUrlColor = new ForegroundColorSpan(ColorUtils.getColor(context, R.color.url_bar_urltext));
|
||||
mBlockedColor = new ForegroundColorSpan(ColorUtils.getColor(context, R.color.url_bar_blockedtext));
|
||||
mDomainColor = new ForegroundColorSpan(ColorUtils.getColor(context, R.color.url_bar_domaintext));
|
||||
mPrivateDomainColor = new ForegroundColorSpan(ColorUtils.getColor(context, R.color.url_bar_domaintext_private));
|
||||
|
||||
mSiteSecurity = (ImageButton) findViewById(R.id.site_security);
|
||||
|
||||
@ -158,6 +147,8 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
|
||||
@Override
|
||||
public void onAttachedToWindow() {
|
||||
super.onAttachedToWindow();
|
||||
|
||||
mIsAttached = true;
|
||||
|
||||
mSiteSecurity.setOnClickListener(new Button.OnClickListener() {
|
||||
@ -244,6 +235,7 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
// "Enter Search or Address" placeholder text.
|
||||
if (AboutPages.isTitlelessAboutPage(url)) {
|
||||
setTitle(null);
|
||||
setContentDescription(null);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -255,32 +247,28 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
builder.setSpan(mBlockedColor, 0, title.length(), Spannable.SPAN_INCLUSIVE_INCLUSIVE);
|
||||
|
||||
setTitle(builder);
|
||||
setContentDescription(null);
|
||||
return;
|
||||
}
|
||||
|
||||
final String baseDomain = tab.getBaseDomain();
|
||||
|
||||
String strippedURL = stripAboutReaderURL(url);
|
||||
|
||||
if (mPrefs.shouldTrimUrls()) {
|
||||
strippedURL = StringUtils.stripCommonSubdomains(StringUtils.stripScheme(strippedURL));
|
||||
}
|
||||
|
||||
CharSequence title = strippedURL;
|
||||
// This value is not visible to screen readers but we rely on it when running UI tests. Screen
|
||||
// readers will instead focus BrowserToolbar and read the "base domain" from there. UI tests
|
||||
// will read the content description to obtain the full URL for performing assertions.
|
||||
setContentDescription(strippedURL);
|
||||
|
||||
final String baseDomain = tab.getBaseDomain();
|
||||
if (!TextUtils.isEmpty(baseDomain)) {
|
||||
final SpannableStringBuilder builder = new SpannableStringBuilder(title);
|
||||
|
||||
int index = title.toString().indexOf(baseDomain);
|
||||
if (index > -1) {
|
||||
builder.setSpan(mUrlColor, 0, title.length(), Spannable.SPAN_INCLUSIVE_INCLUSIVE);
|
||||
builder.setSpan(tab.isPrivate() ? mPrivateDomainColor : mDomainColor,
|
||||
index, index + baseDomain.length(), Spannable.SPAN_INCLUSIVE_INCLUSIVE);
|
||||
|
||||
title = builder;
|
||||
}
|
||||
setTitle(baseDomain);
|
||||
} else {
|
||||
setTitle(strippedURL);
|
||||
}
|
||||
|
||||
setTitle(title);
|
||||
}
|
||||
|
||||
private String stripAboutReaderURL(final String url) {
|
||||
@ -354,13 +342,13 @@ public class ToolbarDisplayLayout extends ThemedLinearLayout {
|
||||
mTrackingProtectionEnabled = trackingMode == TrackingMode.TRACKING_CONTENT_BLOCKED;
|
||||
}
|
||||
|
||||
private void updateProgress(Tab tab) {
|
||||
private void updateProgress(@Nullable Tab tab) {
|
||||
final boolean shouldShowThrobber = (tab != null &&
|
||||
tab.getState() == Tab.STATE_LOADING);
|
||||
|
||||
updateUiMode(shouldShowThrobber ? UIMode.PROGRESS : UIMode.DISPLAY);
|
||||
|
||||
if (Tab.STATE_SUCCESS == tab.getState() && mTrackingProtectionEnabled) {
|
||||
if (tab != null && Tab.STATE_SUCCESS == tab.getState() && mTrackingProtectionEnabled) {
|
||||
mActivity.showTrackingProtectionPromptIfApplicable();
|
||||
}
|
||||
}
|
||||
|
@ -110,9 +110,6 @@
|
||||
<color name="textbox_stroke">#000</color>
|
||||
<color name="textbox_stroke_disabled">#666</color>
|
||||
|
||||
<color name="url_bar_urltext">#A6A6A6</color>
|
||||
<color name="url_bar_domaintext">#000</color>
|
||||
<color name="url_bar_domaintext_private">#FFF</color>
|
||||
<color name="url_bar_blockedtext">#b14646</color>
|
||||
<color name="url_bar_shadow">#12000000</color>
|
||||
|
||||
|
@ -57,7 +57,17 @@ class MachCommands(MachCommandBase):
|
||||
# Avoid logging the command
|
||||
self.log_manager.terminal_handler.setLevel(logging.CRITICAL)
|
||||
|
||||
# We force the Gradle JVM to run with the UTF-8 encoding, since we
|
||||
# filter strings.xml, which is really UTF-8; the ellipsis character is
|
||||
# replaced with ??? in some encodings (including ASCII). It's not yet
|
||||
# possible to filter with encodings in Gradle
|
||||
# (https://github.com/gradle/gradle/pull/520) and it's challenging to
|
||||
# do our filtering with Gradle's Ant support. Moreover, all of the
|
||||
# Android tools expect UTF-8: see
|
||||
# http://tools.android.com/knownissues/encoding. See
|
||||
# http://stackoverflow.com/a/21267635 for discussion of this approach.
|
||||
return self.run_process([self.substs['GRADLE']] + args,
|
||||
append_env={'GRADLE_OPTS': '-Dfile.encoding=utf-8'},
|
||||
pass_thru=True, # Allow user to run gradle interactively.
|
||||
ensure_exit_code=False, # Don't throw on non-zero exit code.
|
||||
cwd=mozpath.join(self.topsrcdir))
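The encoding pitfall described in the comment can be reproduced directly on the JVM; a small stand-alone sketch (the string literal is made up):

import java.nio.charset.StandardCharsets;

// Round-trip an ellipsis through UTF-8 and ASCII: UTF-8 preserves it, while the
// ASCII encoder substitutes '?' for the unmappable character.
public class EllipsisEncoding {
    public static void main(String[] args) {
        String s = "Loading\u2026"; // "Loading…"
        String utf8 = new String(s.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8);
        String ascii = new String(s.getBytes(StandardCharsets.US_ASCII), StandardCharsets.US_ASCII);
        System.out.println(utf8);  // Loading…
        System.out.println(ascii); // Loading?
    }
}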
|
||||
|
@ -30,6 +30,7 @@ import ch.boye.httpclientandroidlib.HttpResponse;
|
||||
import ch.boye.httpclientandroidlib.HttpVersion;
|
||||
import ch.boye.httpclientandroidlib.client.AuthCache;
|
||||
import ch.boye.httpclientandroidlib.client.ClientProtocolException;
|
||||
import ch.boye.httpclientandroidlib.client.entity.GzipCompressingEntity;
|
||||
import ch.boye.httpclientandroidlib.client.methods.HttpDelete;
|
||||
import ch.boye.httpclientandroidlib.client.methods.HttpGet;
|
||||
import ch.boye.httpclientandroidlib.client.methods.HttpPatch;
|
||||
@ -80,6 +81,10 @@ public class BaseResource implements Resource {
|
||||
protected HttpRequestBase request;
|
||||
public final String charset = "utf-8";
|
||||
|
||||
private boolean shouldGzipCompress = false;
|
||||
// A hint for whether uploaded payloads should be chunked. Defaults to true so that the built-in GzipCompressingEntity is used.
|
||||
private boolean shouldChunkUploadsHint = true;
|
||||
|
||||
/**
|
||||
* We have very few writes (observers tend to be installed around sync
|
||||
* sessions) and many iterations (every HTTP request iterates observers), so
|
||||
@ -162,6 +167,34 @@ public class BaseResource implements Resource {
|
||||
return this.getURI().getHost();
|
||||
}
|
||||
|
||||
/**
|
||||
* Causes the Resource to compress the uploaded entity payload in requests with payloads (e.g. post, put)
|
||||
* @param shouldCompress true if the entity should be compressed, false otherwise
|
||||
*/
|
||||
public void setShouldCompressUploadedEntity(final boolean shouldCompress) {
|
||||
shouldGzipCompress = shouldCompress;
|
||||
}
|
||||
|
||||
/**
|
||||
* Causes the Resource to chunk the uploaded entity payload in requests with payloads (e.g. post, put).
|
||||
* Note: this flag is only a hint - chunking is not guaranteed.
|
||||
*
|
||||
* Chunking is currently supported with gzip compression.
|
||||
*
|
||||
* @param shouldChunk true if the transfer should be chunked, false otherwise
|
||||
*/
|
||||
public void setShouldChunkUploadsHint(final boolean shouldChunk) {
|
||||
shouldChunkUploadsHint = shouldChunk;
|
||||
}
|
||||
|
||||
private HttpEntity getMaybeCompressedEntity(final HttpEntity entity) {
|
||||
if (!shouldGzipCompress) {
|
||||
return entity;
|
||||
}
|
||||
|
||||
return shouldChunkUploadsHint ? new GzipCompressingEntity(entity) : new GzipNonChunkedCompressingEntity(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* This shuts up HttpClient, which will otherwise debug log about there
|
||||
* being no auth cache in the context.
|
||||
@ -365,6 +398,7 @@ public class BaseResource implements Resource {
|
||||
@Override
|
||||
public void post(HttpEntity body) {
|
||||
Logger.debug(LOG_TAG, "HTTP POST " + this.uri.toASCIIString());
|
||||
body = getMaybeCompressedEntity(body);
|
||||
HttpPost request = new HttpPost(this.uri);
|
||||
request.setEntity(body);
|
||||
this.go(request);
|
||||
@ -373,6 +407,7 @@ public class BaseResource implements Resource {
|
||||
@Override
|
||||
public void patch(HttpEntity body) {
|
||||
Logger.debug(LOG_TAG, "HTTP PATCH " + this.uri.toASCIIString());
|
||||
body = getMaybeCompressedEntity(body);
|
||||
HttpPatch request = new HttpPatch(this.uri);
|
||||
request.setEntity(body);
|
||||
this.go(request);
|
||||
@ -381,6 +416,7 @@ public class BaseResource implements Resource {
|
||||
@Override
|
||||
public void put(HttpEntity body) {
|
||||
Logger.debug(LOG_TAG, "HTTP PUT " + this.uri.toASCIIString());
|
||||
body = getMaybeCompressedEntity(body);
|
||||
HttpPut request = new HttpPut(this.uri);
|
||||
request.setEntity(body);
|
||||
this.go(request);
|
||||
|
@ -0,0 +1,92 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
package org.mozilla.gecko.sync.net;
|
||||
|
||||
import ch.boye.httpclientandroidlib.HttpEntity;
|
||||
import ch.boye.httpclientandroidlib.client.entity.GzipCompressingEntity;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Wrapping entity that compresses content when {@link #writeTo writing}.
|
||||
*
|
||||
* This differs from {@link GzipCompressingEntity} in that it does not chunk
|
||||
* the sent data, therefore replacing the "Transfer-Encoding" HTTP header with
|
||||
* the "Content-Length" header required by some servers.
|
||||
*
|
||||
* However, to measure the content length, the gzipped content will be temporarily
|
||||
* stored in memory so be careful what content you send!
|
||||
*/
|
||||
public class GzipNonChunkedCompressingEntity extends GzipCompressingEntity {
|
||||
final int MAX_BUFFER_SIZE_BYTES = 10 * 1000 * 1000; // 10 MB.
|
||||
|
||||
private byte[] gzippedContent;
|
||||
|
||||
public GzipNonChunkedCompressingEntity(final HttpEntity entity) {
|
||||
super(entity);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return content length for gzipped content or -1 if there is an error
|
||||
*/
|
||||
@Override
|
||||
public long getContentLength() {
|
||||
try {
|
||||
initBuffer();
|
||||
} catch (final IOException e) {
|
||||
// GzipCompressingEntity always returns -1 in which case a 'Content-Length' header is omitted.
|
||||
// Presumably, without it the request will fail (either client-side or server-side).
|
||||
return -1;
|
||||
}
|
||||
return gzippedContent.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isChunked() {
|
||||
// "Content-Length" & chunked encoding are mutually exclusive:
|
||||
// https://en.wikipedia.org/wiki/Chunked_transfer_encoding
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getContent() throws IOException {
|
||||
initBuffer();
|
||||
return new ByteArrayInputStream(gzippedContent);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(final OutputStream outstream) throws IOException {
|
||||
initBuffer();
|
||||
outstream.write(gzippedContent);
|
||||
}
|
||||
|
||||
private void initBuffer() throws IOException {
|
||||
if (gzippedContent != null) {
|
||||
return;
|
||||
}
|
||||
|
||||
final long unzippedContentLength = wrappedEntity.getContentLength();
|
||||
if (unzippedContentLength > MAX_BUFFER_SIZE_BYTES) {
|
||||
throw new IOException(
|
||||
"Wrapped entity content length, " + unzippedContentLength + " bytes, exceeds max: " + MAX_BUFFER_SIZE_BYTES);
|
||||
}
|
||||
|
||||
// The buffer size needed by the gzipped content should be smaller than this,
|
||||
// but it's more efficient just to allocate one larger buffer than allocate
|
||||
// twice if the gzipped content is too large for the default buffer.
|
||||
final ByteArrayOutputStream s = new ByteArrayOutputStream((int) unzippedContentLength);
|
||||
try {
|
||||
super.writeTo(s);
|
||||
} finally {
|
||||
s.close();
|
||||
}
|
||||
|
||||
gzippedContent = s.toByteArray();
|
||||
}
|
||||
}
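A minimal usage sketch, not part of the patch; it assumes httpclientandroidlib exposes ByteArrayEntity at ch.boye.httpclientandroidlib.entity.ByteArrayEntity:

import ch.boye.httpclientandroidlib.HttpEntity;
import ch.boye.httpclientandroidlib.entity.ByteArrayEntity; // assumed repackaged location

import org.mozilla.gecko.sync.net.GzipNonChunkedCompressingEntity;

// Wrap a small payload so the upload advertises an exact Content-Length
// instead of Transfer-Encoding: chunked.
public class GzipEntityExample {
    public static void main(String[] args) throws Exception {
        byte[] payload = "{\"ping\":\"example\"}".getBytes("UTF-8");
        HttpEntity entity = new GzipNonChunkedCompressingEntity(new ByteArrayEntity(payload));
        System.out.println("chunked: " + entity.isChunked());               // false
        System.out.println("content length: " + entity.getContentLength()); // size of the gzipped bytes
    }
}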
|
@ -28,7 +28,6 @@ background_junit3_sources = [
|
||||
'src/org/mozilla/gecko/background/helpers/BackgroundServiceTestCase.java',
|
||||
'src/org/mozilla/gecko/background/helpers/DBHelpers.java',
|
||||
'src/org/mozilla/gecko/background/helpers/DBProviderTestCase.java',
|
||||
'src/org/mozilla/gecko/background/helpers/FakeProfileTestCase.java',
|
||||
'src/org/mozilla/gecko/background/nativecode/test/TestNativeCrypto.java',
|
||||
'src/org/mozilla/gecko/background/sync/AndroidSyncTestCaseWithAccounts.java',
|
||||
'src/org/mozilla/gecko/background/sync/helpers/BookmarkHelpers.java',
|
||||
|
@ -1,61 +0,0 @@
|
||||
/* Any copyright is dedicated to the Public Domain.
|
||||
http://creativecommons.org/publicdomain/zero/1.0/ */
|
||||
|
||||
package org.mozilla.gecko.background.helpers;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.Context;
|
||||
import android.content.SharedPreferences;
|
||||
import android.test.ActivityInstrumentationTestCase2;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.mozilla.gecko.background.common.GlobalConstants;
|
||||
|
||||
import org.mozilla.gecko.background.common.TestUtils;
|
||||
|
||||
public abstract class FakeProfileTestCase extends ActivityInstrumentationTestCase2<Activity> {
|
||||
|
||||
protected Context context;
|
||||
protected File fakeProfileDirectory;
|
||||
private String sharedPrefsName;
|
||||
|
||||
public FakeProfileTestCase() {
|
||||
super(Activity.class);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the profile cache suffix. This is computed once for each test function (in setUp()).
|
||||
* Note that the return value is not cached.
|
||||
*/
|
||||
protected String getCacheSuffix() {
|
||||
return this.getClass().getName() + "-" + System.currentTimeMillis();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
context = getInstrumentation().getTargetContext();
|
||||
File cache = context.getCacheDir();
|
||||
fakeProfileDirectory = new File(cache.getAbsolutePath() + getCacheSuffix());
|
||||
if (fakeProfileDirectory.exists()) {
|
||||
TestUtils.deleteDirectoryRecursively(fakeProfileDirectory);
|
||||
}
|
||||
if (!fakeProfileDirectory.mkdir()) {
|
||||
throw new IllegalStateException("Could not create temporary directory.");
|
||||
}
|
||||
// Class name of the form: ActivityInstrumentationTestCase2$FakeProfileTestCase$extendingClass.
|
||||
sharedPrefsName = this.getClass().getName() + "-" + UUID.randomUUID();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void tearDown() throws Exception {
|
||||
TestUtils.deleteDirectoryRecursively(fakeProfileDirectory);
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
public SharedPreferences getSharedPreferences() {
|
||||
return context.getSharedPreferences(sharedPrefsName, GlobalConstants.SHARED_PREFERENCES_MODE);
|
||||
}
|
||||
}
|
@ -249,6 +249,22 @@ abstract class BaseTest extends BaseRobocopTest {
|
||||
}
|
||||
}
|
||||
|
||||
class VerifyContentDescription implements Condition {
|
||||
private final View view;
|
||||
private final String expected;
|
||||
|
||||
public VerifyContentDescription(View view, String expected) {
|
||||
this.view = view;
|
||||
this.expected = expected;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isSatisfied() {
|
||||
final CharSequence actual = view.getContentDescription();
|
||||
return TextUtils.equals(actual, expected);
|
||||
}
|
||||
}
|
||||
|
||||
protected final String getAbsoluteUrl(String url) {
|
||||
return mBaseHostnameUrl + "/" + url.replaceAll("(^/)", "");
|
||||
}
|
||||
@ -471,6 +487,33 @@ abstract class BaseTest extends BaseRobocopTest {
|
||||
mAsserter.is(pageTitle, expected, "Page title is correct");
|
||||
}
|
||||
|
||||
public final void verifyUrlInContentDescription(String url) {
|
||||
mAsserter.isnot(url, null, "The url argument is not null");
|
||||
|
||||
final String expected;
|
||||
if (mStringHelper.ABOUT_HOME_URL.equals(url)) {
|
||||
expected = mStringHelper.ABOUT_HOME_TITLE;
|
||||
} else if (url.startsWith(URL_HTTP_PREFIX)) {
|
||||
expected = url.substring(URL_HTTP_PREFIX.length());
|
||||
} else {
|
||||
expected = url;
|
||||
}
|
||||
|
||||
final View urlDisplayLayout = mSolo.getView(R.id.display_layout);
|
||||
assertNotNull("ToolbarDisplayLayout is not null", urlDisplayLayout);
|
||||
|
||||
String actualUrl = null;
|
||||
|
||||
// Wait for the title to make sure it has been displayed in case the view
|
||||
// does not update fast enough
|
||||
waitForCondition(new VerifyContentDescription(urlDisplayLayout, expected), MAX_WAIT_VERIFY_PAGE_TITLE_MS);
|
||||
if (urlDisplayLayout.getContentDescription() != null) {
|
||||
actualUrl = urlDisplayLayout.getContentDescription().toString();
|
||||
}
|
||||
|
||||
mAsserter.is(actualUrl, expected, "Url is correct");
|
||||
}
|
||||
|
||||
public final void verifyTabCount(int expectedTabCount) {
|
||||
Element tabCount = mDriver.findElement(getActivity(), R.id.tabs_counter);
|
||||
String tabCountText = tabCount.getText();
|
||||
|
@ -10,6 +10,7 @@ import static org.mozilla.gecko.tests.helpers.AssertionHelper.fAssertNotNull;
|
||||
import static org.mozilla.gecko.tests.helpers.AssertionHelper.fAssertTrue;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
import org.mozilla.gecko.AppConstants;
|
||||
import org.mozilla.gecko.R;
|
||||
@ -33,7 +34,7 @@ import com.jayway.android.robotium.solo.Solo;
|
||||
* A class representing any interactions that take place on the app menu.
|
||||
*/
|
||||
public class AppMenuComponent extends BaseComponent {
|
||||
private static final long MAX_WAITTIME_FOR_MENU_UPDATE_IN_MS = 7500L;
|
||||
private static final int MAX_WAITTIME_FOR_MENU_UPDATE_IN_MS = 7500;
|
||||
|
||||
public enum MenuItem {
|
||||
FORWARD(R.string.forward),
|
||||
@ -124,40 +125,43 @@ public class AppMenuComponent extends BaseComponent {
|
||||
*
|
||||
* This method is dependent on not having two views with equivalent contentDescription / text.
|
||||
*/
|
||||
private View findAppMenuItemView(String text) {
|
||||
RobotiumHelper.waitForExactText(text, 1, MAX_WAITTIME_FOR_MENU_UPDATE_IN_MS);
|
||||
private View findAppMenuItemView(final String text) {
|
||||
return WaitHelper.waitFor(String.format("menu item view '%s'", text), new Callable<View>() {
|
||||
@Override
|
||||
public View call() throws Exception {
|
||||
final List<View> views = mSolo.getViews();
|
||||
|
||||
final List<View> views = mSolo.getViews();
|
||||
|
||||
final List<MenuItemActionBar> menuItemActionBarList = RobotiumUtils.filterViews(MenuItemActionBar.class, views);
|
||||
for (MenuItemActionBar menuItem : menuItemActionBarList) {
|
||||
if (TextUtils.equals(menuItem.getContentDescription(), text)) {
|
||||
return menuItem;
|
||||
}
|
||||
}
|
||||
|
||||
final List<MenuItemDefault> menuItemDefaultList = RobotiumUtils.filterViews(MenuItemDefault.class, views);
|
||||
for (MenuItemDefault menuItem : menuItemDefaultList) {
|
||||
if (TextUtils.equals(menuItem.getText(), text)) {
|
||||
return menuItem;
|
||||
}
|
||||
}
|
||||
|
||||
// On Android 2.3, menu items may be instances of
|
||||
// com.android.internal.view.menu.ListMenuItemView, each with a child
|
||||
// android.widget.RelativeLayout which in turn has a child
|
||||
// TextView with the appropriate text.
|
||||
final List<TextView> textViewList = RobotiumUtils.filterViews(TextView.class, views);
|
||||
for (TextView textView : textViewList) {
|
||||
if (TextUtils.equals(textView.getText(), text)) {
|
||||
View relativeLayout = (View) textView.getParent();
|
||||
if (relativeLayout instanceof RelativeLayout) {
|
||||
View listMenuItemView = (View)relativeLayout.getParent();
|
||||
return listMenuItemView;
|
||||
final List<MenuItemActionBar> menuItemActionBarList = RobotiumUtils.filterViews(MenuItemActionBar.class, views);
|
||||
for (MenuItemActionBar menuItem : menuItemActionBarList) {
|
||||
if (TextUtils.equals(menuItem.getContentDescription(), text)) {
|
||||
return menuItem;
|
||||
}
|
||||
}
|
||||
|
||||
final List<MenuItemDefault> menuItemDefaultList = RobotiumUtils.filterViews(MenuItemDefault.class, views);
|
||||
for (MenuItemDefault menuItem : menuItemDefaultList) {
|
||||
if (TextUtils.equals(menuItem.getText(), text)) {
|
||||
return menuItem;
|
||||
}
|
||||
}
|
||||
|
||||
// On Android 2.3, menu items may be instances of
|
||||
// com.android.internal.view.menu.ListMenuItemView, each with a child
|
||||
// android.widget.RelativeLayout which in turn has a child
|
||||
// TextView with the appropriate text.
|
||||
final List<TextView> textViewList = RobotiumUtils.filterViews(TextView.class, views);
|
||||
for (TextView textView : textViewList) {
|
||||
if (TextUtils.equals(textView.getText(), text)) {
|
||||
View relativeLayout = (View) textView.getParent();
|
||||
if (relativeLayout instanceof RelativeLayout) {
|
||||
View listMenuItemView = (View)relativeLayout.getParent();
|
||||
return listMenuItemView;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}, MAX_WAITTIME_FOR_MENU_UPDATE_IN_MS);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -63,7 +63,9 @@ public class ToolbarComponent extends BaseComponent {
|
||||
expected = absoluteURL;
|
||||
}
|
||||
|
||||
fAssertEquals("The Toolbar title is " + expected, expected, getTitle());
|
||||
// Since we only display a shortened "base domain" (See bug 1236431) we use the content
|
||||
// description to obtain the full URL.
|
||||
fAssertEquals("The Toolbar title is " + expected, expected, getUrlFromContentDescription());
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -148,8 +150,15 @@ public class ToolbarComponent extends BaseComponent {
|
||||
return getToolbarView().findViewById(R.id.edit_cancel);
|
||||
}
|
||||
|
||||
private String getTitle() {
|
||||
return getTitleHelper(true);
|
||||
private String getUrlFromContentDescription() {
|
||||
assertIsNotEditing();
|
||||
|
||||
final CharSequence contentDescription = getUrlDisplayLayout().getContentDescription();
|
||||
if (contentDescription == null) {
|
||||
return "";
|
||||
} else {
|
||||
return contentDescription.toString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -8,6 +8,8 @@ import static org.mozilla.gecko.tests.helpers.AssertionHelper.fAssertNotNull;
|
||||
import static org.mozilla.gecko.tests.helpers.AssertionHelper.fAssertTrue;
|
||||
|
||||
import android.os.SystemClock;
|
||||
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.mozilla.gecko.Actions;
|
||||
@ -70,6 +72,36 @@ public final class WaitHelper {
|
||||
fAssertTrue(message, sSolo.waitForCondition(condition, waitMillis));
|
||||
}
|
||||
|
||||
/**
|
||||
* Waits for the given Callable to return a non-null value, polling until the given wait
* duration has elapsed.
*
* @return the value returned by the Callable, or null if the duration elapsed before a
* non-null value was produced.
|
||||
*/
|
||||
public static <V> V waitFor(String message, final Callable<V> callable, int waitMillis) {
|
||||
sContext.dumpLog("WaitHelper", "Waiting for " + message + " with timeout " + waitMillis + ".");
|
||||
|
||||
final Object[] value = new Object[1];
|
||||
|
||||
Condition condition = new Condition() {
|
||||
@Override
|
||||
public boolean isSatisfied() {
|
||||
try {
|
||||
V result = callable.call();
|
||||
value[0] = result;
|
||||
return result != null;
|
||||
} catch (Exception e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
sSolo.waitForCondition(condition, waitMillis);
|
||||
|
||||
return (V) value[0];
|
||||
}
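A self-contained approximation of the polling behaviour waitFor delegates to Robotium's waitForCondition; this is a sketch of the idea, not the helper itself:

import java.util.concurrent.Callable;

// Repeatedly invoke a Callable until it yields a non-null value or the timeout
// elapses, then hand back whatever was last seen.
public class PollForNonNull {
    static <V> V waitFor(Callable<V> callable, long timeoutMillis) throws Exception {
        final long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            V result = callable.call();
            if (result != null) {
                return result;
            }
            Thread.sleep(50); // back off briefly between polls
        }
        return null; // timed out, mirroring the helper's null return
    }

    public static void main(String[] args) throws Exception {
        final long readyAt = System.currentTimeMillis() + 200;
        String value = waitFor(() -> System.currentTimeMillis() >= readyAt ? "ready" : null, 1000);
        System.out.println(value); // prints "ready"
    }
}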
|
||||
|
||||
/**
|
||||
* Waits for the Gecko event declaring the page has loaded. Takes in and runs a Runnable
|
||||
* that will perform the action that will cause the page to load.
|
||||
|
@ -19,14 +19,14 @@ public class testAboutPage extends PixelTest {
|
||||
String url = mStringHelper.ABOUT_SCHEME;
|
||||
loadAndPaint(url);
|
||||
|
||||
verifyUrlBarTitle(url);
|
||||
verifyUrlInContentDescription(url);
|
||||
|
||||
// Open a new page to remove the about: page from the current tab.
|
||||
url = getAbsoluteUrl(mStringHelper.ROBOCOP_BLANK_PAGE_01_URL);
|
||||
loadUrlAndWait(url);
|
||||
|
||||
// At this point the page title should have been set.
|
||||
verifyUrlBarTitle(url);
|
||||
verifyUrlInContentDescription(url);
|
||||
|
||||
// Set up listeners to catch the page load we're about to do.
|
||||
Actions.EventExpecter tabEventExpecter = mActions.expectGeckoEvent("Tab:Added");
|
||||
@ -42,6 +42,6 @@ public class testAboutPage extends PixelTest {
|
||||
contentEventExpecter.unregisterListener();
|
||||
|
||||
// Make sure the about: page was loaded.
|
||||
verifyUrlBarTitle(mStringHelper.ABOUT_SCHEME);
|
||||
verifyUrlInContentDescription(mStringHelper.ABOUT_SCHEME);
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ public class testPictureLinkContextMenu extends ContentContextMenuTest {
|
||||
PICTURE_PAGE_URL=getAbsoluteUrl(mStringHelper.ROBOCOP_PICTURE_LINK_URL);
|
||||
BLANK_PAGE_URL=getAbsoluteUrl(mStringHelper.ROBOCOP_BLANK_PAGE_02_URL);
|
||||
loadAndPaint(PICTURE_PAGE_URL);
|
||||
verifyUrlBarTitle(PICTURE_PAGE_URL);
|
||||
verifyUrlInContentDescription(PICTURE_PAGE_URL);
|
||||
|
||||
switchTabs(imageTitle);
|
||||
verifyContextMenuItems(photoMenuItems);
|
||||
|
@ -189,9 +189,16 @@ var HistoryEntry = {
|
||||
PlacesUtils.history.removePagesFromHost(item.host, false);
|
||||
}
|
||||
else if ("begin" in item && "end" in item) {
|
||||
PlacesUtils.history.removeVisitsByTimeframe(
|
||||
usSinceEpoch + (item.begin * 60 * 60 * 1000 * 1000),
|
||||
usSinceEpoch + (item.end * 60 * 60 * 1000 * 1000));
|
||||
let cb = Async.makeSpinningCallback();
|
||||
let msSinceEpoch = parseInt(usSinceEpoch / 1000);
|
||||
let filter = {
|
||||
beginDate: new Date(msSinceEpoch + (item.begin * 60 * 60 * 1000)),
|
||||
endDate: new Date(msSinceEpoch + (item.end * 60 * 60 * 1000))
|
||||
};
|
||||
PlacesUtils.history.removeVisitsByFilter(filter)
|
||||
.catch(ex => Logger.AssertTrue(false, "An error occurred while deleting history: " + ex))
|
||||
.then(result => {cb(null, result)}, err => {cb(err)});
|
||||
Async.waitForSyncCallback(cb);
|
||||
}
|
||||
else {
|
||||
Logger.AssertTrue(false, "invalid entry in delete history");
|
||||
|
@ -29,12 +29,10 @@ if (json.substs.MOZ_BUILD_APP != 'mobile/android') {
|
||||
System.setProperty('android.home', json.substs.ANDROID_SDK_ROOT)
|
||||
|
||||
include ':app'
|
||||
include ':base'
|
||||
include ':omnijar'
|
||||
include ':thirdparty'
|
||||
|
||||
project(':app').projectDir = new File("${json.topsrcdir}/mobile/android/app")
|
||||
project(':base').projectDir = new File("${json.topsrcdir}/mobile/android/app/base")
|
||||
project(':omnijar').projectDir = new File("${json.topsrcdir}/mobile/android/app/omnijar")
|
||||
project(':thirdparty').projectDir = new File("${json.topsrcdir}/mobile/android/thirdparty")
|
||||
|
||||
|
@ -100,6 +100,7 @@ flags:
|
||||
- aries-dogfood
|
||||
- aries-noril
|
||||
- android-api-15
|
||||
- android-api-15-frontend
|
||||
- android-partner-sample1
|
||||
- android-b2gdroid
|
||||
- linux
|
||||
|
@ -162,6 +162,12 @@ builds:
|
||||
types:
|
||||
opt:
|
||||
task: tasks/builds/opt_macosx64_st-an.yml
|
||||
android-api-15-frontend:
|
||||
platforms:
|
||||
- Android
|
||||
types:
|
||||
opt:
|
||||
task: tasks/builds/android_api_15_frontend.yml
|
||||
android-b2gdroid:
|
||||
platforms:
|
||||
- Android
|
||||
|
@ -167,21 +167,12 @@
|
||||
// No unnecessary comparisons
|
||||
"no-self-compare": 2,
|
||||
|
||||
// No declaring variables from an outer scope
|
||||
"no-shadow": 1,
|
||||
|
||||
// No declaring variables that hide things like arguments
|
||||
"no-shadow-restricted-names": 2,
|
||||
|
||||
// No spaces between function name and parentheses
|
||||
"no-spaced-func": 1,
|
||||
|
||||
// No trailing whitespace
|
||||
"no-trailing-spaces": 2,
|
||||
|
||||
// No using undeclared variables
|
||||
"no-undef": 2,
|
||||
|
||||
// Error on newline where a semicolon is needed
|
||||
"no-unexpected-multiline": 2,
|
||||
|
||||
@ -203,9 +194,6 @@
|
||||
// Always require semicolon at end of statement
|
||||
"semi": [2, "always"],
|
||||
|
||||
// Require space after keywords
|
||||
"space-after-keywords": 2,
|
||||
|
||||
// Require space before blocks
|
||||
"space-before-blocks": 2,
|
||||
|
||||
@ -492,9 +480,6 @@
|
||||
// Disallow labels that share a name with a variable
|
||||
"no-label-var": 2,
|
||||
|
||||
// Disallow negation of the left operand of an in expression
|
||||
"no-negated-in-lhs": 2,
|
||||
|
||||
// Disallow creating new instances of String, Number, and Boolean
|
||||
"no-new-wrappers": 2,
|
||||
}
|
||||
|
@ -204,7 +204,7 @@ class BaseContext {
|
||||
* function for the promise, and no promise is returned. In this case,
|
||||
* the callback is called when the promise resolves or rejects. In the
|
||||
* latter case, `lastError` is set to the rejection value, and the
|
||||
* callback funciton must check `browser.runtime.lastError` or
|
||||
* callback function must check `browser.runtime.lastError` or
|
||||
* `extension.runtime.lastError` in order to prevent it being reported
|
||||
* to the console.
|
||||
*
|
||||
|
@ -25,7 +25,7 @@ Object.defineProperty(this, "WebConsoleUtils", {
|
||||
});
|
||||
|
||||
const STRINGS_URI = "chrome://global/locale/security/security.properties";
|
||||
var l10n = new WebConsoleUtils.l10n(STRINGS_URI);
|
||||
var l10n = new WebConsoleUtils.L10n(STRINGS_URI);
|
||||
|
||||
this.InsecurePasswordUtils = {
|
||||
|
||||
|
@ -294,269 +294,6 @@ CreateRoot(nsCOMPtr<mozIStorageConnection>& aDBConn,
|
||||
|
||||
} // namespace
|
||||
|
||||
/**
|
||||
* An AsyncShutdown blocker in charge of shutting down places
|
||||
*/
|
||||
class DatabaseShutdown final:
|
||||
public nsIAsyncShutdownBlocker,
|
||||
public nsIAsyncShutdownCompletionCallback,
|
||||
public mozIStorageCompletionCallback
|
||||
{
|
||||
public:
|
||||
NS_DECL_THREADSAFE_ISUPPORTS
|
||||
NS_DECL_NSIASYNCSHUTDOWNBLOCKER
|
||||
NS_DECL_NSIASYNCSHUTDOWNCOMPLETIONCALLBACK
|
||||
NS_DECL_MOZISTORAGECOMPLETIONCALLBACK
|
||||
|
||||
explicit DatabaseShutdown(Database* aDatabase);
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetClient();
|
||||
|
||||
/**
|
||||
* `true` if we have not started shutdown, i.e. if
|
||||
* `BlockShutdown()` hasn't been called yet, false otherwise.
|
||||
*/
|
||||
static bool IsStarted() {
|
||||
return sIsStarted;
|
||||
}
|
||||
|
||||
private:
|
||||
nsCOMPtr<nsIAsyncShutdownBarrier> mBarrier;
|
||||
nsCOMPtr<nsIAsyncShutdownClient> mParentClient;
|
||||
|
||||
// The owning database.
|
||||
// The cycle is broken in method Complete(), once the connection
|
||||
// has been closed by mozStorage.
|
||||
RefPtr<Database> mDatabase;
|
||||
|
||||
// The current state, used both internally and for
|
||||
// forensics/debugging purposes.
|
||||
enum State {
|
||||
NOT_STARTED,
|
||||
|
||||
// Execution of `BlockShutdown` in progress
|
||||
// a. `BlockShutdown` is starting.
|
||||
RECEIVED_BLOCK_SHUTDOWN,
|
||||
// b. `BlockShutdown` is complete, waiting for clients.
|
||||
CALLED_WAIT_CLIENTS,
|
||||
|
||||
// Execution of `Done` in progress
|
||||
// a. `Done` is starting.
|
||||
RECEIVED_DONE,
|
||||
// b. We have notified observers that Places will close connection.
|
||||
NOTIFIED_OBSERVERS_PLACES_WILL_CLOSE_CONNECTION,
|
||||
// c. Execution of `Done` is complete, waiting for mozStorage shutdown.
|
||||
CALLED_STORAGESHUTDOWN,
|
||||
|
||||
// Execution of `Complete` in progress
|
||||
// a. `Complete` is starting.
|
||||
RECEIVED_STORAGESHUTDOWN_COMPLETE,
|
||||
// b. We have notified observers that Places as closed connection.
|
||||
NOTIFIED_OBSERVERS_PLACES_CONNECTION_CLOSED,
|
||||
};
|
||||
State mState;
|
||||
|
||||
// As tests may resurrect a dead `Database`, we use a counter to
|
||||
// give the instances of `DatabaseShutdown` unique names.
|
||||
uint16_t mCounter;
|
||||
static uint16_t sCounter;
|
||||
|
||||
static Atomic<bool> sIsStarted;
|
||||
|
||||
~DatabaseShutdown() {}
|
||||
};
|
||||
uint16_t DatabaseShutdown::sCounter = 0;
|
||||
Atomic<bool> DatabaseShutdown::sIsStarted(false);
|
||||
|
||||
DatabaseShutdown::DatabaseShutdown(Database* aDatabase)
|
||||
: mDatabase(aDatabase)
|
||||
, mState(NOT_STARTED)
|
||||
, mCounter(sCounter++)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
nsCOMPtr<nsIAsyncShutdownService> asyncShutdownSvc = services::GetAsyncShutdown();
|
||||
MOZ_ASSERT(asyncShutdownSvc);
|
||||
|
||||
if (asyncShutdownSvc) {
|
||||
DebugOnly<nsresult> rv = asyncShutdownSvc->MakeBarrier(
|
||||
NS_LITERAL_STRING("Places Database shutdown"),
|
||||
getter_AddRefs(mBarrier)
|
||||
);
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
}
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient>
|
||||
DatabaseShutdown::GetClient()
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownClient> client;
|
||||
if (mBarrier) {
|
||||
DebugOnly<nsresult> rv = mBarrier->GetClient(getter_AddRefs(client));
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
return client.forget();
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker::GetName
|
||||
NS_IMETHODIMP
|
||||
DatabaseShutdown::GetName(nsAString& aName)
|
||||
{
|
||||
if (mCounter > 0) {
|
||||
// During tests, we can end up with the Database singleton being resurrected.
|
||||
// Make sure that each instance of DatabaseShutdown has a unique name.
|
||||
nsPrintfCString name("Places DatabaseShutdown: Blocking profile-before-change (%x)", this);
|
||||
aName = NS_ConvertUTF8toUTF16(name);
|
||||
} else {
|
||||
aName = NS_LITERAL_STRING("Places DatabaseShutdown: Blocking profile-before-change");
|
||||
}
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker::GetState
|
||||
NS_IMETHODIMP DatabaseShutdown::GetState(nsIPropertyBag** aState)
|
||||
{
|
||||
nsresult rv;
|
||||
nsCOMPtr<nsIWritablePropertyBag2> bag =
|
||||
do_CreateInstance("@mozilla.org/hash-property-bag;1", &rv);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
// Put `mState` in field `progress`
|
||||
RefPtr<nsVariant> progress = new nsVariant();
|
||||
|
||||
rv = progress->SetAsUint8(mState);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
rv = bag->SetPropertyAsInterface(NS_LITERAL_STRING("progress"), progress);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
// Put `mBarrier`'s state in field `barrier`, if possible
|
||||
if (!mBarrier) {
|
||||
return NS_OK;
|
||||
}
|
||||
nsCOMPtr<nsIPropertyBag> barrierState;
|
||||
rv = mBarrier->GetState(getter_AddRefs(barrierState));
|
||||
if (NS_FAILED(rv)) {
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
RefPtr<nsVariant> barrier = new nsVariant();
|
||||
|
||||
rv = barrier->SetAsInterface(NS_GET_IID(nsIPropertyBag), barrierState);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
rv = bag->SetPropertyAsInterface(NS_LITERAL_STRING("Barrier"), barrier);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
|
||||
// nsIAsyncShutdownBlocker::BlockShutdown
|
||||
//
|
||||
// Step 1 in shutdown, called during profile-before-change.
|
||||
// As a `nsIAsyncShutdownBarrier`, we now need to wait until all clients
|
||||
// of `this` barrier have completed their own shutdown.
|
||||
//
|
||||
// See `Done()` for step 2.
|
||||
NS_IMETHODIMP
|
||||
DatabaseShutdown::BlockShutdown(nsIAsyncShutdownClient* aParentClient)
|
||||
{
|
||||
mParentClient = aParentClient;
|
||||
mState = RECEIVED_BLOCK_SHUTDOWN;
|
||||
sIsStarted = true;
|
||||
|
||||
if (NS_WARN_IF(!mBarrier)) {
|
||||
return NS_ERROR_NOT_AVAILABLE;
|
||||
}
|
||||
|
||||
// Wait until all clients have removed their blockers, then proceed
|
||||
// with own shutdown.
|
||||
DebugOnly<nsresult> rv = mBarrier->Wait(this);
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
|
||||
mState = CALLED_WAIT_CLIENTS;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownCompletionCallback::Done
|
||||
//
|
||||
// Step 2 in shutdown, called once all clients have removed their blockers.
|
||||
// We may now check sanity, inform observers, and close the database handler.
|
||||
//
|
||||
// See `Complete()` for step 3.
|
||||
NS_IMETHODIMP
|
||||
DatabaseShutdown::Done()
|
||||
{
|
||||
mState = RECEIVED_DONE;
|
||||
|
||||
// Fire internal shutdown notifications.
|
||||
nsCOMPtr<nsIObserverService> os = services::GetObserverService();
|
||||
MOZ_ASSERT(os);
|
||||
if (os) {
|
||||
(void)os->NotifyObservers(nullptr, TOPIC_PLACES_WILL_CLOSE_CONNECTION, nullptr);
|
||||
}
|
||||
mState = NOTIFIED_OBSERVERS_PLACES_WILL_CLOSE_CONNECTION;
|
||||
|
||||
// At this stage, any use of this database is forbidden. Get rid of
|
||||
// `gDatabase`. Note, however, that the database could be
|
||||
// resurrected. This can happen in particular during tests.
|
||||
MOZ_ASSERT(Database::gDatabase == nullptr || Database::gDatabase == mDatabase);
|
||||
Database::gDatabase = nullptr;
|
||||
|
||||
mDatabase->Shutdown();
|
||||
mState = CALLED_STORAGESHUTDOWN;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
|
||||
// mozIStorageCompletionCallback::Complete
|
||||
//
|
||||
// Step 3 (and last step) of shutdown
|
||||
//
|
||||
// Called once the connection has been closed by mozStorage.
|
||||
// Inform observers of TOPIC_PLACES_CONNECTION_CLOSED.
|
||||
//
|
||||
NS_IMETHODIMP
|
||||
DatabaseShutdown::Complete(nsresult, nsISupports*)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
mState = RECEIVED_STORAGESHUTDOWN_COMPLETE;
|
||||
mDatabase = nullptr;
|
||||
|
||||
nsresult rv;
|
||||
if (mParentClient) {
|
||||
// mParentClient may be nullptr in tests
|
||||
rv = mParentClient->RemoveBlocker(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
}
|
||||
|
||||
nsCOMPtr<nsIObserverService> os = mozilla::services::GetObserverService();
|
||||
MOZ_ASSERT(os);
|
||||
if (os) {
|
||||
rv = os->NotifyObservers(nullptr,
|
||||
TOPIC_PLACES_CONNECTION_CLOSED,
|
||||
nullptr);
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
mState = NOTIFIED_OBSERVERS_PLACES_CONNECTION_CLOSED;
|
||||
|
||||
if (NS_WARN_IF(!mBarrier)) {
|
||||
return NS_ERROR_NOT_AVAILABLE;
|
||||
}
|
||||
|
||||
NS_ReleaseOnMainThread(mBarrier.forget());
|
||||
NS_ReleaseOnMainThread(mParentClient.forget());
|
||||
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
NS_IMPL_ISUPPORTS(
|
||||
DatabaseShutdown
|
||||
, nsIAsyncShutdownBlocker
|
||||
, nsIAsyncShutdownCompletionCallback
|
||||
, mozIStorageCompletionCallback
|
||||
)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
//// Database
|
||||
|
||||
@ -574,30 +311,18 @@ Database::Database()
|
||||
, mDBPageSize(0)
|
||||
, mDatabaseStatus(nsINavHistoryService::DATABASE_STATUS_OK)
|
||||
, mClosed(false)
|
||||
, mConnectionShutdown(new DatabaseShutdown(this))
|
||||
, mClientsShutdown(new ClientsShutdownBlocker())
|
||||
, mConnectionShutdown(new ConnectionShutdownBlocker(this))
|
||||
{
|
||||
MOZ_ASSERT(!XRE_IsContentProcess(),
|
||||
"Cannot instantiate Places in the content process");
|
||||
// Attempting to create two instances of the service?
|
||||
MOZ_ASSERT(!gDatabase);
|
||||
gDatabase = this;
|
||||
|
||||
// Prepare async shutdown
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetShutdownPhase();
|
||||
MOZ_ASSERT(shutdownPhase);
|
||||
|
||||
if (shutdownPhase) {
|
||||
DebugOnly<nsresult> rv = shutdownPhase->AddBlocker(
|
||||
static_cast<nsIAsyncShutdownBlocker*>(mConnectionShutdown.get()),
|
||||
NS_LITERAL_STRING(__FILE__),
|
||||
__LINE__,
|
||||
NS_LITERAL_STRING(""));
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
}
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient>
|
||||
Database::GetShutdownPhase()
|
||||
Database::GetProfileChangeTeardownPhase()
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownService> asyncShutdownSvc = services::GetAsyncShutdown();
|
||||
MOZ_ASSERT(asyncShutdownSvc);
|
||||
@ -605,6 +330,24 @@ Database::GetShutdownPhase()
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Consumers of Places should shutdown before us, at profile-change-teardown.
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase;
|
||||
DebugOnly<nsresult> rv = asyncShutdownSvc->
|
||||
GetProfileChangeTeardown(getter_AddRefs(shutdownPhase));
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
return shutdownPhase.forget();
|
||||
}
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient>
|
||||
Database::GetProfileBeforeChangePhase()
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownService> asyncShutdownSvc = services::GetAsyncShutdown();
|
||||
MOZ_ASSERT(asyncShutdownSvc);
|
||||
if (NS_WARN_IF(!asyncShutdownSvc)) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
// Consumers of Places should shutdown before us, at profile-change-teardown.
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase;
|
||||
DebugOnly<nsresult> rv = asyncShutdownSvc->
|
||||
GetProfileBeforeChange(getter_AddRefs(shutdownPhase));
|
||||
@ -649,19 +392,19 @@ Database::GetStatement(const nsACString& aQuery) const
|
||||
}
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient>
|
||||
Database::GetConnectionShutdown()
|
||||
Database::GetClientsShutdown()
|
||||
{
|
||||
MOZ_ASSERT(mConnectionShutdown);
|
||||
|
||||
return mConnectionShutdown->GetClient();
|
||||
MOZ_ASSERT(mClientsShutdown);
|
||||
return mClientsShutdown->GetClient();
|
||||
}
|
||||
|
||||
// static
|
||||
already_AddRefed<Database>
|
||||
Database::GetDatabase()
|
||||
{
|
||||
if (DatabaseShutdown::IsStarted())
|
||||
if (PlacesShutdownBlocker::IsStarted()) {
|
||||
return nullptr;
|
||||
}
|
||||
return GetSingleton();
|
||||
}
|
||||
|
||||
@ -732,6 +475,36 @@ Database::Init()
|
||||
rv = NS_DispatchToMainThread(completeEvent);
|
||||
NS_ENSURE_SUCCESS(rv, rv);
|
||||
|
||||
// At this point we know the Database object points to a valid connection
|
||||
// and we need to setup async shutdown.
|
||||
{
|
||||
// First of all Places clients should block profile-change-teardown.
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetProfileChangeTeardownPhase();
|
||||
MOZ_ASSERT(shutdownPhase);
|
||||
if (shutdownPhase) {
|
||||
DebugOnly<nsresult> rv = shutdownPhase->AddBlocker(
|
||||
static_cast<nsIAsyncShutdownBlocker*>(mClientsShutdown.get()),
|
||||
NS_LITERAL_STRING(__FILE__),
|
||||
__LINE__,
|
||||
NS_LITERAL_STRING(""));
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// Then connection closing should block profile-before-change.
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetProfileBeforeChangePhase();
|
||||
MOZ_ASSERT(shutdownPhase);
|
||||
if (shutdownPhase) {
|
||||
DebugOnly<nsresult> rv = shutdownPhase->AddBlocker(
|
||||
static_cast<nsIAsyncShutdownBlocker*>(mConnectionShutdown.get()),
|
||||
NS_LITERAL_STRING(__FILE__),
|
||||
__LINE__,
|
||||
NS_LITERAL_STRING(""));
|
||||
MOZ_ASSERT(NS_SUCCEEDED(rv));
|
||||
}
|
||||
}
|
||||
|
||||
// Finally observe profile shutdown notifications.
|
||||
nsCOMPtr<nsIObserverService> os = mozilla::services::GetObserverService();
|
||||
if (os) {
|
||||
@ -1859,19 +1632,18 @@ Database::MigrateV30Up() {
|
||||
void
|
||||
Database::Shutdown()
|
||||
{
|
||||
|
||||
// As the last step in the shutdown path, finalize the database handle.
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
MOZ_ASSERT(!mClosed);
|
||||
|
||||
// Break cycle
|
||||
nsCOMPtr<mozIStorageCompletionCallback> closeListener = mConnectionShutdown.forget();
|
||||
// Break cycles with the shutdown blockers.
|
||||
mClientsShutdown = nullptr;
|
||||
nsCOMPtr<mozIStorageCompletionCallback> connectionShutdown = mConnectionShutdown.forget();
|
||||
|
||||
if (!mMainConn) {
|
||||
// The connection has never been initialized. Just mark it
|
||||
// as closed.
|
||||
// The connection has never been initialized. Just mark it as closed.
|
||||
mClosed = true;
|
||||
(void)closeListener->Complete(NS_OK, nullptr);
|
||||
(void)connectionShutdown->Complete(NS_OK, nullptr);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -1930,7 +1702,7 @@ Database::Shutdown()
|
||||
|
||||
mClosed = true;
|
||||
|
||||
(void)mMainConn->AsyncClose(closeListener);
|
||||
(void)mMainConn->AsyncClose(connectionShutdown);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
@ -1942,8 +1714,7 @@ Database::Observe(nsISupports *aSubject,
|
||||
const char16_t *aData)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
if (strcmp(aTopic, TOPIC_PROFILE_CHANGE_TEARDOWN) == 0 ||
|
||||
strcmp(aTopic, TOPIC_SIMULATE_PLACES_MUST_CLOSE_1) == 0) {
|
||||
if (strcmp(aTopic, TOPIC_PROFILE_CHANGE_TEARDOWN) == 0) {
|
||||
// Tests simulating shutdown may cause multiple notifications.
|
||||
if (IsShutdownStarted()) {
|
||||
return NS_OK;
|
||||
@ -1972,20 +1743,40 @@ Database::Observe(nsISupports *aSubject,
|
||||
|
||||
// Notify all Places users that we are about to shutdown.
|
||||
(void)os->NotifyObservers(nullptr, TOPIC_PLACES_SHUTDOWN, nullptr);
|
||||
} else if (strcmp(aTopic, TOPIC_SIMULATE_PLACES_MUST_CLOSE_2) == 0) {
|
||||
} else if (strcmp(aTopic, TOPIC_SIMULATE_PLACES_SHUTDOWN) == 0) {
|
||||
// This notification is (and must be) only used by tests that are trying
|
||||
// to simulate Places shutdown out of the normal shutdown path.
|
||||
|
||||
// Tests simulating shutdown may cause re-entrance.
|
||||
if (IsShutdownStarted()) {
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// Since we are going through shutdown of Database,
|
||||
// we don't need to block actual shutdown anymore.
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetShutdownPhase();
|
||||
if (shutdownPhase) {
|
||||
shutdownPhase->RemoveBlocker(mConnectionShutdown.get());
|
||||
// We are simulating a shutdown, so invoke the shutdown blockers,
|
||||
// wait for them, then proceed with connection shutdown.
|
||||
// Since we are already going through shutdown, but it's not the real one,
|
||||
// we won't need to block the real one anymore, so we can unblock it.
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetProfileChangeTeardownPhase();
|
||||
if (shutdownPhase) {
|
||||
shutdownPhase->RemoveBlocker(mClientsShutdown.get());
|
||||
}
|
||||
(void)mClientsShutdown->BlockShutdown(nullptr);
|
||||
}
|
||||
|
||||
return mConnectionShutdown->BlockShutdown(nullptr);
|
||||
// Spin the events loop until the clients are done.
|
||||
// Note, this is just for tests, specifically test_clearHistory_shutdown.js
|
||||
while (mClientsShutdown->State() != PlacesShutdownBlocker::States::RECEIVED_DONE) {
|
||||
(void)NS_ProcessNextEvent();
|
||||
}
|
||||
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownClient> shutdownPhase = GetProfileBeforeChangePhase();
|
||||
if (shutdownPhase) {
|
||||
shutdownPhase->RemoveBlocker(mConnectionShutdown.get());
|
||||
}
|
||||
(void)mConnectionShutdown->BlockShutdown(nullptr);
|
||||
}
|
||||
}
|
||||
return NS_OK;
|
||||
}
|
||||
|
@ -14,6 +14,7 @@
|
||||
#include "mozilla/storage/StatementCache.h"
|
||||
#include "mozilla/Attributes.h"
|
||||
#include "nsIEventTarget.h"
|
||||
#include "Shutdown.h"
|
||||
|
||||
// This is the schema version. Update it at any schema change and add a
|
||||
// corresponding migrateVxx method below.
|
||||
@ -41,11 +42,7 @@
|
||||
|
||||
// Simulate profile-before-change. This topic may only be used by
|
||||
// calling `observe` directly on the database. Used for testing only.
|
||||
#define TOPIC_SIMULATE_PLACES_MUST_CLOSE_1 "test-simulate-places-shutdown-phase-1"
|
||||
|
||||
// Simulate profile-before-change. This topic may only be used by
|
||||
// calling `observe` directly on the database. Used for testing only.
|
||||
#define TOPIC_SIMULATE_PLACES_MUST_CLOSE_2 "test-simulate-places-shutdown-phase-2"
|
||||
#define TOPIC_SIMULATE_PLACES_SHUTDOWN "test-simulate-places-shutdown"
|
||||
|
||||
class nsIRunnable;
|
||||
|
||||
@ -64,7 +61,8 @@ enum JournalMode {
|
||||
, JOURNAL_WAL
|
||||
};
|
||||
|
||||
class DatabaseShutdown;
|
||||
class ClientsShutdownBlocker;
|
||||
class ConnectionShutdownBlocker;
|
||||
|
||||
class Database final : public nsIObserver
|
||||
, public nsSupportsWeakReference
|
||||
@ -87,7 +85,7 @@ public:
|
||||
/**
|
||||
* The AsyncShutdown client used by clients of this API to be informed of shutdown.
|
||||
*/
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetConnectionShutdown();
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetClientsShutdown();
|
||||
|
||||
/**
|
||||
* Getter to use when instantiating the class.
|
||||
@ -269,7 +267,7 @@ protected:
|
||||
|
||||
nsresult UpdateBookmarkRootTitles();
|
||||
|
||||
friend class DatabaseShutdown;
|
||||
friend class ConnectionShutdownBlocker;
|
||||
|
||||
private:
|
||||
~Database();
|
||||
@ -292,19 +290,21 @@ private:
|
||||
bool mClosed;
|
||||
|
||||
/**
|
||||
* Determine at which shutdown phase we need to start shutting down
|
||||
* the Database.
|
||||
* Phases for shutting down the Database.
|
||||
* See Shutdown.h for further details about the shutdown procedure.
|
||||
*/
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetShutdownPhase();
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetProfileChangeTeardownPhase();
|
||||
already_AddRefed<nsIAsyncShutdownClient> GetProfileBeforeChangePhase();
|
||||
|
||||
/**
|
||||
* A companion object in charge of shutting down the mozStorage
|
||||
* connection once all clients have disconnected.
|
||||
* Blockers in charge of waiting for the Places clients and then shutting
|
||||
* down the mozStorage connection.
|
||||
* See Shutdown.h for further details about the shutdown procedure.
|
||||
*
|
||||
* Cycles between `this` and `mConnectionShutdown` are broken
|
||||
* in `Shutdown()`.
|
||||
* Cycles with these are broken in `Shutdown()`.
|
||||
*/
|
||||
RefPtr<DatabaseShutdown> mConnectionShutdown;
|
||||
RefPtr<ClientsShutdownBlocker> mClientsShutdown;
|
||||
RefPtr<ConnectionShutdownBlocker> mConnectionShutdown;
|
||||
};
|
||||
|
||||
} // namespace places
|
||||
|
229
toolkit/components/places/Shutdown.cpp
Normal file
@ -0,0 +1,229 @@
|
||||
/* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
|
||||
|
||||
#include "Shutdown.h"
|
||||
#include "mozilla/unused.h"
|
||||
|
||||
namespace mozilla {
|
||||
namespace places {
|
||||
|
||||
uint16_t PlacesShutdownBlocker::sCounter = 0;
|
||||
Atomic<bool> PlacesShutdownBlocker::sIsStarted(false);
|
||||
|
||||
PlacesShutdownBlocker::PlacesShutdownBlocker(const nsString& aName)
|
||||
: mName(aName)
|
||||
, mState(NOT_STARTED)
|
||||
, mCounter(sCounter++)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
// During tests, we can end up with the Database singleton being resurrected.
|
||||
// Make sure that each instance of PlacesShutdownBlocker has a unique name.
|
||||
if (mCounter > 1) {
|
||||
mName.AppendInt(mCounter);
|
||||
}
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker
|
||||
NS_IMETHODIMP
|
||||
PlacesShutdownBlocker::GetName(nsAString& aName)
|
||||
{
|
||||
aName = mName;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker
|
||||
NS_IMETHODIMP
|
||||
PlacesShutdownBlocker::GetState(nsIPropertyBag** aState)
|
||||
{
|
||||
nsresult rv;
|
||||
nsCOMPtr<nsIWritablePropertyBag2> bag =
|
||||
do_CreateInstance("@mozilla.org/hash-property-bag;1", &rv);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
// Put `mState` in field `progress`
|
||||
RefPtr<nsVariant> progress = new nsVariant();
|
||||
rv = progress->SetAsUint8(mState);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
rv = bag->SetPropertyAsInterface(NS_LITERAL_STRING("progress"), progress);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
// Put `mBarrier`'s state in field `barrier`, if possible
|
||||
if (!mBarrier) {
|
||||
return NS_OK;
|
||||
}
|
||||
nsCOMPtr<nsIPropertyBag> barrierState;
|
||||
rv = mBarrier->GetState(getter_AddRefs(barrierState));
|
||||
if (NS_FAILED(rv)) {
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
RefPtr<nsVariant> barrier = new nsVariant();
|
||||
rv = barrier->SetAsInterface(NS_GET_IID(nsIPropertyBag), barrierState);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
rv = bag->SetPropertyAsInterface(NS_LITERAL_STRING("Barrier"), barrier);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker
|
||||
NS_IMETHODIMP
|
||||
PlacesShutdownBlocker::BlockShutdown(nsIAsyncShutdownClient* aParentClient)
|
||||
{
|
||||
MOZ_ASSERT(false, "should always be overridden");
|
||||
return NS_ERROR_NOT_IMPLEMENTED;
|
||||
}
|
||||
|
||||
NS_IMPL_ISUPPORTS(
|
||||
PlacesShutdownBlocker,
|
||||
nsIAsyncShutdownBlocker
|
||||
)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
ClientsShutdownBlocker::ClientsShutdownBlocker()
|
||||
: PlacesShutdownBlocker(NS_LITERAL_STRING("Places Clients shutdown"))
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
// Create a barrier that will be exposed to clients through GetClient(), so
|
||||
// they can block Places shutdown.
|
||||
nsCOMPtr<nsIAsyncShutdownService> asyncShutdown = services::GetAsyncShutdown();
|
||||
MOZ_ASSERT(asyncShutdown);
|
||||
if (asyncShutdown) {
|
||||
nsCOMPtr<nsIAsyncShutdownBarrier> barrier;
|
||||
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(asyncShutdown->MakeBarrier(mName, getter_AddRefs(barrier))));
|
||||
mBarrier = new nsMainThreadPtrHolder<nsIAsyncShutdownBarrier>(barrier);
|
||||
}
|
||||
}
|
||||
|
||||
already_AddRefed<nsIAsyncShutdownClient>
|
||||
ClientsShutdownBlocker::GetClient()
|
||||
{
|
||||
nsCOMPtr<nsIAsyncShutdownClient> client;
|
||||
if (mBarrier) {
|
||||
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(mBarrier->GetClient(getter_AddRefs(client))));
|
||||
}
|
||||
return client.forget();
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownBlocker
|
||||
NS_IMETHODIMP
|
||||
ClientsShutdownBlocker::BlockShutdown(nsIAsyncShutdownClient* aParentClient)
|
||||
{
|
||||
MOZ_ASSERT(NS_IsMainThread());
|
||||
mParentClient = new nsMainThreadPtrHolder<nsIAsyncShutdownClient>(aParentClient);
|
||||
mState = RECEIVED_BLOCK_SHUTDOWN;
|
||||
|
||||
if (NS_WARN_IF(!mBarrier)) {
|
||||
return NS_ERROR_NOT_AVAILABLE;
|
||||
}
|
||||
|
||||
// Wait until all the clients have removed their blockers.
|
||||
MOZ_ALWAYS_TRUE(NS_SUCCEEDED(mBarrier->Wait(this)));
|
||||
|
||||
mState = CALLED_WAIT_CLIENTS;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
// nsIAsyncShutdownCompletionCallback
|
||||
NS_IMETHODIMP
|
||||
ClientsShutdownBlocker::Done()
|
||||
{
|
||||
// At this point all the clients are done, we can stop blocking the shutdown
|
||||
// phase.
|
||||
mState = RECEIVED_DONE;
|
||||
|
||||
// mParentClient is nullptr in tests.
|
||||
if (mParentClient) {
|
||||
nsresult rv = mParentClient->RemoveBlocker(this);
|
||||
if (NS_WARN_IF(NS_FAILED(rv))) return rv;
|
||||
mParentClient = nullptr;
|
||||
}
|
||||
mBarrier = nullptr;
|
||||
return NS_OK;
|
||||
}
|
||||
|
||||
NS_IMPL_ISUPPORTS_INHERITED(
|
||||
ClientsShutdownBlocker,
|
||||
PlacesShutdownBlocker,
|
||||
nsIAsyncShutdownCompletionCallback
|
||||
)
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
ConnectionShutdownBlocker::ConnectionShutdownBlocker(Database* aDatabase)
  : PlacesShutdownBlocker(NS_LITERAL_STRING("Places Connection shutdown"))
  , mDatabase(aDatabase)
{
  // Do nothing.
}

// nsIAsyncShutdownBlocker
NS_IMETHODIMP
ConnectionShutdownBlocker::BlockShutdown(nsIAsyncShutdownClient* aParentClient)
{
  MOZ_ASSERT(NS_IsMainThread());
  mParentClient = new nsMainThreadPtrHolder<nsIAsyncShutdownClient>(aParentClient);
  mState = RECEIVED_BLOCK_SHUTDOWN;
  // Annotate that Database shutdown started.
  sIsStarted = true;

  // Fire internal database closing notification.
  nsCOMPtr<nsIObserverService> os = services::GetObserverService();
  MOZ_ASSERT(os);
  if (os) {
    Unused << os->NotifyObservers(nullptr, TOPIC_PLACES_WILL_CLOSE_CONNECTION, nullptr);
  }
  mState = NOTIFIED_OBSERVERS_PLACES_WILL_CLOSE_CONNECTION;

  // At this stage, any use of this database is forbidden. Get rid of
  // `gDatabase`. Note, however, that the database could be
  // resurrected. This can happen in particular during tests.
  MOZ_ASSERT(Database::gDatabase == nullptr || Database::gDatabase == mDatabase);
  Database::gDatabase = nullptr;

  // Database::Shutdown will invoke Complete once the connection is closed.
  mDatabase->Shutdown();
  mState = CALLED_STORAGESHUTDOWN;
  return NS_OK;
}

// mozIStorageCompletionCallback
NS_IMETHODIMP
ConnectionShutdownBlocker::Complete(nsresult, nsISupports*)
{
  MOZ_ASSERT(NS_IsMainThread());
  mState = RECEIVED_STORAGESHUTDOWN_COMPLETE;

  // The connection is closed, the Database has no more use, so we can break
  // possible cycles.
  mDatabase = nullptr;

  // Notify the connection has gone.
  nsCOMPtr<nsIObserverService> os = mozilla::services::GetObserverService();
  MOZ_ASSERT(os);
  if (os) {
    MOZ_ALWAYS_TRUE(NS_SUCCEEDED(os->NotifyObservers(nullptr,
                                                     TOPIC_PLACES_CONNECTION_CLOSED,
                                                     nullptr)));
  }
  mState = NOTIFIED_OBSERVERS_PLACES_CONNECTION_CLOSED;

  // mParentClient is nullptr in tests
  if (mParentClient) {
    nsresult rv = mParentClient->RemoveBlocker(this);
    if (NS_WARN_IF(NS_FAILED(rv))) return rv;
    mParentClient = nullptr;
  }
  return NS_OK;
}

NS_IMPL_ISUPPORTS_INHERITED(
  ConnectionShutdownBlocker,
  PlacesShutdownBlocker,
  mozIStorageCompletionCallback
)

} // namespace places
} // namespace mozilla
 171  toolkit/components/places/Shutdown.h  Normal file
@ -0,0 +1,171 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef mozilla_places_Shutdown_h_
#define mozilla_places_Shutdown_h_

#include "nsIAsyncShutdown.h"
#include "Database.h"
#include "nsProxyRelease.h"

namespace mozilla {
namespace places {

class Database;

/**
 * This is most of the code responsible for Places shutdown.
 *
 * PHASE 1 (Legacy clients shutdown)
 * The shutdown procedure begins when the Database singleton receives
 * profile-change-teardown (note that tests will instead notify nsNavHistory,
 * which forwards the notification to the Database instance).
 * Database::Observe first of all checks if initialization was completed
 * properly, to avoid race conditions, then it notifies "places-shutdown" to
 * legacy clients. Legacy clients are supposed to start and complete any
 * shutdown critical work in the same tick, since we won't wait for them.
 *
 * PHASE 2 (Modern clients shutdown)
 * Modern clients should instead register as a blocker by passing a promise to
 * nsPIPlacesDatabase::shutdownClient (for example see sanitize.js), so they
 * block Places shutdown until the promise is resolved.
 * When profile-change-teardown is observed by async shutdown, it calls
 * ClientsShutdownBlocker::BlockShutdown. This class is registered as a teardown
 * phase blocker in Database::Init (see Database::mClientsShutdown).
 * ClientsShutdownBlocker::BlockShutdown waits for all the clients registered
 * through nsPIPlacesDatabase::shutdownClient. When all the clients are done,
 * its `Done` method is invoked, and it stops blocking the shutdown phase, so
 * that it can continue.
 *
 * PHASE 3 (Connection shutdown)
 * ConnectionShutdownBlocker is registered as a profile-before-change blocker
 * in Database::Init (see Database::mConnectionShutdown).
 * When profile-before-change is observed by async shutdown, it calls
 * ConnectionShutdownBlocker::BlockShutdown.
 * This is the last chance for any Places internal work, like privacy cleanups,
 * before the connection is closed. Thus a places-will-close-connection
 * notification is sent to legacy clients that must complete any operation in
 * the same tick, since we won't wait for them.
 * Then the control is passed to Database::Shutdown, that executes some sanity
 * checks, clears cached statements and proceeds with asyncClose.
 * Once the connection is definitely closed, Database will call back
 * ConnectionShutdownBlocker::Complete. At this point a final
 * places-connection-closed notification is sent, for testing purposes.
 */

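As a rough illustration of the PHASE 2 registration described above, a modern client would block Places shutdown along these lines. This is a minimal JavaScript sketch, not part of the patch: it assumes the usual chrome Cc/Ci shortcuts, and the `jsclient` wrapper is the standard async-shutdown plumbing for adding a promise blocker.

// Sketch: a "modern client" registering a Places shutdown blocker (PHASE 2).
var shutdownClient = Cc["@mozilla.org/browser/nav-history-service;1"]
                       .getService(Ci.nsPIPlacesDatabase)
                       .shutdownClient
                       .jsclient;

// Illustrative promise; Places shutdown waits until it resolves.
var pendingWork = new Promise(resolve => {
  // ... finish any asynchronous Places work here, then ...
  resolve();
});

shutdownClient.addBlocker("myComponent.js: flush pending Places work",
                          pendingWork);

This is the same pattern the comment refers to in sanitize.js, which needs history cleared before the connection closes.
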
/**
 * A base AsyncShutdown blocker in charge of shutting down Places.
 */
class PlacesShutdownBlocker : public nsIAsyncShutdownBlocker
{
public:
  NS_DECL_THREADSAFE_ISUPPORTS
  NS_DECL_NSIASYNCSHUTDOWNBLOCKER

  explicit PlacesShutdownBlocker(const nsString& aName);

  /**
   * `true` if shutdown has started, i.e. if `BlockShutdown()` has already
   * been called, `false` otherwise.
   */
  static bool IsStarted() {
    return sIsStarted;
  }

  // The current state, used internally and for forensics/debugging purposes.
  // Not all the states make sense for all the derived classes.
  enum States {
    NOT_STARTED,
    // Execution of `BlockShutdown` in progress.
    RECEIVED_BLOCK_SHUTDOWN,

    // Values specific to ClientsShutdownBlocker
    // a. Set while we are waiting for clients to do their job and unblock us.
    CALLED_WAIT_CLIENTS,
    // b. Set when all the clients are done.
    RECEIVED_DONE,

    // Values specific to ConnectionShutdownBlocker
    // a. Set after we notified observers that Places is closing the connection.
    NOTIFIED_OBSERVERS_PLACES_WILL_CLOSE_CONNECTION,
    // b. Set after we pass control to Database::Shutdown, and wait for it to
    //    close the connection and call our `Complete` method when done.
    CALLED_STORAGESHUTDOWN,
    // c. Set when Database has closed the connection and passed control to
    //    us through `Complete`.
    RECEIVED_STORAGESHUTDOWN_COMPLETE,
    // d. We have notified observers that Places has closed the connection.
    NOTIFIED_OBSERVERS_PLACES_CONNECTION_CLOSED,
  };
  States State() {
    return mState;
  }

protected:
  // The blocker name, also used as barrier name.
  nsString mName;
  // The current state, see States.
  States mState;
  // The barrier optionally used to wait for clients.
  nsMainThreadPtrHandle<nsIAsyncShutdownBarrier> mBarrier;
  // The parent object who registered this as a blocker.
  nsMainThreadPtrHandle<nsIAsyncShutdownClient> mParentClient;

  // As tests may resurrect a dead `Database`, we use a counter to
  // give the instances of `PlacesShutdownBlocker` unique names.
  uint16_t mCounter;
  static uint16_t sCounter;

  static Atomic<bool> sIsStarted;

  virtual ~PlacesShutdownBlocker() {}
};

/**
 * Blocker also used to wait for clients, through an owned barrier.
 */
class ClientsShutdownBlocker final : public PlacesShutdownBlocker
                                   , public nsIAsyncShutdownCompletionCallback
{
public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_NSIASYNCSHUTDOWNCOMPLETIONCALLBACK

  explicit ClientsShutdownBlocker();

  NS_IMETHOD BlockShutdown(nsIAsyncShutdownClient* aParentClient) override;

  already_AddRefed<nsIAsyncShutdownClient> GetClient();

private:
  ~ClientsShutdownBlocker() {}
};

/**
 * Blocker used to wait when closing the database connection.
 */
class ConnectionShutdownBlocker final : public PlacesShutdownBlocker
                                      , public mozIStorageCompletionCallback
{
public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_MOZISTORAGECOMPLETIONCALLBACK

  NS_IMETHOD BlockShutdown(nsIAsyncShutdownClient* aParentClient) override;

  explicit ConnectionShutdownBlocker(mozilla::places::Database* aDatabase);

private:
  ~ConnectionShutdownBlocker() {}

  // The owning database.
  // The cycle is broken in method Complete(), once the connection
  // has been closed by mozStorage.
  RefPtr<mozilla::places::Database> mDatabase;
};

} // namespace places
} // namespace mozilla

#endif // mozilla_places_Shutdown_h_
@ -47,6 +47,7 @@ if CONFIG['MOZ_PLACES']:
        'nsNavHistoryResult.cpp',
        'nsPlacesModule.cpp',
        'PlaceInfo.cpp',
        'Shutdown.cpp',
        'SQLFunctions.cpp',
        'VisitInfo.cpp',
    ]

@ -79,6 +79,8 @@ interface nsIBrowserHistory : nsISupports
   *        Microseconds from epoch, representing the final time.
   *
   * @note The removal happens in a batch.
   *
   * @deprecated Please use PlacesUtils.history.removeVisitsByFilter instead
   */
  void removeVisitsByTimeframe(in PRTime aBeginTime,
                               in PRTime aEndTime);

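For callers migrating off the deprecated API, the replacement looks roughly like this (a sketch; note that removeVisitsByFilter takes JS Date objects with millisecond precision, while removeVisitsByTimeframe took PRTime microseconds):

// Old (deprecated): PRTime microseconds since the epoch.
// PlacesUtils.history.removeVisitsByTimeframe(Date.now() * 1000 - 10, Date.now() * 1000);

// New: a filter object built from JS Date objects (milliseconds).
let jsNow = Date.now();
let filter = {
  beginDate: new Date(jsNow - 10),
  endDate: new Date(jsNow)
};
// Returns a promise resolved once the matching visits have been removed.
PlacesUtils.history.removeVisitsByFilter(filter).then(() => {
  // removal complete
});

The test changes further down in this commit follow the same pattern.
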
@ -2703,6 +2703,8 @@ nsNavHistory::RemovePagesByTimeframe(PRTime aBeginTime, PRTime aEndTime)
NS_IMETHODIMP
nsNavHistory::RemoveVisitsByTimeframe(PRTime aBeginTime, PRTime aEndTime)
{
  PLACES_WARN_DEPRECATED();

  NS_ASSERTION(NS_IsMainThread(), "This can only be called on the main thread");

  nsresult rv;
@ -2970,7 +2972,7 @@
nsNavHistory::GetShutdownClient(nsIAsyncShutdownClient **_shutdownClient)
{
  NS_ENSURE_ARG_POINTER(_shutdownClient);
  RefPtr<nsIAsyncShutdownClient> client = mDB->GetConnectionShutdown();
  RefPtr<nsIAsyncShutdownClient> client = mDB->GetClientsShutdown();
  MOZ_ASSERT(client);
  client.forget(_shutdownClient);

@ -3082,8 +3084,7 @@ nsNavHistory::Observe(nsISupports *aSubject, const char *aTopic,
  NS_ASSERTION(NS_IsMainThread(), "This can only be called on the main thread");
  if (strcmp(aTopic, TOPIC_PROFILE_TEARDOWN) == 0 ||
      strcmp(aTopic, TOPIC_PROFILE_CHANGE) == 0 ||
      strcmp(aTopic, TOPIC_SIMULATE_PLACES_MUST_CLOSE_1) == 0 ||
      strcmp(aTopic, TOPIC_SIMULATE_PLACES_MUST_CLOSE_2) == 0) {
      strcmp(aTopic, TOPIC_SIMULATE_PLACES_SHUTDOWN) == 0) {
    // These notifications are used by tests to simulate a Places shutdown.
    // They should just be forwarded to the Database handle.
    mDB->Observe(aSubject, aTopic, aData);

@ -385,10 +385,10 @@ var shutdownPlaces = function() {
    Services.obs.addObserver(resolve, "places-connection-closed", false);
  });
  let hs = PlacesUtils.history.QueryInterface(Ci.nsIObserver);
  hs.observe(null, "test-simulate-places-shutdown-phase-1", null);
  do_print("shutdownPlaces: sent test-simulate-places-shutdown-phase-1");
  hs.observe(null, "test-simulate-places-shutdown-phase-2", null);
  do_print("shutdownPlaces: sent test-simulate-places-shutdown-phase-2");
  hs.observe(null, "profile-change-teardown", null);
  do_print("shutdownPlaces: sent profile-change-teardown");
  hs.observe(null, "test-simulate-places-shutdown", null);
  do_print("shutdownPlaces: sent test-simulate-places-shutdown");
  return promise.then(() => {
    do_print("shutdownPlaces: complete");
  });

@ -1,10 +1,5 @@
/* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

const NOW = Date.now() * 1000;
const JS_NOW = Date.now();
const DB_NOW = JS_NOW * 1000;
const TEST_URI = uri("http://example.com/");
const PLACE_URI = uri("place:queryType=0&sort=8&maxResults=10");

@ -21,12 +16,16 @@ add_task(function* remove_visits_outside_unbookmarked_uri() {
  do_print("Add 10 visits for the URI from way in the past.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - 1000 - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - 100000 - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);

  do_print("Remove visits using timerange outside the URI's visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 10),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -42,7 +41,7 @@ add_task(function* remove_visits_outside_unbookmarked_uri() {
  do_check_eq(root.childCount, 10);
  for (let i = 0; i < root.childCount; i++) {
    let visitTime = root.getChild(i).time;
    do_check_eq(visitTime, NOW - 1000 - i);
    do_check_eq(visitTime, DB_NOW - 100000 - (i * 1000));
  }
  root.containerOpen = false;
@ -62,7 +61,7 @@ add_task(function* remove_visits_outside_bookmarked_uri() {
  do_print("Add 10 visits for the URI from way in the past.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - 1000 - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - 100000 - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);
  do_print("Bookmark the URI.");
@ -73,7 +72,11 @@ add_task(function* remove_visits_outside_bookmarked_uri() {
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("Remove visits using timerange outside the URI's visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 10),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -89,7 +92,7 @@ add_task(function* remove_visits_outside_bookmarked_uri() {
  do_check_eq(root.childCount, 10);
  for (let i = 0; i < root.childCount; i++) {
    let visitTime = root.getChild(i).time;
    do_check_eq(visitTime, NOW - 1000 - i);
    do_check_eq(visitTime, DB_NOW - 100000 - (i * 1000));
  }
  root.containerOpen = false;

@ -109,12 +112,16 @@ add_task(function* remove_visits_unbookmarked_uri() {
  do_print("Add 10 visits for the URI from now to 9 usecs in the past.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);

  do_print("Remove the 5 most recent visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 4, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 4),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -130,7 +137,7 @@ add_task(function* remove_visits_unbookmarked_uri() {
  do_check_eq(root.childCount, 5);
  for (let i = 0; i < root.childCount; i++) {
    let visitTime = root.getChild(i).time;
    do_check_eq(visitTime, NOW - i - 5);
    do_check_eq(visitTime, DB_NOW - (i * 1000) - 5000);
  }
  root.containerOpen = false;
@ -150,7 +157,7 @@ add_task(function* remove_visits_bookmarked_uri() {
  do_print("Add 10 visits for the URI from now to 9 usecs in the past.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);
  do_print("Bookmark the URI.");
@ -161,7 +168,11 @@ add_task(function* remove_visits_bookmarked_uri() {
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("Remove the 5 most recent visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 4, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 4),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -177,7 +188,7 @@ add_task(function* remove_visits_bookmarked_uri() {
  do_check_eq(root.childCount, 5);
  for (let i = 0; i < root.childCount; i++) {
    let visitTime = root.getChild(i).time;
    do_check_eq(visitTime, NOW - i - 5);
    do_check_eq(visitTime, DB_NOW - (i * 1000) - 5000);
  }
  root.containerOpen = false;

@ -197,12 +208,16 @@ add_task(function* remove_all_visits_unbookmarked_uri() {
  do_print("Add some visits for the URI.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);

  do_print("Remove all visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 10),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should no longer exist in moz_places.");
@ -224,49 +239,13 @@
  yield cleanup();
});

add_task(function* remove_all_visits_unbookmarked_place_uri() {
  do_print("*** TEST: Remove all visits from an unbookmarked place: URI");
  do_print("Add some visits for the URI.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: PLACE_URI, visitDate: NOW - i });
  }
  yield PlacesTestUtils.addVisits(visits);

  do_print("Remove all visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
  do_check_true(page_in_database(PLACE_URI.spec));

  do_print("Run a history query and check that no visits exist.");
  let query = PlacesUtils.history.getNewQuery();
  let opts = PlacesUtils.history.getNewQueryOptions();
  opts.resultType = opts.RESULTS_AS_VISIT;
  opts.sortingMode = opts.SORT_BY_DATE_DESCENDING;
  let root = PlacesUtils.history.executeQuery(query, opts).root;
  root.containerOpen = true;
  do_check_eq(root.childCount, 0);
  root.containerOpen = false;

  do_print("asyncHistory.isURIVisited should return false.");
  do_check_false(yield promiseIsURIVisited(PLACE_URI));
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("Frecency should be zero.")
  do_check_eq(frecencyForUrl(PLACE_URI.spec), 0);

  yield cleanup();
});

add_task(function* remove_all_visits_bookmarked_uri() {
  do_print("*** TEST: Remove all visits from a bookmarked URI");

  do_print("Add some visits for the URI.");
  let visits = [];
  for (let i = 0; i < 10; i++) {
    visits.push({ uri: TEST_URI, visitDate: NOW - i });
    visits.push({ uri: TEST_URI, visitDate: DB_NOW - (i * 1000) });
  }
  yield PlacesTestUtils.addVisits(visits);
  do_print("Bookmark the URI.");
@ -275,9 +254,14 @@ add_task(function* remove_all_visits_bookmarked_uri() {
                                       PlacesUtils.bookmarks.DEFAULT_INDEX,
                                       "bookmark title");
  yield PlacesTestUtils.promiseAsyncUpdates();
  let initialFrecency = frecencyForUrl(TEST_URI);

  do_print("Remove all visits.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 10),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -300,8 +284,8 @@ add_task(function* remove_all_visits_bookmarked_uri() {
  do_check_true(PlacesUtils.bookmarks.isBookmarked(TEST_URI));
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("Frecency should be negative.")
  do_check_true(frecencyForUrl(TEST_URI) < 0);
  do_print("Frecency should be smaller.")
  do_check_true(frecencyForUrl(TEST_URI) < initialFrecency);

  yield cleanup();
});
@ -311,12 +295,16 @@ add_task(function* remove_all_visits_bookmarked_uri() {

  do_print("Add some visits for the URI.");
  yield PlacesTestUtils.addVisits([
    { uri: TEST_URI, transition: TRANSITION_FRAMED_LINK, visitDate: (NOW - 86400000000) },
    { uri: TEST_URI, transition: TRANSITION_FRAMED_LINK, visitDate: NOW }
    { uri: TEST_URI, transition: TRANSITION_FRAMED_LINK, visitDate: (DB_NOW - 86400000000000) },
    { uri: TEST_URI, transition: TRANSITION_FRAMED_LINK, visitDate: DB_NOW }
  ]);

  do_print("Remove newer visit.");
  PlacesUtils.history.removeVisitsByTimeframe(NOW - 10, NOW);
  let filter = {
    beginDate: new Date(JS_NOW - 10),
    endDate: new Date(JS_NOW)
  };
  yield PlacesUtils.history.removeVisitsByFilter(filter);
  yield PlacesTestUtils.promiseAsyncUpdates();

  do_print("URI should still exist in moz_places.");
@ -3,4 +3,5 @@ head = head_history.js
tail =

[test_remove.js]
[test_removeVisits.js]
[test_removeVisitsByFilter.js]

@ -73,7 +73,7 @@ skip-if = (os == "win" && os_version == "5.1") # Bug 1158887
[test_bookmark_catobs.js]
[test_bookmarks_json.js]
[test_bookmarks_html.js]
[test_bookmarks_html_corrupt.js]
[test_bookmarks_html_corrupt.js]
[test_bookmarks_html_import_tags.js]
[test_bookmarks_html_singleframe.js]
[test_bookmarks_restore_notification.js]
@ -134,7 +134,6 @@ skip-if = os == "android"
skip-if = os == "android"
[test_preventive_maintenance_runTasks.js]
[test_promiseBookmarksTree.js]
[test_removeVisitsByTimeframe.js]
# Bug 676989: test hangs consistently on Android
skip-if = os == "android"
[test_resolveNullBookmarkTitles.js]