merge fx-team to mozilla-central a=merge

This commit is contained in:
Carsten "Tomcat" Book 2015-07-21 16:52:23 +02:00
commit 94af88c469
78 changed files with 2627 additions and 1855 deletions

View File

@ -541,7 +541,6 @@ pref("privacy.item.cookies", false);
pref("privacy.clearOnShutdown.history", true);
pref("privacy.clearOnShutdown.formdata", true);
pref("privacy.clearOnShutdown.passwords", false);
pref("privacy.clearOnShutdown.downloads", true);
pref("privacy.clearOnShutdown.cookies", true);
pref("privacy.clearOnShutdown.cache", true);
@ -574,6 +573,8 @@ pref("privacy.sanitize.sanitizeOnShutdown", false);
pref("privacy.sanitize.migrateFx3Prefs", false);
pref("privacy.sanitize.migrateClearSavedPwdsOnExit", false);
pref("privacy.panicButton.enabled", true);
pref("network.proxy.share_proxy_settings", false); // use the same proxy settings for all protocols
@ -1344,7 +1345,6 @@ pref("services.sync.prefs.sync.privacy.clearOnShutdown.downloads", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.formdata", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.history", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.offlineApps", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.passwords", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.sessions", true);
pref("services.sync.prefs.sync.privacy.clearOnShutdown.siteSettings", true);
pref("services.sync.prefs.sync.privacy.donottrackheader.enabled", true);
@ -1680,9 +1680,6 @@ pref("browser.newtab.preload", true);
// Remembers if the about:newtab intro has been shown
pref("browser.newtabpage.introShown", false);
// Remembers if the about:newtab update intro has been shown
pref("browser.newtabpage.updateIntroShown", false);
// Toggles the content of 'about:newtab'. Shows the grid when enabled.
pref("browser.newtabpage.enabled", true);

View File

@ -716,6 +716,7 @@
autocompletesearchparam="enable-actions"
autocompletepopup="PopupAutoCompleteRichResult"
completeselectedindex="true"
shrinkdelay="250"
tabscrolling="true"
showcommentcolumn="true"
showimagecolumn="true"

View File

@ -5,118 +5,49 @@
#endif
const PREF_INTRO_SHOWN = "browser.newtabpage.introShown";
const PREF_UPDATE_INTRO_SHOWN = "browser.newtabpage.updateIntroShown";
const PREF_NEWTAB_ENHANCED = "browser.newtabpage.enhanced";
// These consts indicate the type of intro/onboarding we show.
const WELCOME = "welcome";
const UPDATE = "update";
// The maximum paragraph ID listed for 'newtab.intro.paragraph'
// strings in newTab.properties
const MAX_PARAGRAPH_ID = 9;
const NUM_INTRO_PAGES = 3;
let gIntro = {
_nodeIDSuffixes: [
"mask",
"modal",
"numerical-progress",
"text",
"buttons",
"image",
"header",
"footer"
],
_imageTypes: {
COG : "cog",
PIN_REMOVE : "pin-remove",
SUGGESTED : "suggested"
},
/**
* The paragraphs & buttons to show on each page in the intros.
*
* _introPages.welcome and _introPages.update contain an array of
* indices of paragraphs to be used to lookup text in _paragraphs
* for each page of the intro.
*
* Similarly, _introPages.buttons is used to lookup text for buttons
* on each page of the intro.
*/
_introPages: {
"welcome": [[0,1],[2,5],[4,3]],
"update": [[6,5],[4,3],[0,1]],
"buttons": [["skip", "continue"],["back", "next"],["back", "gotit"]],
"welcome-images": ["cog", "suggested", "pin-remove"],
"update-images": ["suggested", "pin-remove", "cog"]
},
_paragraphs: [],
_nodes: {},
_images: {},
init: function() {
for (let idSuffix of this._nodeIDSuffixes) {
this._nodes[idSuffix] = document.getElementById("newtab-intro-" + idSuffix);
}
let brand = Services.strings.createBundle("chrome://branding/locale/brand.properties");
this._brandShortName = brand.GetStringFromName("brandShortName");
},
_setImage: function(imageType) {
// Remove previously existing images, if any.
let currImageHolder = this._nodes.image;
while (currImageHolder.firstChild) {
currImageHolder.removeChild(currImageHolder.firstChild);
}
this._nodes.image.appendChild(this._images[imageType]);
},
_goToPage: function(pageNum) {
this._currPage = pageNum;
this._nodes["numerical-progress"].innerHTML = `${this._bold(pageNum + 1)} / ${NUM_INTRO_PAGES}`;
this._nodes["numerical-progress"].setAttribute("page", pageNum);
// Set the page's image
let imageType = this._introPages[this._onboardingType + "-images"][pageNum];
this._setImage(imageType);
_showMessage: function() {
// Set the paragraphs
let paragraphNodes = this._nodes.text.getElementsByTagName("p");
let paragraphIDs = this._introPages[this._onboardingType][pageNum];
paragraphIDs.forEach((arg, index) => {
paragraphNodes[index].innerHTML = this._paragraphs[arg];
this._paragraphs.forEach((arg, index) => {
paragraphNodes[index].innerHTML = arg;
});
// Set the buttons
let buttonNodes = this._nodes.buttons.getElementsByTagName("input");
let buttonIDs = this._introPages.buttons[pageNum];
buttonIDs.forEach((arg, index) => {
buttonNodes[index].setAttribute("value", newTabString("intro." + arg));
});
// Set the button
document.getElementById("newtab-intro-button").
setAttribute("value", newTabString("intro.gotit"));
},
_bold: function(str) {
return `<strong>${str}</strong>`
return `<strong>${str}</strong>`;
},
_link: function(url, text) {
return `<a href="${url}" target="_blank">${text}</a>`;
},
_span: function(text, className) {
return `<span class="${className}">${text}</span>`;
},
_exitIntro: function() {
this._nodes.mask.style.opacity = 0;
this._nodes.mask.addEventListener("transitionend", () => {
@ -124,85 +55,14 @@ let gIntro = {
});
},
_back: function() {
if (this._currPage == 0) {
// We're on the first page so 'back' means exit.
this._exitIntro();
return;
}
this._goToPage(this._currPage - 1);
},
_next: function() {
if (this._currPage == (NUM_INTRO_PAGES - 1)) {
// We're on the last page so 'next' means exit.
this._exitIntro();
return;
}
this._goToPage(this._currPage + 1);
},
_generateImages: function() {
Object.keys(this._imageTypes).forEach(type => {
let image = "";
let imageClass = "";
switch (this._imageTypes[type]) {
case this._imageTypes.COG:
// Copy the customize panel's subnodes over so that it can be styled
// appropriately for the intro.
image = document.createElementNS(HTML_NAMESPACE, "div");
image.classList.add("newtab-intro-image-customize");
let imageToCopy = document.getElementById("newtab-customize-panel").cloneNode(true);
while (imageToCopy.firstChild) {
image.appendChild(imageToCopy.firstChild);
}
break;
case this._imageTypes.PIN_REMOVE:
imageClass = "-hover";
// fall-through
case this._imageTypes.SUGGESTED:
image = document.createElementNS(HTML_NAMESPACE, "div");
image.classList.add("newtab-intro-cell-wrapper");
// Create the cell's inner HTML code.
image.innerHTML =
'<div class="newtab-intro-cell' + imageClass + '">' +
' <div class="newtab-site newtab-intro-image-tile" type="sponsored" suggested="' + (imageClass ? "false" : "true") + '">' +
' <span class="newtab-sponsored">' +
newTabString(imageClass ? "sponsored.button" : "suggested.tag") + '</span>' +
' <a class="newtab-link">' +
' <span class="newtab-thumbnail"/>' +
' <span class="newtab-title">mozilla.org</span>' +
' </a>' +
' <input type="button" class="newtab-control newtab-control-pin"/>' +
' <input type="button" class="newtab-control newtab-control-block"/>' +
' </div>' +
'</div>';
break;
}
this._images[this._imageTypes[type]] = image;
});
},
_generateParagraphs: function() {
let customizeIcon = '<input type="button" class="newtab-control newtab-customize"/>';
let substringMappings = {
"2": [this._link(TILES_PRIVACY_LINK, newTabString("privacy.link"))],
"4": [customizeIcon, this._bold(newTabString("intro.controls"))],
"6": [this._bold(newTabString("intro.paragraph6.remove")), this._bold(newTabString("intro.paragraph6.pin"))],
"7": [this._link(TILES_INTRO_LINK, newTabString("learn.link"))],
"8": [this._brandShortName, this._link(TILES_INTRO_LINK, newTabString("learn.link"))]
}
for (let i = 1; i <= MAX_PARAGRAPH_ID; i++) {
try {
let name = "intro.paragraph" + i + (i == 4 ? ".2" : "");
this._paragraphs.push(newTabString(name, substringMappings[i]));
} catch (ex) {
// Paragraph with this ID doesn't exist so continue
}
}
this._paragraphs.push(newTabString("intro1.paragraph1"));
this._paragraphs.push(newTabString("intro1.paragraph2",
[
this._link(TILES_PRIVACY_LINK, newTabString("privacy.link")),
customizeIcon
]));
},
showIfNecessary: function() {
@ -210,14 +70,9 @@ let gIntro = {
return;
}
if (!Services.prefs.getBoolPref(PREF_INTRO_SHOWN)) {
this._onboardingType = WELCOME;
this.showPanel();
} else if (!Services.prefs.getBoolPref(PREF_UPDATE_INTRO_SHOWN)) {
this._onboardingType = UPDATE;
this.showPanel();
Services.prefs.setBoolPref(PREF_INTRO_SHOWN, true);
}
Services.prefs.setBoolPref(PREF_INTRO_SHOWN, true);
Services.prefs.setBoolPref(PREF_UPDATE_INTRO_SHOWN, true);
},
showPanel: function() {
@ -227,20 +82,14 @@ let gIntro = {
if (!this._paragraphs.length) {
// It's our first time showing the panel. Do some initial setup
this._generateParagraphs();
this._generateImages();
}
this._goToPage(0);
this._showMessage();
// Header text
let boldSubstr = this._onboardingType == WELCOME ? this._span(this._brandShortName, "bold") : "";
this._nodes.header.innerHTML = newTabString("intro.header." + this._onboardingType, [boldSubstr]);
this._nodes.header.innerHTML = newTabString("intro.header.update");
// Footer links
let footerLinkNodes = this._nodes.footer.getElementsByTagName("li");
[this._link(TILES_INTRO_LINK, newTabString("learn.link2")),
this._link(TILES_PRIVACY_LINK, newTabString("privacy.link2")),
].forEach((arg, index) => {
footerLinkNodes[index].innerHTML = arg;
});
let footerLinkNode = document.getElementById("newtab-intro-link");
footerLinkNode.innerHTML = this._link(TILES_INTRO_LINK, newTabString("learn.link2"))
},
};

View File

@ -655,7 +655,7 @@ input[type=button] {
#newtab-intro-modal {
font-family: "Helvetica";
height: 512px;
max-height: 800px;
position: fixed;
left: 0;
right: 0;
@ -665,7 +665,6 @@ input[type=button] {
background: linear-gradient(#FFFFFF, #F9F9F9);
box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.7);
border-radius: 8px 8px 0px 0px;
min-width: 715px;
position: relative;
display: inline-block;
top: 50%;
@ -676,14 +675,14 @@ input[type=button] {
font-size: 28px;
color: #737980;
text-align: center;
top: 50px;
top: 30px;
position: relative;
border-bottom: 2px solid #E0DFE0;
padding-bottom: 10px;
display: block;
margin: 0px auto;
margin: 0px 50px;
font-weight: 100;
padding: 0px 15px;
padding: 0px 15px 10px;
}
#newtab-intro-header .bold {
@ -697,7 +696,6 @@ input[type=button] {
margin: 0px auto;
display: block;
position: absolute;
bottom: 0px;
background-color: white;
box-shadow: 0 -1px 4px -1px #EBEBEB;
text-align: center;
@ -722,20 +720,10 @@ input[type=button] {
color: #4A90E2;
}
#newtab-intro-footer > ul > li > a:visited {
color: #171F26;
}
#newtab-intro-footer > ul > :first-child {
border-right: solid 1px #C1C1C1;
}
#newtab-intro-body {
height: 330px;
position: relative;
display: block;
top: 50px;
margin: 25px 50px 30px;
margin: 55px 50px 38px;
}
#newtab-intro-content > * {
@ -743,14 +731,12 @@ input[type=button] {
}
#newtab-intro-content {
height: 210px;
position: relative;
}
#newtab-intro-buttons {
text-align: center;
vertical-align: middle;
position: absolute;
display: block;
bottom: 0px;
width: 100%;
@ -764,53 +750,19 @@ input[type=button] {
#newtab-intro-text {
text-align: left;
right: 0px;
width: 270px;
}
#newtab-intro-text,
#newtab-intro-image {
height: 100%;
right: 0px;
font-size: 14px;
line-height: 20px;
min-width: 270px;
}
#newtab-intro-image {
left: 0px;
right: auto;
float: left;
margin-right: 40px;
}
.newtab-intro-image-customize {
box-shadow: 3px 3px 5px #888;
margin-top: 0px;
background-color: #FFF;
float: left;
z-index: 101;
margin-top: -5px;
min-width: 270px;
padding: 0;
}
.newtab-intro-image-customize #newtab-customize-title {
display: block;
max-height: 40px;
}
.newtab-intro-image-customize #newtab-customize-panel-anchor {
display: none;
}
.newtab-intro-image-customize .newtab-customize-panel-item:not([selected]):hover {
background-color: inherit;
color: #7A7A7A;
background: none;
width: 460px;
}
#newtab-intro-text > p {
margin: 0 0 1em 0;
margin: 0 0 30px;
}
#newtab-intro-text > p > a {
text-decoration: none;
color: #4A90E2;
}
#newtab-intro-text .newtab-control {
@ -820,10 +772,11 @@ input[type=button] {
vertical-align: middle;
opacity: 1;
position: inherit;
pointer-events: none;
}
#newtab-intro-buttons > input {
min-width: 150px;
min-width: 120px;
height: 50px;
margin: 0px 5px;
vertical-align: bottom;
@ -844,51 +797,3 @@ input[type=button] {
background-color: #2C72E2;
color: #FFFFFF;
}
#newtab-intro-progress {
position: absolute;
width: 100%;
}
#newtab-intro-numerical-progress {
text-align: center;
top: 15px;
position: relative;
font-size: 12px;
color: #424F5A;
}
#newtab-intro-graphical-progress {
text-align: left;
border-radius: 1.5px;
overflow: hidden;
position: relative;
margin: 10px auto 0px;
height: 3px;
top: 8px;
width: 35px;
background-color: #DCDCDC;
}
#indicator {
position: absolute;
top: 0px;
left: 0px;
display: inline-block;
width: 0%;
height: 4px;
background: none repeat scroll 0% 0% #FF9500;
transition: width 0.3s ease-in-out 0s;
}
#newtab-intro-numerical-progress[page="0"] + #newtab-intro-graphical-progress > #indicator {
width: 33%;
}
#newtab-intro-numerical-progress[page="1"] + #newtab-intro-graphical-progress > #indicator {
width: 66%;
}
#newtab-intro-numerical-progress[page="2"] + #newtab-intro-graphical-progress > #indicator {
width: 100%;
}

View File

@ -49,28 +49,20 @@
<div id="newtab-intro-mask">
<div id="newtab-intro-modal">
<div id="newtab-intro-progress">
<div id="newtab-intro-numerical-progress"/>
<div id="newtab-intro-graphical-progress">
<span id="indicator"/>
</div>
</div>
<div id="newtab-intro-header"/>
<div id="newtab-intro-body">
<div id="newtab-intro-content">
<div id="newtab-intro-image"/>
<div id="newtab-intro-text">
<p/><p/>
</div>
</div>
<div id="newtab-intro-buttons">
<input type="button" onclick="gIntro._back()"/>
<input type="button" default="true" onclick="gIntro._next()"/>
<input id="newtab-intro-button" type="button" default="true" onclick="gIntro._exitIntro()"/>
</div>
</div>
<div id="newtab-intro-footer">
<ul>
<li/><li/>
<li id="newtab-intro-link"/>
</ul>
</div>
</div>

View File

@ -447,26 +447,6 @@ Sanitizer.prototype = {
}
},
passwords: {
clear: function ()
{
TelemetryStopwatch.start("FX_SANITIZE_PASSWORDS");
var pwmgr = Components.classes["@mozilla.org/login-manager;1"]
.getService(Components.interfaces.nsILoginManager);
// Passwords are timeless, and don't respect the timeSpan setting
pwmgr.removeAllLogins();
TelemetryStopwatch.finish("FX_SANITIZE_PASSWORDS");
},
get canClear()
{
var pwmgr = Components.classes["@mozilla.org/login-manager;1"]
.getService(Components.interfaces.nsILoginManager);
var count = pwmgr.countLogins("", "", ""); // count all logins
return (count > 0);
}
},
sessions: {
clear: function ()
{
@ -785,6 +765,20 @@ Sanitizer._checkAndSanitize = function()
const prefs = Sanitizer.prefs;
if (prefs.getBoolPref(Sanitizer.prefShutdown) &&
!prefs.prefHasUserValue(Sanitizer.prefDidShutdown)) {
// One time migration to remove support for the clear saved passwords on exit feature.
if (!Services.prefs.getBoolPref("privacy.sanitize.migrateClearSavedPwdsOnExit")) {
let deprecatedPref = "privacy.clearOnShutdown.passwords";
let doUpdate = Services.prefs.prefHasUserValue(deprecatedPref) &&
Services.prefs.getBoolPref(deprecatedPref);
if (doUpdate) {
Services.logins.removeAllLogins();
Services.prefs.setBoolPref("signon.rememberSignons", false);
}
Services.prefs.clearUserPref(deprecatedPref);
Services.prefs.setBoolPref("privacy.sanitize.migrateClearSavedPwdsOnExit", true);
}
// this is a shutdown or a startup after an unclean exit
var s = new Sanitizer();
s.prefDomain = "privacy.clearOnShutdown.";

View File

@ -69,7 +69,7 @@ add_task(function* test_healthreport_search_recording() {
let oldTelemetry = Services.prefs.getBoolPref("toolkit.telemetry.enabled");
Services.prefs.setBoolPref("toolkit.telemetry.enabled", true);
m = provider.getMeasurement("engines", 1);
m = provider.getMeasurement("engines", 2);
yield provider.collectDailyData();
data = yield m.getValues();

View File

@ -2,35 +2,27 @@
http://creativecommons.org/publicdomain/zero/1.0/ */
const INTRO_PREF = "browser.newtabpage.introShown";
const UPDATE_INTRO_PREF = "browser.newtabpage.updateIntroShown";
const PRELOAD_PREF = "browser.newtab.preload";
function runTests() {
let origIntro = Services.prefs.getBoolPref(INTRO_PREF);
let origUpdateIntro = Services.prefs.getBoolPref(UPDATE_INTRO_PREF);
let origPreload = Services.prefs.getBoolPref(PRELOAD_PREF);
registerCleanupFunction(_ => {
Services.prefs.setBoolPref(INTRO_PREF, origIntro);
Services.prefs.setBoolPref(UPDATE_INTRO_PREF, origUpdateIntro);
Services.prefs.setBoolPref(PRELOAD_PREF, origPreload);
});
// Test with preload false
Services.prefs.setBoolPref(INTRO_PREF, false);
Services.prefs.setBoolPref(UPDATE_INTRO_PREF, false);
Services.prefs.setBoolPref(PRELOAD_PREF, false);
let intro;
let brand = Services.strings.createBundle("chrome://branding/locale/brand.properties");
let brandShortName = brand.GetStringFromName("brandShortName");
yield addNewTabPageTab();
let intro;
intro = getContentDocument().getElementById("newtab-intro-mask");
is(intro.style.opacity, 1, "intro automatically shown on first opening");
is(getContentDocument().getElementById("newtab-intro-header").innerHTML,
'Welcome to New Tab on <span xmlns="http://www.w3.org/1999/xhtml" class="bold">' + brandShortName + '</span>!', "we show the first-run intro.");
'New Tab got an update!', "we show intro.");
is(Services.prefs.getBoolPref(INTRO_PREF), true, "newtab remembers that the intro was shown");
is(Services.prefs.getBoolPref(UPDATE_INTRO_PREF), true, "newtab avoids showing update if intro was shown");
yield addNewTabPageTab();
intro = getContentDocument().getElementById("newtab-intro-mask");
@ -44,76 +36,11 @@ function runTests() {
intro = getContentDocument().getElementById("newtab-intro-mask");
is(intro.style.opacity, 1, "intro automatically shown on preloaded opening");
is(getContentDocument().getElementById("newtab-intro-header").innerHTML,
'Welcome to New Tab on <span xmlns="http://www.w3.org/1999/xhtml" class="bold">' + brandShortName + '</span>!', "we show the first-run intro.");
'New Tab got an update!', "we show intro.");
is(Services.prefs.getBoolPref(INTRO_PREF), true, "newtab remembers that the intro was shown");
is(Services.prefs.getBoolPref(UPDATE_INTRO_PREF), true, "newtab avoids showing update if intro was shown");
// Test with first run true but update false
Services.prefs.setBoolPref(UPDATE_INTRO_PREF, false);
let gotit = getContentDocument().getElementById("newtab-intro-button");
gotit.click();
yield addNewTabPageTab();
intro = getContentDocument().getElementById("newtab-intro-mask");
is(intro.style.opacity, 1, "intro automatically shown on preloaded opening");
is(getContentDocument().getElementById("newtab-intro-header").innerHTML,
"New Tab got an update!", "we show the update intro.");
is(Services.prefs.getBoolPref(INTRO_PREF), true, "INTRO_PREF stays true");
is(Services.prefs.getBoolPref(UPDATE_INTRO_PREF), true, "newtab remembers that the update intro was shown");
// Test clicking the 'next' and 'back' buttons.
let buttons = getContentDocument().getElementById("newtab-intro-buttons").getElementsByTagName("input");
let progress = getContentDocument().getElementById("newtab-intro-numerical-progress");
let back = buttons[0];
let next = buttons[1];
is(progress.getAttribute("page"), 0, "we are on the first page");
is(intro.style.opacity, 1, "intro visible");
let createMutationObserver = function(fcn) {
return new Promise(resolve => {
let observer = new MutationObserver(function(mutations) {
fcn();
observer.disconnect();
resolve();
});
let config = { attributes: true, attributeFilter: ["style"], childList: true };
observer.observe(progress, config);
});
}
let p = createMutationObserver(function() {
is(progress.getAttribute("page"), 1, "we get to the 2nd page");
is(intro.style.opacity, 1, "intro visible");
});
next.click();
yield p.then(TestRunner.next);
p = createMutationObserver(function() {
is(progress.getAttribute("page"), 2, "we get to the 3rd page");
is(intro.style.opacity, 1, "intro visible");
});
next.click();
yield p.then(TestRunner.next);
p = createMutationObserver(function() {
is(progress.getAttribute("page"), 1, "go back to 2nd page");
is(intro.style.opacity, 1, "intro visible");
});
back.click();
yield p.then(TestRunner.next);
p = createMutationObserver(function() {
is(progress.getAttribute("page"), 0, "go back to 1st page");
is(intro.style.opacity, 1, "intro visible");
});
back.click();
yield p.then(TestRunner.next);
p = createMutationObserver(function() {
is(progress.getAttribute("page"), 0, "another back will 'skip tutorial'");
is(intro.style.opacity, 0, "intro exited");
});
back.click();
p.then(TestRunner.next);
is(intro.style.opacity, 0, "intro exited");
}

View File

@ -6,12 +6,10 @@
let Preferences = Cu.import("resource://gre/modules/Preferences.jsm", {}).Preferences;
let tmp = {};
Cu.import("resource://gre/modules/FxAccounts.jsm", tmp);
Cu.import("resource://gre/modules/FxAccountsCommon.js", tmp);
Cu.import("resource://services-sync/browserid_identity.js", tmp);
let {FxAccounts, BrowserIDManager, DATA_FORMAT_VERSION, CERT_LIFETIME} = tmp;
let fxaSyncIsEnabled = Weave.Service.identity instanceof BrowserIDManager;
const {FxAccounts, AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
// FxA logs can be obtained via this pref, which helps with debugging.
Preferences.set("services.sync.log.appender.dump", "Debug");
add_task(function() {
yield PanelUI.show({type: "command"});
@ -47,35 +45,56 @@ add_task(function() {
PanelUI.toggle({type: "command"});
yield hiddenPanelPromise;
if (fxaSyncIsEnabled) {
yield fxAccounts.signOut();
}
yield fxAccounts.signOut(/*localOnly = */true);
});
function configureIdentity() {
// do the FxAccounts thang...
// do the FxAccounts thang and wait for Sync to initialize the identity.
configureFxAccountIdentity();
if (fxaSyncIsEnabled) {
return Weave.Service.identity.initializeWithCurrentIdentity().then(() => {
// need to wait until this identity manager is readyToAuthenticate.
return Weave.Service.identity.whenReadyToAuthenticate.promise;
});
}
Weave.Service.createAccount("john@doe.com", "mysecretpw",
"challenge", "response");
Weave.Service.identity.account = "john@doe.com";
Weave.Service.identity.basicPassword = "mysecretpw";
Weave.Service.identity.syncKey = Weave.Utils.generatePassphrase();
Weave.Svc.Prefs.set("firstSync", "newAccount");
Weave.Service.persistLogin();
return Promise.resolve();
return Weave.Service.identity.initializeWithCurrentIdentity().then(() => {
// need to wait until this identity manager is readyToAuthenticate.
return Weave.Service.identity.whenReadyToAuthenticate.promise;
});
}
// Configure an instance of an FxAccount identity provider with the specified
// config (or the default config if not specified).
// Configure an instance of an FxAccount identity provider.
function configureFxAccountIdentity() {
// A mock "storage manager" for FxAccounts that doesn't actually write anywhere.
function MockFxaStorageManager() {
}
MockFxaStorageManager.prototype = {
promiseInitialized: Promise.resolve(),
initialize(accountData) {
this.accountData = accountData;
},
finalize() {
return Promise.resolve();
},
getAccountData() {
return Promise.resolve(this.accountData);
},
updateAccountData(updatedFields) {
for (let [name, value] of Iterator(updatedFields)) {
if (value == null) {
delete this.accountData[name];
} else {
this.accountData[name] = value;
}
}
return Promise.resolve();
},
deleteAccountData() {
this.accountData = null;
return Promise.resolve();
}
}
let user = {
assertion: "assertion",
email: "email",
@ -94,7 +113,25 @@ function configureFxAccountIdentity() {
// uid will be set to the username.
};
let MockInternal = {};
let MockInternal = {
newAccountState(credentials) {
isnot(credentials, "not expecting credentials");
let storageManager = new MockFxaStorageManager();
// and init storage with our user.
storageManager.initialize(user);
return new AccountState(this, storageManager);
},
getCertificate(data, keyPair, mustBeValidUntil) {
this.cert = {
validUntil: this.now() + 10000,
cert: "certificate",
};
return Promise.resolve(this.cert.cert);
},
getCertificateSigned() {
return Promise.resolve();
},
};
let mockTSC = { // TokenServerClient
getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
token.uid = "username";
@ -102,23 +139,10 @@ function configureFxAccountIdentity() {
},
};
let authService = Weave.Service.identity;
authService._fxaService = new FxAccounts(MockInternal);
authService._fxaService.internal.currentAccountState.signedInUser = {
version: DATA_FORMAT_VERSION,
accountData: user
}
authService._fxaService.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
this.cert = {
validUntil: authService._fxaService.internal.now() + CERT_LIFETIME,
cert: "certificate",
};
return Promise.resolve(this.cert.cert);
};
authService._tokenServerClient = mockTSC;
let fxa = new FxAccounts(MockInternal);
Weave.Service.identity._fxaService = fxa;
Weave.Service.identity._tokenServerClient = mockTSC;
// Set the "account" of the browserId manager to be the "email" of the
// logged in user of the mockFXA service.
authService._account = user.email;
Weave.Service.identity._account = user.email;
}

View File

@ -57,7 +57,6 @@ add_task(function* test_execute() {
Services.prefs.setBoolPref("privacy.clearOnShutdown.downloads", true);
Services.prefs.setBoolPref("privacy.clearOnShutdown.cookies", true);
Services.prefs.setBoolPref("privacy.clearOnShutdown.formData", true);
Services.prefs.setBoolPref("privacy.clearOnShutdown.passwords", true);
Services.prefs.setBoolPref("privacy.clearOnShutdown.sessions", true);
Services.prefs.setBoolPref("privacy.clearOnShutdown.siteSettings", true);

View File

@ -268,6 +268,9 @@
<!-- logged in locally but server rejected credentials -->
<hbox id="fxaLoginRejected"
flex="1">
<vbox>
<image id="fxaLoginRejectedWarning"/>
</vbox>
<description>
&signedInLoginFailure.beforename.label;
<label id="fxaEmailAddress3"/>

View File

@ -34,7 +34,6 @@
<preference id="privacy.clearOnShutdown.history" name="privacy.clearOnShutdown.history" type="bool"
onchange="return gSanitizeDialog.onClearHistoryChanged();"/>
<preference id="privacy.clearOnShutdown.formdata" name="privacy.clearOnShutdown.formdata" type="bool"/>
<preference id="privacy.clearOnShutdown.passwords" name="privacy.clearOnShutdown.passwords" type="bool"/>
<preference id="privacy.clearOnShutdown.downloads" name="privacy.clearOnShutdown.downloads" type="bool"/>
<preference id="privacy.clearOnShutdown.cookies" name="privacy.clearOnShutdown.cookies" type="bool"/>
<preference id="privacy.clearOnShutdown.cache" name="privacy.clearOnShutdown.cache" type="bool"/>
@ -87,18 +86,13 @@
<column flex="1"/>
</columns>
<rows>
<row>
<checkbox label="&itemPasswords.label;"
accesskey="&itemPasswords.accesskey;"
preference="privacy.clearOnShutdown.passwords"/>
<checkbox label="&itemOfflineApps.label;"
accesskey="&itemOfflineApps.accesskey;"
preference="privacy.clearOnShutdown.offlineApps"/>
</row>
<row>
<checkbox label="&itemSitePreferences.label;"
accesskey="&itemSitePreferences.accesskey;"
preference="privacy.clearOnShutdown.siteSettings"/>
<checkbox label="&itemOfflineApps.label;"
accesskey="&itemOfflineApps.accesskey;"
preference="privacy.clearOnShutdown.offlineApps"/>
</row>
</rows>
</grid>

View File

@ -1161,7 +1161,6 @@ function CssRuleView(inspector, document, aStore, aPageStyle) {
this.store = aStore || {};
this.pageStyle = aPageStyle;
this._editorsExpandedForFilter = [];
this._outputParser = new OutputParser();
this._onKeypress = this._onKeypress.bind(this);
@ -1244,6 +1243,11 @@ CssRuleView.prototype = {
// Used for cancelling timeouts in the style filter.
_filterChangedTimeout: null,
// Get the filter search value.
get searchValue() {
return this.searchField.value.toLowerCase();
},
/**
* Get an instance of SelectorHighlighter (used to highlight nodes that match
* selectors in the rule-view). A new instance is only created the first time
@ -1562,9 +1566,9 @@ CssRuleView.prototype = {
clearTimeout(this._filterChangedTimeout);
}
let filterTimeout = (this.searchField.value.length > 0)
? FILTER_CHANGED_TIMEOUT : 0;
this.searchClearButton.hidden = this.searchField.value.length === 0;
let filterTimeout = (this.searchValue.length > 0) ?
FILTER_CHANGED_TIMEOUT : 0;
this.searchClearButton.hidden = this.searchValue.length === 0;
this._filterChangedTimeout = setTimeout(() => {
if (this.searchField.value.length > 0) {
@ -1573,7 +1577,16 @@ CssRuleView.prototype = {
this.searchField.removeAttribute("filled");
}
this._clearHighlights();
// Parse search value as a single property line and extract the property
// name and value. Otherwise, use the search value as both the name and
// value.
this.searchPropertyMatch = FILTER_PROP_RE.exec(this.searchValue);
this.searchPropertyName = this.searchPropertyMatch ?
this.searchPropertyMatch[1] : this.searchValue;
this.searchPropertyValue = this.searchPropertyMatch ?
this.searchPropertyMatch[2] : this.searchValue;
this._clearHighlight(this.element);
this._clearRules();
this._createEditors();
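FILTER_PROP_RE itself is defined near the top of the file and is not part of this diff. As a sketch only (the exact upstream pattern may differ), a regex with the behaviour the comment above describes could look like:
// Hypothetical pattern: splits "width: 100%" into a name and a value, and
// fails to match a plain term such as "margin", in which case the whole
// search value is used as both the name and the value.
const FILTER_PROP_RE = /^\s*([^:\s]+)\s*:\s*(.+?)\s*$/;
FILTER_PROP_RE.exec("width: 100%"); // -> [..., "width", "100%"]
FILTER_PROP_RE.exec("margin");      // -> null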
@ -1635,7 +1648,6 @@ CssRuleView.prototype = {
this._prefObserver.destroy();
this._outputParser = null;
this._editorsExpandedForFilter = null;
// Remove context menu
if (this._contextmenu) {
@ -1959,8 +1971,6 @@ CssRuleView.prototype = {
let seenNormalElement = false;
let seenSearchTerm = false;
let container = null;
let searchTerm = this.searchField.value.toLowerCase();
let isValidSearchTerm = searchTerm.trim().length > 0;
if (!this._elementStyle.rules) {
return;
@ -1977,8 +1987,8 @@ CssRuleView.prototype = {
}
// Filter the rules and highlight any matches if there is a search input
if (isValidSearchTerm) {
if (this.highlightRules(rule, searchTerm)) {
if (this.searchValue) {
if (this.highlightRule(rule)) {
seenSearchTerm = true;
} else if (rule.domRule.type !== ELEMENT_STYLE) {
continue;
@ -2022,7 +2032,7 @@ CssRuleView.prototype = {
}
}
if (searchTerm && !seenSearchTerm) {
if (this.searchValue && !seenSearchTerm) {
this.searchField.classList.add("devtools-style-searchbox-no-match");
} else {
this.searchField.classList.remove("devtools-style-searchbox-no-match");
@ -2030,122 +2040,200 @@ CssRuleView.prototype = {
},
/**
* Highlight rules that matches the given search value and returns a boolean
* indicating whether or not rules were highlighted.
* Highlights rules that match the filter search value and returns a
* boolean indicating whether or not rules were highlighted.
*
* @param {Rule} aRule
* The rule object we're highlighting if its rule selectors or property
* values match the search value.
* @param {String} aValue
* The search value.
* @param {Rule} rule
* The rule object we're highlighting if its rule selectors or
* property values match the search value.
* @return {bool} true if the rule was highlighted, false otherwise.
*/
highlightRules: function(aRule, aValue) {
let isHighlighted = false;
let selectorNodes = [...aRule.editor.selectorText.childNodes];
if (aRule.domRule.type === Ci.nsIDOMCSSRule.KEYFRAME_RULE) {
selectorNodes = [aRule.editor.selectorText];
} else if (aRule.domRule.type === ELEMENT_STYLE) {
selectorNodes = [];
}
aValue = aValue.trim();
// Highlight search matches in the rule selectors
for (let selectorNode of selectorNodes) {
if (selectorNode.textContent.toLowerCase().includes(aValue)) {
selectorNode.classList.add("ruleview-highlight");
isHighlighted = true;
}
}
// Parse search value as a single property line and extract the property
// name and value. Otherwise, use the search value as both the name and
// value.
let propertyMatch = FILTER_PROP_RE.exec(aValue);
let name = propertyMatch ? propertyMatch[1] : aValue;
let value = propertyMatch ? propertyMatch[2] : aValue;
highlightRule: function(rule) {
let isRuleSelectorHighlighted = this._highlightRuleSelector(rule);
let isStyleSheetHighlighted = this._highlightStyleSheet(rule);
let isHighlighted = isRuleSelectorHighlighted || isStyleSheetHighlighted;
// Highlight search matches in the rule properties
for (let textProp of aRule.textProps) {
// Get the actual property value displayed in the rule view
let propertyValue = textProp.editor.valueSpan.textContent.toLowerCase();
let propertyName = textProp.name.toLowerCase();
let styleSheetSource = textProp.rule.title.toLowerCase();
let editor = textProp.editor;
let source = editor.ruleEditor.source;
let isPropertyHighlighted = this._highlightMatches(editor.container, {
searchName: name,
searchValue: value,
propertyName: propertyName,
propertyValue: propertyValue,
propertyMatch: propertyMatch
});
let isComputedHighlighted = false;
// Highlight search matches in the computed list of properties
for (let computed of textProp.computed) {
if (computed.element) {
// Get the actual property value displayed in the computed list
let computedValue = computed.parsedValue.toLowerCase();
let computedName = computed.name.toLowerCase();
isComputedHighlighted = this._highlightMatches(computed.element, {
searchName: name,
searchValue: value,
propertyName: computedName,
propertyValue: computedValue,
propertyMatch: propertyMatch
}) ? true : isComputedHighlighted;
}
}
// Highlight search matches in the stylesheet source
let isStyleSheetHighlighted = styleSheetSource.includes(aValue);
if (isStyleSheetHighlighted) {
source.classList.add("ruleview-highlight");
}
if (isPropertyHighlighted || isComputedHighlighted ||
isStyleSheetHighlighted) {
for (let textProp of rule.textProps) {
if (this._highlightProperty(textProp.editor)) {
isHighlighted = true;
}
// Expand the computed list if a computed rule is highlighted and the
// property rule is not highlighted
if (!isPropertyHighlighted && isComputedHighlighted &&
!editor.computed.hasAttribute("user-open")) {
editor.expandForFilter();
this._editorsExpandedForFilter.push(editor);
}
}
return isHighlighted;
},
/**
* Highlights the rule selector that matches the filter search value and
* returns a boolean indicating whether or not the selector was highlighted.
*
* @param {Rule} rule
* The Rule object.
* @return {bool} true if the rule selector was highlighted, false otherwise.
*/
_highlightRuleSelector: function(rule) {
let isSelectorHighlighted = false;
let selectorNodes = [...rule.editor.selectorText.childNodes];
if (rule.domRule.type === Ci.nsIDOMCSSRule.KEYFRAME_RULE) {
selectorNodes = [rule.editor.selectorText];
} else if (rule.domRule.type === ELEMENT_STYLE) {
selectorNodes = [];
}
// Highlight search matches in the rule selectors
for (let selectorNode of selectorNodes) {
if (selectorNode.textContent.toLowerCase().includes(this.searchValue)) {
selectorNode.classList.add("ruleview-highlight");
isSelectorHighlighted = true;
}
}
return isSelectorHighlighted;
},
/**
* Highlights the stylesheet source that matches the filter search value and
* returns a boolean indicating whether or not the stylesheet source was
* highlighted.
*
* @return {bool} true if the stylesheet source was highlighted, false
* otherwise.
*/
_highlightStyleSheet: function(rule) {
let styleSheetSource = rule.title.toLowerCase();
let isStyleSheetHighlighted = styleSheetSource.includes(this.searchValue);
if (isStyleSheetHighlighted) {
rule.editor.source.classList.add("ruleview-highlight");
}
return isStyleSheetHighlighted;
},
/**
* Highlights the rule properties and computed properties that match the
* filter search value and returns a boolean indicating whether or not the
* property or computed property was highlighted.
*
* @param {TextPropertyEditor} editor
* The rule property TextPropertyEditor object.
* @return {bool} true if the property or computed property was highlighted,
* false otherwise.
*/
_highlightProperty: function(editor) {
let isPropertyHighlighted = this._highlightRuleProperty(editor);
let isComputedHighlighted = this._highlightComputedProperty(editor);
// Expand the computed list if a computed property is highlighted and the
// property rule is not highlighted
if (!isPropertyHighlighted && isComputedHighlighted &&
!editor.computed.hasAttribute("user-open")) {
editor.expandForFilter();
}
return isPropertyHighlighted || isComputedHighlighted;
},
/**
* Called when TextPropertyEditor is updated and updates the rule property
* highlight.
*
* @param {TextPropertyEditor} editor
* The rule property TextPropertyEditor object.
*/
_updatePropertyHighlight: function(editor) {
if (!this.searchValue) {
return;
}
this._clearHighlight(editor.element);
if (this._highlightProperty(editor)) {
this.searchField.classList.remove("devtools-style-searchbox-no-match");
}
},
/**
* Highlights the rule property that matches the filter search value
* and returns a boolean indicating whether or not the property was
* highlighted.
*
* @param {TextPropertyEditor} editor
* The rule property TextPropertyEditor object.
* @return {bool} true if the rule property was highlighted, false otherwise.
*/
_highlightRuleProperty: function(editor) {
// Get the actual property value displayed in the rule view
let propertyName = editor.prop.name.toLowerCase();
let propertyValue = editor.valueSpan.textContent.toLowerCase();
let isPropertyHighlighted = this._highlightMatches(editor.container, {
searchName: this.searchPropertyName,
searchValue: this.searchPropertyValue,
propertyName: propertyName,
propertyValue: propertyValue,
propertyMatch: this.searchPropertyMatch
});
return isPropertyHighlighted;
},
/**
* Highlights the computed property that matches the filter search value and
* returns a boolean indicating whether or not the computed property was
* highlighted.
*
* @param {TextPropertyEditor} editor
* The rule property TextPropertyEditor object.
* @return {bool} true if the computed property was highlighted, false
* otherwise.
*/
_highlightComputedProperty: function(editor) {
let isComputedHighlighted = false;
// Highlight search matches in the computed list of properties
for (let computed of editor.prop.computed) {
if (computed.element) {
// Get the actual property value displayed in the computed list
let computedName = computed.name.toLowerCase();
let computedValue = computed.parsedValue.toLowerCase();
isComputedHighlighted = this._highlightMatches(computed.element, {
searchName: this.searchPropertyName,
searchValue: this.searchPropertyValue,
propertyName: computedName,
propertyValue: computedValue,
propertyMatch: this.searchPropertyMatch
}) ? true : isComputedHighlighted;
}
}
return isComputedHighlighted;
},
/**
* Helper function for highlightRule that carries out highlighting the given
* element if the provided search terms match the property, and returns
* a boolean indicating whether or not the search terms match.
*
* @param {DOMNode} aElement
* The node to highlight if search terms match
* @param {String} searchName
* The parsed search name
* @param {String} searchValue
* The parsed search value
* @param {String} propertyName
* The property name of a rule
* @param {String} propertyValue
* The property value of a rule
* @param {Boolean} propertyMatch
* Whether or not the search term matches a property line like
* `font-family: arial`
* @param {DOMNode} element
* The node to highlight if search terms match
* @param {String} searchName
* The parsed search name
* @param {String} searchValue
* The parsed search value
* @param {String} propertyName
* The property name of a rule
* @param {String} propertyValue
* The property value of a rule
* @param {Boolean} propertyMatch
* Whether or not the search term matches a property line like
* `font-family: arial`
* @return {bool} true if the given search terms match the property, false
* otherwise.
*/
_highlightMatches: function(aElement, { searchName, searchValue, propertyName,
_highlightMatches: function(element, { searchName, searchValue, propertyName,
propertyValue, propertyMatch }) {
let matches = false;
@ -2162,7 +2250,7 @@ CssRuleView.prototype = {
}
if (matches) {
aElement.classList.add("ruleview-highlight");
element.classList.add("ruleview-highlight");
}
return matches;
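The middle of _highlightMatches is elided between these two hunks. As a rough, assumed sketch of the comparison the documented parameters imply (not the verbatim upstream body): when the search text parsed as a name/value pair, both halves must match; otherwise a plain term may match either the property name or its value.
// Illustrative only -- parameter names mirror the destructuring above.
function sketchMatches({ searchName, searchValue, propertyName,
                         propertyValue, propertyMatch }) {
  if (propertyMatch) {
    // Search looked like "font-family: arial": require both name and value to match.
    return propertyName.includes(searchName) &&
           propertyValue.includes(searchValue);
  }
  // Plain search term: a hit on either the name or the value counts.
  return propertyName.includes(searchName) ||
         propertyValue.includes(searchValue);
}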
@ -2172,16 +2260,15 @@ CssRuleView.prototype = {
* Clear all search filter highlights in the panel, and close the computed
* list if toggled opened
*/
_clearHighlights: function() {
for (let element of this.element.querySelectorAll(".ruleview-highlight")) {
element.classList.remove("ruleview-highlight");
_clearHighlight: function(element) {
for (let el of element.querySelectorAll(".ruleview-highlight")) {
el.classList.remove("ruleview-highlight");
}
for (let editor of this._editorsExpandedForFilter) {
editor.collapseForFilter();
for (let computed of element.querySelectorAll(
".ruleview-computedlist[filter-open]")) {
computed.parentNode._textPropertyEditor.collapseForFilter();
}
this._editorsExpandedForFilter = [];
},
/**
@ -2719,8 +2806,9 @@ RuleEditor.prototype = {
*/
function TextPropertyEditor(aRuleEditor, aProperty) {
this.ruleEditor = aRuleEditor;
this.ruleView = this.ruleEditor.ruleView;
this.doc = this.ruleEditor.doc;
this.popup = this.ruleEditor.ruleView.popup;
this.popup = this.ruleView.popup;
this.prop = aProperty;
this.prop.editor = this;
this.browserWindow = this.doc.defaultView.top;
@ -2744,7 +2832,7 @@ TextPropertyEditor.prototype = {
*/
get editing() {
return !!(this.nameSpan.inplaceEditor || this.valueSpan.inplaceEditor ||
this.ruleEditor.ruleView.tooltips.isEditing) || this.popup.isOpen;
this.ruleView.tooltips.isEditing) || this.popup.isOpen;
},
/**
@ -2753,6 +2841,7 @@ TextPropertyEditor.prototype = {
_create: function() {
this.element = this.doc.createElementNS(HTML_NS, "li");
this.element.classList.add("ruleview-property");
this.element._textPropertyEditor = this;
this.container = createChild(this.element, "div", {
class: "ruleview-propertycontainer"
@ -2807,7 +2896,7 @@ TextPropertyEditor.prototype = {
// so that colors can be coerced into the default color type. This prevents
// us from thinking that when colors are coerced they have been changed by
// the user.
let outputParser = this.ruleEditor.ruleView._outputParser;
let outputParser = this.ruleView._outputParser;
let frag = outputParser.parseCssProperty(this.prop.name, this.prop.value);
let parsedValue = frag.textContent;
@ -2935,7 +3024,7 @@ TextPropertyEditor.prototype = {
* Populate the span based on changes to the TextProperty.
*/
update: function() {
if (this.ruleEditor.ruleView.isDestroyed) {
if (this.ruleView.isDestroyed) {
return;
}
@ -2980,7 +3069,7 @@ TextPropertyEditor.prototype = {
const bezierSwatchClass = "ruleview-bezierswatch";
const filterSwatchClass = "ruleview-filterswatch";
let outputParser = this.ruleEditor.ruleView._outputParser;
let outputParser = this.ruleView._outputParser;
let parserOptions = {
colorSwatchClass: sharedSwatchClass + colorSwatchClass,
colorClass: "ruleview-color",
@ -3003,7 +3092,7 @@ TextPropertyEditor.prototype = {
for (let span of this._colorSwatchSpans) {
// Adding this swatch to the list of swatches our colorpicker
// knows about
this.ruleEditor.ruleView.tooltips.colorPicker.addSwatch(span, {
this.ruleView.tooltips.colorPicker.addSwatch(span, {
onPreview: () => this._previewValue(this.valueSpan.textContent),
onCommit: () => this._onValueDone(this.valueSpan.textContent, true),
onRevert: () => this._onValueDone(undefined, false)
@ -3018,7 +3107,7 @@ TextPropertyEditor.prototype = {
for (let span of this._bezierSwatchSpans) {
// Adding this swatch to the list of swatches our colorpicker
// knows about
this.ruleEditor.ruleView.tooltips.cubicBezier.addSwatch(span, {
this.ruleView.tooltips.cubicBezier.addSwatch(span, {
onPreview: () => this._previewValue(this.valueSpan.textContent),
onCommit: () => this._onValueDone(this.valueSpan.textContent, true),
onRevert: () => this._onValueDone(undefined, false)
@ -3032,7 +3121,7 @@ TextPropertyEditor.prototype = {
if (span) {
parserOptions.filterSwatch = true;
this.ruleEditor.ruleView.tooltips.filterEditor.addSwatch(span, {
this.ruleView.tooltips.filterEditor.addSwatch(span, {
onPreview: () => this._previewValue(this.valueSpan.textContent),
onCommit: () => this._onValueDone(this.valueSpan.textContent, true),
onRevert: () => this._onValueDone(undefined, false)
@ -3042,6 +3131,9 @@ TextPropertyEditor.prototype = {
// Populate the computed styles.
this._updateComputed();
// Update the rule property highlight.
this.ruleView._updatePropertyHighlight(this);
},
_onStartEditing: function() {
@ -3082,7 +3174,7 @@ TextPropertyEditor.prototype = {
});
appendText(li, ": ");
let outputParser = this.ruleEditor.ruleView._outputParser;
let outputParser = this.ruleView._outputParser;
let frag = outputParser.parseCssProperty(
computed.name, computed.value, {
colorSwatchClass: "ruleview-swatch ruleview-colorswatch",
@ -3211,7 +3303,7 @@ TextPropertyEditor.prototype = {
remove: function() {
if (this._colorSwatchSpans && this._colorSwatchSpans.length) {
for (let span of this._colorSwatchSpans) {
this.ruleEditor.ruleView.tooltips.colorPicker.removeSwatch(span);
this.ruleView.tooltips.colorPicker.removeSwatch(span);
}
}

View File

@ -131,6 +131,7 @@ skip-if = e10s # Bug 1090340
[browser_ruleview_search-filter-computed-list_05.js]
[browser_ruleview_search-filter-computed-list_06.js]
[browser_ruleview_search-filter-computed-list_07.js]
[browser_ruleview_search-filter-computed-list_08.js]
[browser_ruleview_search-filter-computed-list_clear.js]
[browser_ruleview_search-filter-computed-list_expander.js]
[browser_ruleview_search-filter_01.js]
@ -146,6 +147,9 @@ skip-if = e10s # Bug 1090340
[browser_ruleview_search-filter_11.js]
[browser_ruleview_search-filter_12.js]
[browser_ruleview_search-filter_13.js]
[browser_ruleview_search-filter_14.js]
[browser_ruleview_search-filter_15.js]
[browser_ruleview_search-filter_16.js]
[browser_ruleview_search-filter_clear.js]
[browser_ruleview_search-filter_context-menu.js]
[browser_ruleview_search-filter_escape-keypress.js]

View File

@ -0,0 +1,69 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Tests that the rule view search filter works properly in the computed list
// for newly modified property values.
const SEARCH = "0px";
let TEST_URI = [
"<style type='text/css'>",
" #testid {",
" margin: 4px;",
" top: 0px;",
" }",
"</style>",
"<h1 id='testid'>Styled Node</h1>"
].join("\n");
add_task(function*() {
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
let {inspector, view} = yield openRuleView();
yield selectNode("#testid", inspector);
yield testModifyPropertyValueFilter(inspector, view);
});
function* testModifyPropertyValueFilter(inspector, view) {
info("Setting filter text to \"" + SEARCH + "\"");
let searchField = view.searchField;
let onRuleViewFiltered = inspector.once("ruleview-filtered");
searchField.focus();
synthesizeKeys(SEARCH, view.styleWindow);
yield onRuleViewFiltered;
let rule = getRuleViewRuleEditor(view, 1).rule;
let propEditor = rule.textProps[0].editor;
let computed = propEditor.computed;
let editor = yield focusEditableField(view, propEditor.valueSpan);
info("Check that the correct rules are visible");
is(rule.selectorText, "#testid", "Second rule is #testid.");
ok(!propEditor.container.classList.contains("ruleview-highlight"),
"margin text property is not highlighted.");
ok(rule.textProps[1].editor.container.classList.contains("ruleview-highlight"),
"top text property is correctly highlighted.");
let onBlur = once(editor.input, "blur");
let onModification = rule._applyingModifications;
EventUtils.sendString("4px 0px", view.styleWindow);
EventUtils.synthesizeKey("VK_RETURN", {});
yield onBlur;
yield onModification;
ok(propEditor.container.classList.contains("ruleview-highlight"),
"margin text property is correctly highlighted.");
ok(!computed.hasAttribute("filter-open"), "margin computed list is closed.");
ok(!computed.children[0].classList.contains("ruleview-highlight"),
"margin-top computed property is not highlighted.");
ok(computed.children[1].classList.contains("ruleview-highlight"),
"margin-right computed property is correctly highlighted.");
ok(!computed.children[2].classList.contains("ruleview-highlight"),
"margin-bottom computed property is not highlighted.");
ok(computed.children[3].classList.contains("ruleview-highlight"),
"margin-left computed property is correctly highlighted.");
}

View File

@ -0,0 +1,62 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Tests that the rule view search filter works properly for a newly modified
// property value.
const SEARCH = "100%";
let TEST_URI = [
"<style type='text/css'>",
" #testid {",
" width: 100%;",
" height: 50%;",
" }",
"</style>",
"<h1 id='testid'>Styled Node</h1>"
].join("\n");
add_task(function*() {
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
let {inspector, view} = yield openRuleView();
yield selectNode("#testid", inspector);
yield testModifyPropertyValueFilter(inspector, view);
});
function* testModifyPropertyValueFilter(inspector, view) {
info("Setting filter text to \"" + SEARCH + "\"");
let searchField = view.searchField;
let onRuleViewFiltered = inspector.once("ruleview-filtered");
searchField.focus();
synthesizeKeys(SEARCH, view.styleWindow);
yield onRuleViewFiltered;
let ruleEditor = getRuleViewRuleEditor(view, 1);
let rule = ruleEditor.rule;
let propEditor = rule.textProps[1].editor;
let editor = yield focusEditableField(view, propEditor.valueSpan);
info("Check that the correct rules are visible");
is(view.element.children.length, 2, "Should have 2 rules.");
is(rule.selectorText, "#testid", "Second rule is #testid.");
ok(rule.textProps[0].editor.container.classList.contains("ruleview-highlight"),
"width text property is correctly highlighted.");
ok(!propEditor.container.classList.contains("ruleview-highlight"),
"height text property is not highlighted.");
let onBlur = once(editor.input, "blur");
let onModification = rule._applyingModifications;
EventUtils.sendString("100%", view.styleWindow);
EventUtils.synthesizeKey("VK_RETURN", {});
yield onBlur;
yield onModification;
ok(propEditor.container.classList.contains("ruleview-highlight"),
"height text property is correctly highlighted.");
}

View File

@ -0,0 +1,62 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Tests that the rule view search filter works properly for a newly modified
// property name.
const SEARCH = "e";
let TEST_URI = [
"<style type='text/css'>",
" #testid {",
" width: 100%;",
" height: 50%;",
" }",
"</style>",
"<h1 id='testid'>Styled Node</h1>"
].join("\n");
add_task(function*() {
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
let {inspector, view} = yield openRuleView();
yield selectNode("#testid", inspector);
yield testModifyPropertyNameFilter(inspector, view);
});
function* testModifyPropertyNameFilter(inspector, view) {
info("Setting filter text to \"" + SEARCH + "\"");
let searchField = view.searchField;
let onRuleViewFiltered = inspector.once("ruleview-filtered");
searchField.focus();
synthesizeKeys(SEARCH, view.styleWindow);
yield onRuleViewFiltered;
let ruleEditor = getRuleViewRuleEditor(view, 1);
let rule = ruleEditor.rule;
let propEditor = rule.textProps[0].editor;
let editor = yield focusEditableField(view, propEditor.nameSpan);
info("Check that the correct rules are visible");
is(view.element.children.length, 2, "Should have 2 rules.");
is(rule.selectorText, "#testid", "Second rule is #testid.");
ok(!propEditor.container.classList.contains("ruleview-highlight"),
"width text property is not highlighted.");
ok(rule.textProps[1].editor.container.classList.contains("ruleview-highlight"),
"height text property is correctly highlighted.");
let onBlur = once(editor.input, "blur");
let onModification = rule._applyingModifications;
EventUtils.sendString("margin-left", view.styleWindow);
EventUtils.synthesizeKey("VK_RETURN", {});
yield onBlur;
yield onModification;
ok(propEditor.container.classList.contains("ruleview-highlight"),
"margin-left text property is correctly highlighted.");
}

View File

@ -0,0 +1,78 @@
/* vim: set ft=javascript ts=2 et sw=2 tw=80: */
/* Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Tests that the rule view search filter works properly for a newly added
// property.
const SEARCH = "100%";
let TEST_URI = [
"<style type='text/css'>",
" #testid {",
" width: 100%;",
" height: 50%;",
" }",
"</style>",
"<h1 id='testid'>Styled Node</h1>"
].join("\n");
add_task(function*() {
yield addTab("data:text/html;charset=utf-8," + encodeURIComponent(TEST_URI));
let {inspector, view} = yield openRuleView();
yield selectNode("#testid", inspector);
yield testNewPropertyFilter(inspector, view);
});
function* testNewPropertyFilter(inspector, view) {
info("Setting filter text to \"" + SEARCH + "\"");
let searchField = view.searchField;
let onRuleViewFiltered = inspector.once("ruleview-filtered");
searchField.focus();
synthesizeKeys(SEARCH, view.styleWindow);
yield onRuleViewFiltered;
let ruleEditor = getRuleViewRuleEditor(view, 1);
let rule = ruleEditor.rule;
let editor = yield focusEditableField(view, ruleEditor.closeBrace);
info("Check that the correct rules are visible");
is(view.element.children.length, 2, "Should have 2 rules.");
is(rule.selectorText, "#testid", "Second rule is #testid.");
ok(rule.textProps[0].editor.container.classList.contains("ruleview-highlight"),
"width text property is correctly highlighted.");
ok(!rule.textProps[1].editor.container.classList.contains("ruleview-highlight"),
"height text property is not highlighted.");
info("Test creating a new property");
info("Entering margin-left in the property name editor");
editor.input.value = "margin-left";
info("Pressing return to commit and focus the new value field");
let onValueFocus = once(ruleEditor.element, "focus", true);
let onModifications = ruleEditor.rule._applyingModifications;
EventUtils.synthesizeKey("VK_RETURN", {}, view.styleWindow);
yield onValueFocus;
yield onModifications;
// Getting the new value editor after focus
editor = inplaceEditor(view.styleDocument.activeElement);
let propEditor = ruleEditor.rule.textProps[2].editor;
info("Entering a value and bluring the field to expect a rule change");
editor.input.value = "100%";
let onBlur = once(editor.input, "blur");
onModifications = ruleEditor.rule._applyingModifications;
editor.input.blur();
yield onBlur;
yield onModifications;
ok(propEditor.container.classList.contains("ruleview-highlight"),
"margin-left text property is correctly highlighted.");
}

View File

@ -34,40 +34,13 @@ newtab.suggested.explain=This site is suggested to you by Mozilla. You can remov
# the gear icon used to customize the new tab window. %2$S will be replaced by
# an active link using string newtab.learn.link as text.
newtab.enhanced.explain=A Mozilla partner has visually enhanced this tile, replacing the screenshot. You can turn off enhanced tiles by clicking the %1$S button for your preferences. %2$S
# LOCALIZATION NOTE(newtab.intro.paragraph2): %1$S will be replaced inline by
# active link using string newtab.privacy.link as text.
newtab.intro.paragraph2=In order to provide this service, Mozilla collects and uses certain analytics information relating to your use of the tiles in accordance with our %1$S.
# LOCALIZATION NOTE(newtab.intro.paragraph4.2): %1$S will be replaced inline by
# the gear icon used to customize the new tab window. %2$S will be replaced by
# newtab.intro.controls as text. The quoted strings should be the same as
# newtab.customize.cog.enhanced.
newtab.intro.paragraph4.2=You can turn off this service by clicking the gear (%1$S) button and unchecking "Include suggested sites" in the %2$S menu.
newtab.intro.paragraph5=New Tab will show the sites you visit most frequently, along with sites we think might be of interest to you. To get started, you'll see several sites from Mozilla.
# LOCALIZATION NOTE(newtab.intro.paragraph6): %1$S will be replaced by
# newtab.intro.paragraph6.remove as bold text. %2$S will be replaced by
# newtab.intro.paragraph6.pin as bold text
newtab.intro.paragraph6=You can %1$S or %2$S any site by using the controls available on rollover.
newtab.intro.paragraph6.remove=remove
newtab.intro.paragraph6.pin=pin
newtab.intro.paragraph7=Some of the sites you will see may be suggested by Mozilla and may be sponsored by a Mozilla partner. We'll always indicate which sites are sponsored.
# LOCALIZATION NOTE(newtab.intro.paragraph8): %1$S will be replaced by
# brandShortName as text. %2$S will be replaced inline by an active link using
# string newtab.learn.link as text.
newtab.intro.paragraph8=%1$S will only show sites that most closely match your interests on the Web. %2$S
newtab.intro.paragraph9=Now when you open New Tab, you'll also see sites we think might be interesting to you.
# LOCALIZATION NOTE(newtab.intro.controls): the controls in the gear icon
# menu for customizing the new tab window. Used in newtab.intro.paragraph4
newtab.intro.controls=New Tab Controls
newtab.intro1.paragraph1=Now when you open New Tab, you'll also see sites we think might be interesting to you. Some may be suggested by Mozilla or sponsored by one of our partners.
# LOCALIZATION NOTE(newtab.intro1.paragraph2): %1$S will be replaced inline by
# an active link using string newtab.privacy.link as text. %2$S will be replaced
# inline by the gear icon used to customize the new tab window.
newtab.intro1.paragraph2=In order to provide this service, some data is automatically sent back to us in accordance with our %1$S. You can turn this off by unchecking the option under the gear icon (%2$S).
newtab.learn.link=Learn more…
newtab.privacy.link=Privacy Notice
newtab.learn.link2=More about New Tab
newtab.privacy.link2=About your privacy
# LOCALIZATION NOTE(newtab.intro.header.welcome): %1$S will be replaced by
# brandShortName as bold text.
newtab.intro.header.welcome=Welcome to New Tab on %1$S!
newtab.intro.header.update=New Tab got an update!
newtab.intro.skip=Skip this
newtab.intro.continue=Continue tour
newtab.intro.back=Back
newtab.intro.next=Next
newtab.intro.gotit=Got it!
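
A side note on the placeholders: the LOCALIZATION NOTEs above describe %1$S/%2$S substitutions that are performed at runtime. As a rough, hypothetical sketch (the bundle URL is assumed, and privacyLink/gearIcon stand in for whatever markup the caller supplies), such a string would be formatted from script roughly like this:

let bundle = Services.strings.createBundle("chrome://browser/locale/newTab.properties");
// Fill the two placeholders in newtab.intro1.paragraph2.
let text = bundle.formatStringFromName("newtab.intro1.paragraph2",
                                       [privacyLink, gearIcon], 2);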

View File

@ -52,8 +52,6 @@ that require it. -->
<!ENTITY itemHistoryAndDownloads.accesskey "B">
<!ENTITY itemFormSearchHistory.label "Form &amp; Search History">
<!ENTITY itemFormSearchHistory.accesskey "F">
<!ENTITY itemPasswords.label "Saved Passwords">
<!ENTITY itemPasswords.accesskey "P">
<!ENTITY itemCookies.label "Cookies">
<!ENTITY itemCookies.accesskey "C">
<!ENTITY itemCache.label "Cache">

View File

@ -963,6 +963,10 @@ toolbarbutton[constrain-size="true"][cui-areatype="toolbar"] > .toolbarbutton-ba
color: GrayText;
}
#PopupAutoCompleteRichResult > richlistbox {
transition: height 100ms;
}
#search-container {
min-width: calc(54px + 11ch);
}

View File

@ -1827,6 +1827,10 @@ toolbarbutton[constrain-size="true"][cui-areatype="toolbar"] > .toolbarbutton-ba
color: GrayText;
}
#PopupAutoCompleteRichResult > richlistbox {
transition: height 100ms;
}
#PopupAutoCompleteRichResult {
margin-top: 2px;
}

View File

@ -515,6 +515,17 @@ toolbarpaletteitem[place="palette"] > toolbaritem > toolbarbutton {
display: none;
}
#PanelUI-footer-fxa[fxastatus="error"] > #PanelUI-fxa-status::after {
content: url(chrome://browser/skin/warning.svg);
filter: drop-shadow(0 1px 0 hsla(206, 50%, 10%, .15));
width: 47px;
padding-top: 1px;
display: block;
text-align: center;
position: relative;
top: 25%;
}
#PanelUI-fxa-status {
display: flex;
flex: 1 1 0%;
@ -755,16 +766,16 @@ toolbarpaletteitem[place="palette"] > toolbaritem > toolbarbutton {
}
#PanelUI-footer-fxa[fxastatus="error"] {
background-color: rgb(255, 236, 158);
border-top: 1px solid rgb(254, 212, 21);
background-color: hsla(42, 94%, 88%, 1.0);
border-top: 1px solid hsla(42, 94%, 70%, 1.0);
}
#PanelUI-footer-fxa[fxastatus="error"] > #PanelUI-fxa-status:hover {
background-color: #F9E79A;
background-color: hsla(42, 94%, 85%, 1.0);
}
#PanelUI-footer-fxa[fxastatus="error"] > #PanelUI-fxa-status:hover:active {
background-color: #ECDB92;
background-color: hsla(42, 94%, 82%, 1.0);
box-shadow: 0 1px 0 hsla(210,4%,10%,.05) inset;
}

View File

@ -404,3 +404,9 @@ description > html|a {
#tosPP-small-ToS {
margin-bottom: 1em;
}
#fxaLoginRejectedWarning {
list-style-image: url(chrome://browser/skin/warning.svg);
filter: drop-shadow(0 1px 0 hsla(206, 50%, 10%, .15));
margin: 4px 8px 0px 0px;
}

View File

@ -87,9 +87,7 @@
}
/* CELLS */
.newtab-cell,
.newtab-intro-cell,
.newtab-intro-cell-hover {
.newtab-cell {
background-color: rgba(255,255,255,.2);
border-radius: 8px;
}
@ -109,19 +107,12 @@
.newtab-cell:not([ignorehover]) .newtab-control:hover ~ .newtab-link,
.newtab-cell:not([ignorehover]) .newtab-link:hover,
.newtab-site[dragged],
.newtab-intro-cell-hover .newtab-link {
.newtab-site[dragged] {
border: 2px solid white;
box-shadow: 0 0 6px 2px #4cb1ff;
margin: -2px;
}
.newtab-intro-cell .newtab-thumbnail,
.newtab-intro-cell-hover .newtab-thumbnail {
background-color: #cae1f4;
background-image: url("chrome://browser/skin/newtab/whimsycorn.png");
}
.newtab-site[dragged] {
transition-property: box-shadow, background-color;
background-color: rgb(242,242,242);

View File

@ -1403,6 +1403,10 @@ html|*.urlbar-input:-moz-lwtheme::-moz-placeholder,
color: GrayText;
}
#PopupAutoCompleteRichResult > richlistbox {
transition: height 100ms;
}
#search-container {
min-width: calc(54px + 11ch);
}

View File

@ -2929,26 +2929,17 @@ nsDocShell::HistoryTransactionRemoved(int32_t aIndex)
return NS_OK;
}
mozilla::LinkedList<nsDocShell::ObservedDocShell>* nsDocShell::gObservedDocShells = nullptr;
NS_IMETHODIMP
nsDocShell::SetRecordProfileTimelineMarkers(bool aValue)
{
bool currentValue = nsIDocShell::GetRecordProfileTimelineMarkers();
if (currentValue != aValue) {
if (aValue) {
TimelineConsumers::AddConsumer();
TimelineConsumers::AddConsumer(this, mObserved);
UseEntryScriptProfiling();
MOZ_ASSERT(!mObserved);
mObserved.reset(new ObservedDocShell(this));
GetOrCreateObservedDocShells().insertFront(mObserved.get());
} else {
TimelineConsumers::RemoveConsumer();
TimelineConsumers::RemoveConsumer(this, mObserved);
UnuseEntryScriptProfiling();
mObserved.reset(nullptr);
ClearProfileTimelineMarkers();
}
}

View File

@ -35,6 +35,7 @@
#include "nsContentUtils.h"
#include "timeline/TimelineMarker.h"
#include "timeline/TimelineConsumers.h"
#include "timeline/ObservedDocShell.h"
// Threshold value in ms for META refresh based redirects
#define REFRESH_REDIRECT_TIMER 15000
@ -263,43 +264,12 @@ public:
void AddProfileTimelineMarker(const char* aName, TracingMetadata aMetaData);
void AddProfileTimelineMarker(mozilla::UniquePtr<TimelineMarker>&& aMarker);
class ObservedDocShell : public mozilla::LinkedListElement<ObservedDocShell>
{
public:
explicit ObservedDocShell(nsDocShell* aDocShell)
: mDocShell(aDocShell)
{ }
nsDocShell* operator*() const { return mDocShell.get(); }
private:
nsRefPtr<nsDocShell> mDocShell;
};
private:
static mozilla::LinkedList<ObservedDocShell>* gObservedDocShells;
static mozilla::LinkedList<ObservedDocShell>& GetOrCreateObservedDocShells()
{
if (!gObservedDocShells) {
gObservedDocShells = new mozilla::LinkedList<ObservedDocShell>();
}
return *gObservedDocShells;
}
// Never null if timeline markers are being observed.
mozilla::UniquePtr<ObservedDocShell> mObserved;
// Return true if timeline markers are being observed for this docshell. False
// otherwise.
// An observed docshell wrapper is created when recording markers is enabled.
mozilla::UniquePtr<mozilla::ObservedDocShell> mObserved;
bool IsObserved() const { return !!mObserved; }
public:
static const mozilla::LinkedList<ObservedDocShell>& GetObservedDocShells()
{
return GetOrCreateObservedDocShells();
}
// Tell the favicon service that aNewURI has the same favicon as aOldURI.
static void CopyFavicon(nsIURI* aOldURI,
nsIURI* aNewURI,

View File

@ -12,28 +12,11 @@
namespace mozilla {
void
AutoGlobalTimelineMarker::PopulateDocShells()
{
const LinkedList<nsDocShell::ObservedDocShell>& docShells =
nsDocShell::GetObservedDocShells();
MOZ_ASSERT(!docShells.isEmpty());
for (const nsDocShell::ObservedDocShell* ds = docShells.getFirst();
ds;
ds = ds->getNext()) {
mOk = mDocShells.append(**ds);
if (!mOk) {
return;
}
}
}
AutoGlobalTimelineMarker::AutoGlobalTimelineMarker(const char* aName
MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
: mOk(true)
: mName(aName)
, mDocShells()
, mName(aName)
, mDocShellsRetrieved(false)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
MOZ_ASSERT(NS_IsMainThread());
@ -42,8 +25,8 @@ AutoGlobalTimelineMarker::AutoGlobalTimelineMarker(const char* aName
return;
}
PopulateDocShells();
if (!mOk) {
mDocShellsRetrieved = TimelineConsumers::GetKnownDocShells(mDocShells);
if (!mDocShellsRetrieved) {
// If we don't successfully populate our vector with *all* docshells being
// observed, don't add markers to *any* of them.
return;
@ -58,7 +41,7 @@ AutoGlobalTimelineMarker::AutoGlobalTimelineMarker(const char* aName
AutoGlobalTimelineMarker::~AutoGlobalTimelineMarker()
{
if (!mOk) {
if (!mDocShellsRetrieved) {
return;
}

View File

@ -34,21 +34,18 @@ class MOZ_STACK_CLASS AutoGlobalTimelineMarker
{
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
// True as long as no operation has failed, eg due to OOM.
bool mOk;
// The set of docshells that are being observed and will get markers.
mozilla::Vector<nsRefPtr<nsDocShell>> mDocShells;
// The name of the marker we are adding.
const char* mName;
void PopulateDocShells();
// The set of docshells that will get the marker.
Vector<nsRefPtr<nsDocShell>> mDocShells;
// True as long as no operation has failed, eg due to OOM.
bool mDocShellsRetrieved;
public:
explicit AutoGlobalTimelineMarker(const char* aName
MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
~AutoGlobalTimelineMarker();
AutoGlobalTimelineMarker(const AutoGlobalTimelineMarker& aOther) = delete;

View File

@ -11,26 +11,23 @@
namespace mozilla {
bool
AutoTimelineMarker::DocShellIsRecording(nsDocShell& aDocShell)
{
bool isRecording = false;
if (!TimelineConsumers::IsEmpty()) {
aDocShell.GetRecordProfileTimelineMarkers(&isRecording);
}
return isRecording;
}
AutoTimelineMarker::AutoTimelineMarker(nsIDocShell* aDocShell, const char* aName
MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
: mDocShell(nullptr)
, mName(aName)
: mName(aName)
, mDocShell(nullptr)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
MOZ_ASSERT(NS_IsMainThread());
if (TimelineConsumers::IsEmpty()) {
return;
}
bool isRecordingEnabledForDocShell = false;
nsDocShell* docShell = static_cast<nsDocShell*>(aDocShell);
if (docShell && DocShellIsRecording(*docShell)) {
aDocShell->GetRecordProfileTimelineMarkers(&isRecordingEnabledForDocShell);
if (isRecordingEnabledForDocShell) {
mDocShell = docShell;
mDocShell->AddProfileTimelineMarker(mName, TRACING_INTERVAL_START);
}

View File

@ -32,10 +32,11 @@ class MOZ_STACK_CLASS AutoTimelineMarker
{
MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
nsRefPtr<nsDocShell> mDocShell;
// The name of the marker we are adding.
const char* mName;
bool DocShellIsRecording(nsDocShell& aDocShell);
// The docshell that is associated with this marker.
nsRefPtr<nsDocShell> mDocShell;
public:
explicit AutoTimelineMarker(nsIDocShell* aDocShell, const char* aName

View File

@ -0,0 +1,15 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "ObservedDocShell.h"
namespace mozilla {
ObservedDocShell::ObservedDocShell(nsDocShell* aDocShell)
: mDocShell(aDocShell)
{}
} // namespace mozilla

View File

@ -0,0 +1,32 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef ObservedDocShell_h_
#define ObservedDocShell_h_
#include "nsRefPtr.h"
class nsDocShell;
namespace mozilla {
// # ObservedDocShell
//
// A wrapper around a docshell for which docshell-specific markers are
// allowed to exist. See TimelineConsumers for register/unregister logic.
class ObservedDocShell : public LinkedListElement<ObservedDocShell>
{
private:
nsRefPtr<nsDocShell> mDocShell;
public:
explicit ObservedDocShell(nsDocShell* aDocShell);
nsDocShell* operator*() const { return mDocShell.get(); }
};
} // namespace mozilla
#endif /* ObservedDocShell_h_ */

View File

@ -9,17 +9,35 @@
namespace mozilla {
unsigned long TimelineConsumers::sActiveConsumers = 0;
LinkedList<ObservedDocShell>* TimelineConsumers::sObservedDocShells = nullptr;
void
TimelineConsumers::AddConsumer()
LinkedList<ObservedDocShell>&
TimelineConsumers::GetOrCreateObservedDocShellsList()
{
sActiveConsumers++;
if (!sObservedDocShells) {
sObservedDocShells = new LinkedList<ObservedDocShell>();
}
return *sObservedDocShells;
}
void
TimelineConsumers::RemoveConsumer()
TimelineConsumers::AddConsumer(nsDocShell* aDocShell,
UniquePtr<ObservedDocShell>& aObservedPtr)
{
MOZ_ASSERT(!aObservedPtr);
sActiveConsumers++;
aObservedPtr.reset(new ObservedDocShell(aDocShell));
GetOrCreateObservedDocShellsList().insertFront(aObservedPtr.get());
}
void
TimelineConsumers::RemoveConsumer(nsDocShell* aDocShell,
UniquePtr<ObservedDocShell>& aObservedPtr)
{
MOZ_ASSERT(aObservedPtr);
sActiveConsumers--;
aObservedPtr.get()->remove();
aObservedPtr.reset(nullptr);
}
bool
@ -28,4 +46,20 @@ TimelineConsumers::IsEmpty()
return sActiveConsumers == 0;
}
bool
TimelineConsumers::GetKnownDocShells(Vector<nsRefPtr<nsDocShell>>& aStore)
{
const LinkedList<ObservedDocShell>& docShells = GetOrCreateObservedDocShellsList();
for (const ObservedDocShell* rds = docShells.getFirst();
rds != nullptr;
rds = rds->getNext()) {
if (!aStore.append(**rds)) {
return false;
}
}
return true;
}
} // namespace mozilla

View File

@ -7,24 +7,29 @@
#ifndef mozilla_TimelineConsumers_h_
#define mozilla_TimelineConsumers_h_
#include "mozilla/LinkedList.h"
#include "mozilla/UniquePtr.h"
#include "timeline/ObservedDocShell.h"
class nsDocShell;
namespace mozilla {
// # TimelineConsumers
//
// A class to trace how many frontends are interested in markers. Whenever
// interest is expressed in markers, these fields will keep track of that.
class TimelineConsumers
{
private:
// Counter for how many timelines are currently interested in markers.
static unsigned long sActiveConsumers;
static LinkedList<ObservedDocShell>* sObservedDocShells;
static LinkedList<ObservedDocShell>& GetOrCreateObservedDocShellsList();
public:
static void AddConsumer();
static void RemoveConsumer();
static void AddConsumer(nsDocShell* aDocShell,
UniquePtr<ObservedDocShell>& aObservedPtr);
static void RemoveConsumer(nsDocShell* aDocShell,
UniquePtr<ObservedDocShell>& aObservedPtr);
static bool IsEmpty();
static bool GetKnownDocShells(Vector<nsRefPtr<nsDocShell>>& aStore);
};
} // namespace mozilla

View File

@ -13,6 +13,7 @@ EXPORTS.mozilla += [
UNIFIED_SOURCES += [
'AutoGlobalTimelineMarker.cpp',
'AutoTimelineMarker.cpp',
'ObservedDocShell.cpp',
'TimelineConsumers.cpp',
'TimelineMarker.cpp',
]

View File

@ -7025,7 +7025,7 @@ var IdentityHandler = {
// Loaded active mixed content. Yellow triangle icon is shown.
MIXED_MODE_CONTENT_LOADED: "mixed_content_loaded",
// The following tracking content modes are only used if "privacy.trackingprotection.enabled"
// The following tracking content modes are only used if tracking protection
// is enabled. Our Java frontend coalesces them into one indicator.
// No tracking content information. No tracking content icon is shown.
@ -7103,15 +7103,18 @@ var IdentityHandler = {
return this.MIXED_MODE_UNKNOWN;
},
getTrackingMode: function getTrackingMode(aState) {
getTrackingMode: function getTrackingMode(aState, aBrowser) {
if (aState & Ci.nsIWebProgressListener.STATE_BLOCKED_TRACKING_CONTENT) {
Telemetry.addData("TRACKING_PROTECTION_SHIELD", 2);
return this.TRACKING_MODE_CONTENT_BLOCKED;
}
// Only show an indicator for loaded tracking content if the pref to block it is enabled
if ((aState & Ci.nsIWebProgressListener.STATE_LOADED_TRACKING_CONTENT) &&
Services.prefs.getBoolPref("privacy.trackingprotection.enabled")) {
let tpEnabled = Services.prefs.getBoolPref("privacy.trackingprotection.enabled") ||
(Services.prefs.getBoolPref("privacy.trackingprotection.pbmode.enabled") &&
PrivateBrowsingUtils.isBrowserPrivate(aBrowser));
if ((aState & Ci.nsIWebProgressListener.STATE_LOADED_TRACKING_CONTENT) && tpEnabled) {
Telemetry.addData("TRACKING_PROTECTION_SHIELD", 1);
return this.TRACKING_MODE_CONTENT_LOADED;
}
@ -7148,7 +7151,7 @@ var IdentityHandler = {
let identityMode = this.getIdentityMode(aState);
let mixedMode = this.getMixedMode(aState);
let trackingMode = this.getTrackingMode(aState);
let trackingMode = this.getTrackingMode(aState, aBrowser);
let result = {
origin: locationObj.origin,
mode: {

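In short, after this change the "loaded tracking content" indicator also considers whether the tab is private. A standalone sketch of the decision, mirroring the code above (the function name is made up for illustration; Services, PrivateBrowsingUtils and Ci are the usual globals in this file):

function shouldShowLoadedTrackingIndicator(aState, aBrowser) {
  let tpEnabled = Services.prefs.getBoolPref("privacy.trackingprotection.enabled") ||
                  (Services.prefs.getBoolPref("privacy.trackingprotection.pbmode.enabled") &&
                   PrivateBrowsingUtils.isBrowserPrivate(aBrowser));
  // Show the indicator only when tracking content was loaded and some form of
  // tracking protection applies to this browser.
  return !!(aState & Ci.nsIWebProgressListener.STATE_LOADED_TRACKING_CONTENT) && tpEnabled;
}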
View File

@ -575,6 +575,21 @@ function do_report_result(passed, text, stack, todo) {
}
}
/**
* Checks for a true condition, with a success message.
*/
function ok(condition, msg) {
do_report_result(condition, msg, Components.stack.caller, false);
}
/**
* Checks that two values are strictly equal, with a success message.
*/
function is(left, right, msg) {
do_report_result(left === right, "[ " + left + " === " + right + " ] " + msg,
Components.stack.caller, false);
}
function _do_check_eq(left, right, stack, todo) {
if (!stack)
stack = Components.stack.caller;

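With ok() and is() now provided by this shared harness, the per-test copies in the files below become redundant and are removed. A minimal, hypothetical test using the shared helpers (assuming the usual add_test/run_next_test flow used elsewhere in these tests):

add_test(function test_shared_helpers() {
  let value = 1 + 1;
  ok(value > 0, "value should be positive");
  is(value, 2, "value should equal two");
  run_next_test();
});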
View File

@ -9,10 +9,6 @@ const { interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/AndroidLog.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
const LOGIN_FIELDS = {
hostname: "http://example.org/tests/robocop/robocop_blank_01.html",
formSubmitUrl: "",

View File

@ -9,10 +9,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
// We use a global variable to track the <browser> where the tests are happening
let browser;

View File

@ -9,14 +9,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, text, Components.stack.caller, false);
}
function promiseBrowserEvent(browser, eventType) {
return new Promise((resolve) => {
function handle(event) {

View File

@ -9,14 +9,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, text, Components.stack.caller, false);
}
add_test(function filepicker_open() {
let chromeWin = Services.wm.getMostRecentWindow("navigator:browser");

View File

@ -10,10 +10,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
// Make the timer global so it doesn't get GC'd
let gTimer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);

View File

@ -9,10 +9,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
add_test(function check_linktype() {
// Let's exercise the interface. Even if the network is not up, we can make sure nothing blows up.
let network = Cc["@mozilla.org/network/network-link-service;1"].getService(Ci.nsINetworkLinkService);

View File

@ -10,14 +10,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/Messaging.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, text, Components.stack.caller, false);
}
function promiseBrowserEvent(browser, eventType) {
return new Promise((resolve) => {
function handle(event) {

View File

@ -24,18 +24,6 @@ const TAB_STOP_EVENT = "STOP";
const gChromeWin = Services.wm.getMostRecentWindow("navigator:browser");
/**
* Robocop test helpers.
*/
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, "[ " + lhs + " === " + rhs + " ] " + text,
Components.stack.caller, false);
}
/**
* Wait for and return, when an expected tab change event occurs.
*

View File

@ -11,14 +11,6 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/Task.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, text, Components.stack.caller, false);
}
// Make the timer global so it doesn't get GC'd
let gTimer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);

View File

@ -11,10 +11,6 @@ Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/Messaging.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function promiseLoadEvent(browser, url, eventType="load", runBeforeLoad) {
return new Promise((resolve, reject) => {
do_print("Wait browser event: " + eventType);
@ -47,7 +43,6 @@ function promiseLoadEvent(browser, url, eventType="load", runBeforeLoad) {
// Code is mostly stolen from:
// http://mxr.mozilla.org/mozilla-central/source/browser/base/content/test/general/browser_trackingUI.js
var PREF = "privacy.trackingprotection.enabled";
var TABLE = "urlclassifier.trackingTable";
// Update tracking database
@ -88,36 +83,24 @@ function doUpdate() {
});
}
// Track the <browser> where the tests are happening
let browser;
let BrowserApp = Services.wm.getMostRecentWindow("navigator:browser").BrowserApp;
add_test(function setup_browser() {
let chromeWin = Services.wm.getMostRecentWindow("navigator:browser");
let BrowserApp = chromeWin.BrowserApp;
do_register_cleanup(function cleanup() {
Services.prefs.clearUserPref(PREF);
Services.prefs.clearUserPref(TABLE);
BrowserApp.closeTab(BrowserApp.getTabForBrowser(browser));
// Tests the tracking protection UI in private browsing. By default, tracking protection is
// enabled in private browsing ("privacy.trackingprotection.pbmode.enabled").
add_task(function* test_tracking_pb() {
// Load a blank page
let browser = BrowserApp.addTab("about:blank", { selected: true, parentId: BrowserApp.selectedTab.id, isPrivate: true }).browser;
yield new Promise((resolve, reject) => {
browser.addEventListener("load", function startTests(event) {
browser.removeEventListener("load", startTests, true);
Services.tm.mainThread.dispatch(resolve, Ci.nsIThread.DISPATCH_NORMAL);
}, true);
});
// Load a blank page
let url = "about:blank";
browser = BrowserApp.addTab(url, { selected: true, parentId: BrowserApp.selectedTab.id }).browser;
browser.addEventListener("load", function startTests(event) {
browser.removeEventListener("load", startTests, true);
Services.tm.mainThread.dispatch(run_next_test, Ci.nsIThread.DISPATCH_NORMAL);
}, true);
});
add_task(function* () {
// Populate and use 'test-track-simple' for tracking protection lookups
Services.prefs.setCharPref(TABLE, "test-track-simple");
yield doUpdate();
// Enable Tracking Protection
Services.prefs.setBoolPref(PREF, true);
// Point tab to a test page NOT containing tracking elements
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_good.html");
Messaging.sendRequest({ type: "Test:Expected", expected: "unknown" });
@ -139,8 +122,8 @@ add_task(function* () {
});
Messaging.sendRequest({ type: "Test:Expected", expected: "tracking_content_blocked" });
// Disable Tracking Protection
Services.prefs.setBoolPref(PREF, false);
// Disable tracking protection to make sure we don't show the UI when the pref is disabled.
Services.prefs.setBoolPref("privacy.trackingprotection.pbmode.enabled", false);
// Point tab to a test page containing tracking elements
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_bad.html");
@ -149,6 +132,35 @@ add_task(function* () {
// Point tab to a test page NOT containing tracking elements
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_good.html");
Messaging.sendRequest({ type: "Test:Expected", expected: "unknown" });
// Reset the pref before the next testcase
Services.prefs.clearUserPref("privacy.trackingprotection.pbmode.enabled");
});
add_task(function* test_tracking_not_pb() {
// Load a blank page
let browser = BrowserApp.addTab("about:blank", { selected: true }).browser;
yield new Promise((resolve, reject) => {
browser.addEventListener("load", function startTests(event) {
browser.removeEventListener("load", startTests, true);
Services.tm.mainThread.dispatch(resolve, Ci.nsIThread.DISPATCH_NORMAL);
}, true);
});
// Point tab to a test page NOT containing tracking elements
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_good.html");
Messaging.sendRequest({ type: "Test:Expected", expected: "unknown" });
// Point tab to a test page containing tracking elements (tracking protection UI *should not* be shown)
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_bad.html");
Messaging.sendRequest({ type: "Test:Expected", expected: "unknown" });
// Enable tracking protection in normal tabs
Services.prefs.setBoolPref("privacy.trackingprotection.enabled", true);
// Point tab to a test page containing tracking elements (tracking protection UI *should* be shown)
yield promiseLoadEvent(browser, "http://tracking.example.org/tests/robocop/tracking_bad.html");
Messaging.sendRequest({ type: "Test:Expected", expected: "tracking_content_blocked" });
});
run_next_test();

View File

@ -10,10 +10,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/SimpleServiceDiscovery.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
// The chrome window
let chromeWin;

View File

@ -12,10 +12,6 @@ const { classes: Cc, interfaces: Ci, utils: Cu } = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/SimpleServiceDiscovery.jsm");
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
// The chrome window
let chromeWin;

View File

@ -17,18 +17,6 @@ const HTTP_ENDPOINT = "/tests/robocop/testWebChannel.html";
const gChromeWin = Services.wm.getMostRecentWindow("navigator:browser");
let BrowserApp = gChromeWin.BrowserApp;
/**
* Robocop test helpers.
*/
function ok(passed, text) {
do_report_result(passed, text, Components.stack.caller, false);
}
function is(lhs, rhs, text) {
do_report_result(lhs === rhs, "[ " + lhs + " === " + rhs + " ] " + text,
Components.stack.caller, false);
}
// Keep this synced with /browser/base/content/test/general/browser_web_channel.js
// as much as possible. (We only have this since we can't run browser chrome
// tests on Android. Yet?)

View File

@ -1,6 +1,7 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
this.EXPORTED_SYMBOLS = ["fxAccounts", "FxAccounts"];
@ -8,13 +9,13 @@ const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://gre/modules/Promise.jsm");
Cu.import("resource://gre/modules/osfile.jsm");
Cu.import("resource://services-common/utils.js");
Cu.import("resource://services-crypto/utils.js");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/XPCOMUtils.jsm");
Cu.import("resource://gre/modules/Timer.jsm");
Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://gre/modules/FxAccountsStorage.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
XPCOMUtils.defineLazyModuleGetter(this, "FxAccountsClient",
@ -50,7 +51,6 @@ let publicProperties = [
"resendVerificationEmail",
"setSignedInUser",
"signOut",
"version",
"whenVerified"
];
@ -72,28 +72,27 @@ let publicProperties = [
// }
// If the state has changed between the function being called and the promise
// being resolved, the .resolve() call will actually be rejected.
let AccountState = function(fxaInternal, signedInUserStorage, accountData = null) {
let AccountState = this.AccountState = function(fxaInternal, storageManager) {
this.fxaInternal = fxaInternal;
this.signedInUserStorage = signedInUserStorage;
this.signedInUser = accountData ? {version: DATA_FORMAT_VERSION, accountData} : null;
this.uid = accountData ? accountData.uid : null;
this.oauthTokens = {};
this.storageManager = storageManager;
this.promiseInitialized = this.storageManager.getAccountData().then(data => {
this.oauthTokens = data && data.oauthTokens ? data.oauthTokens : {};
}).catch(err => {
log.error("Failed to initialize the storage manager", err);
// Things are going to fall apart, but not much we can do about it here.
});
};
AccountState.prototype = {
cert: null,
keyPair: null,
signedInUser: null,
oauthTokens: null,
whenVerifiedDeferred: null,
whenKeysReadyDeferred: null,
profile: null,
promiseInitialAccountData: null,
uid: null,
get isCurrent() this.fxaInternal && this.fxaInternal.currentAccountState === this,
abort: function() {
abort() {
if (this.whenVerifiedDeferred) {
this.whenVerifiedDeferred.reject(
new Error("Verification aborted; Another user signing in"));
@ -108,127 +107,47 @@ AccountState.prototype = {
this.cert = null;
this.keyPair = null;
this.signedInUser = null;
this.uid = null;
this.oauthTokens = null;
this.fxaInternal = null;
// Avoid finalizing the storageManager multiple times (ie, .signOut()
// followed by .abort())
if (!this.storageManager) {
return Promise.resolve();
}
let storageManager = this.storageManager;
this.storageManager = null;
return storageManager.finalize();
},
// Clobber all cached data and write that empty data to storage.
signOut() {
this.cert = null;
this.keyPair = null;
this.signedInUser = null;
this.oauthTokens = {};
this.uid = null;
return this.persistUserData();
this.oauthTokens = null;
let storageManager = this.storageManager;
this.storageManager = null;
return storageManager.deleteAccountData().then(() => {
return storageManager.finalize();
});
},
getUserAccountData() {
if (!this.isCurrent) {
return this.reject(new Error("Another user has signed in"));
return Promise.reject(new Error("Another user has signed in"));
}
if (this.promiseInitialAccountData) {
// We are still reading the data for the first and only time.
return this.promiseInitialAccountData;
}
// We've previously read it successfully (and possibly updated it since)
if (this.signedInUser) {
return this.resolve(this.signedInUser.accountData);
}
// We fetch the signedInUser data first, then fetch the token store and
// ensure the uid in the tokens matches our user.
let accountData = null;
let oauthTokens = {};
return this.promiseInitialAccountData = this.signedInUserStorage.get()
.then(user => {
if (logPII) {
log.debug("getUserAccountData", user);
}
// In an ideal world we could cache the data in this.signedInUser, but
// if we do, the interaction with the login manager breaks when the
// password is locked as this read may only have obtained partial data.
// Therefore every read *must* really read in case the login manager is
// now unlocked. We could fix this with a refactor...
accountData = user ? user.accountData : null;
}, err => {
// Error reading signed in user account data.
this.promiseInitialAccountData = null;
if (err instanceof OS.File.Error && err.becauseNoSuchFile) {
// File hasn't been created yet. That will be done
// on the first call to setSignedInUser
return;
}
// something else went wrong - report the error but continue without
// user data.
log.error("Failed to read signed in user data", err);
}).then(() => {
if (!accountData) {
return null;
}
return this.signedInUserStorage.getOAuthTokens();
}).then(tokenData => {
if (tokenData && tokenData.tokens &&
tokenData.version == DATA_FORMAT_VERSION &&
tokenData.uid == accountData.uid ) {
oauthTokens = tokenData.tokens;
}
}, err => {
// Error reading the OAuth tokens file.
if (err instanceof OS.File.Error && err.becauseNoSuchFile) {
// File hasn't been created yet, but will be when tokens are saved.
return;
}
log.error("Failed to read oauth tokens", err)
}).then(() => {
// We are done - clear our promise and save the data if we are still
// current.
this.promiseInitialAccountData = null;
if (this.isCurrent) {
// As above, we can not cache the data to this.signedInUser as we
// may only have partial data due to a locked MP, so the next
// request must re-read in case it is now unlocked.
// But we do save the tokens and the uid
this.oauthTokens = oauthTokens;
this.uid = accountData ? accountData.uid : null;
}
return accountData;
});
// phew!
return this.storageManager.getAccountData().then(result => {
return this.resolve(result);
});
},
// XXX - this should really be called "updateCurrentUserData" or similar as
// it is only ever used to add new fields to the *current* user, not to
// set a new user as current.
setUserAccountData: function(accountData) {
updateUserAccountData(updatedFields) {
if (!this.isCurrent) {
return this.reject(new Error("Another user has signed in"));
return Promise.reject(new Error("Another user has signed in"));
}
if (this.promiseInitialAccountData) {
throw new Error("Can't set account data before it's been read.");
}
if (!accountData) {
// see above - this should really be called "updateCurrentUserData" or similar.
throw new Error("Attempt to use setUserAccountData with null user data.");
}
if (accountData.uid != this.uid) {
// see above - this should really be called "updateCurrentUserData" or similar.
throw new Error("Attempt to use setUserAccountData with a different user.");
}
// Set our signedInUser before we start the write, so any updates to the
// data while the write completes are still captured.
this.signedInUser = {version: DATA_FORMAT_VERSION, accountData: accountData};
return this.signedInUserStorage.set(this.signedInUser)
.then(() => this.resolve(accountData));
return this.storageManager.updateAccountData(updatedFields);
},
getCertificate: function(data, keyPair, mustBeValidUntil) {
if (logPII) {
// don't stringify unless it will be written. We should replace this
// check with param substitutions added in bug 966674
log.debug("getCertificate" + JSON.stringify(this.signedInUser));
}
// TODO: get the lifetime from the cert's .exp field
if (this.cert && this.cert.validUntil > mustBeValidUntil) {
log.debug(" getCertificate already had one");
@ -292,7 +211,7 @@ AccountState.prototype = {
if (!this.isCurrent) {
log.info("An accountState promise was resolved, but was actually rejected" +
" due to a different user being signed in. Originally resolved" +
" with: " + result);
" with", result);
return Promise.reject(new Error("A different user signed in"));
}
return Promise.resolve(result);
@ -306,14 +225,18 @@ AccountState.prototype = {
if (!this.isCurrent) {
log.info("An accountState promise was rejected, but we are ignoring that" +
"reason and rejecting it due to a different user being signed in." +
"Originally rejected with: " + error);
"Originally rejected with", error);
return Promise.reject(new Error("A different user signed in"));
}
return Promise.reject(error);
},
// Abstractions for storage of cached tokens - these are all sync, and don't
// handle revocation etc - it's just storage.
// handle revocation etc - it's just storage (and the storage itself is async,
// but we don't return the storage promises, so it *looks* sync)
// These functions are sync simply so we can handle "token races" - when there
// are multiple in-flight requests for the same scope, we can detect this
// and revoke the redundant token.
// A preamble for the cache helpers...
_cachePreamble() {
@ -340,25 +263,16 @@ AccountState.prototype = {
getCachedToken(scopeArray) {
this._cachePreamble();
let key = getScopeKey(scopeArray);
if (this.oauthTokens[key]) {
let result = this.oauthTokens[key];
if (result) {
// later we might want to check an expiry date - but we currently
// have no such concept, so just return it.
log.trace("getCachedToken returning cached token");
return this.oauthTokens[key];
return result;
}
return null;
},
// Get an array of tokenData for all cached tokens.
getAllCachedTokens() {
this._cachePreamble();
let result = [];
for (let [key, tokenValue] in Iterator(this.oauthTokens)) {
result.push(tokenValue);
}
return result;
},
// Remove a cached token from the cache. Does *not* revoke it from anywhere.
// Returns the entire token entry if found, null otherwise.
removeCachedToken(token) {
@ -380,30 +294,8 @@ AccountState.prototype = {
// set of user data.)
_persistCachedTokens() {
this._cachePreamble();
let record;
if (this.uid) {
record = {
version: DATA_FORMAT_VERSION,
uid: this.uid,
tokens: this.oauthTokens,
};
} else {
record = null;
}
return this.signedInUserStorage.setOAuthTokens(record).catch(
err => {
log.error("Failed to save account data for token cache", err);
}
);
},
persistUserData() {
return this._persistCachedTokens().catch(err => {
log.error("Failed to persist cached tokens", err);
}).then(() => {
return this.signedInUserStorage.set(this.signedInUser);
}).catch(err => {
log.error("Failed to persist account data", err);
return this.updateUserAccountData({ oauthTokens: this.oauthTokens }).catch(err => {
log.error("Failed to update cached tokens", err);
});
},
}
@ -472,15 +364,13 @@ this.FxAccounts = function (mockInternal) {
}
if (mockInternal) {
// A little work-around to ensure the initial currentAccountState has
// the same mock storage the test passed in.
if (mockInternal.signedInUserStorage) {
internal.currentAccountState.signedInUserStorage = mockInternal.signedInUserStorage;
}
// Exposes the internal object for testing only.
external.internal = internal;
}
// wait until after the mocks are setup before initializing.
internal.initialize();
return Object.freeze(external);
}
@ -488,57 +378,17 @@ this.FxAccounts = function (mockInternal) {
* The internal API's constructor.
*/
function FxAccountsInternal() {
this.version = DATA_FORMAT_VERSION;
// Make a local copy of this constant so we can mock it in testing
this.POLL_SESSION = POLL_SESSION;
// The one and only "storage" object. While this is created here, the
// FxAccountsInternal object does *not* use it directly, but instead passes
// it to AccountState objects which has sole responsibility for storage.
// Ideally we would create it in the AccountState objects, but that makes
// testing hard as AccountState objects are regularly created and thrown
// away. Doing it this way means tests can mock/replace this storage object
// and have it used by all AccountState objects, even those created before
// and after the mock has been setup.
// We only want the fancy LoginManagerStorage on desktop.
#if defined(MOZ_B2G)
this.signedInUserStorage = new JSONStorage({
#else
this.signedInUserStorage = new LoginManagerStorage({
#endif
// We don't reference |profileDir| in the top-level module scope
// as we may be imported before we know where it is.
filename: DEFAULT_STORAGE_FILENAME,
oauthTokensFilename: DEFAULT_OAUTH_TOKENS_FILENAME,
baseDir: OS.Constants.Path.profileDir,
});
// We interact with the Firefox Accounts auth server in order to confirm that
// a user's email has been verified and also to fetch the user's keys from
// the server. We manage these processes in possibly long-lived promises
// that are internal to this object (never exposed to callers). Because
// Firefox Accounts allows for only one logged-in user, and because it's
// conceivable that while we are waiting to verify one identity, a caller
// could start verification on a second, different identity, we need to be
// able to abort all work on the first sign-in process. The currentTimer and
// currentAccountState are used for this purpose.
// (XXX - should the timer be directly on the currentAccountState?)
this.currentTimer = null;
this.currentAccountState = new AccountState(this, this.signedInUserStorage);
// All significant initialization should be done in the initialize() method
// below as it helps with testing.
}
/**
* The internal API's prototype.
*/
FxAccountsInternal.prototype = {
/**
* The current data format's version number.
*/
version: DATA_FORMAT_VERSION,
// The timeout (in ms) we use to poll for a verified mail for the first 2 mins.
VERIFICATION_POLL_TIMEOUT_INITIAL: 5000, // 5 seconds
// And how often we poll after the first 2 mins.
@ -546,6 +396,13 @@ FxAccountsInternal.prototype = {
_fxAccountsClient: null,
// All significant initialization should be done in this initialize() method,
// as it's called after this object has been mocked for tests.
initialize() {
this.currentTimer = null;
this.currentAccountState = this.newAccountState();
},
get fxAccountsClient() {
if (!this._fxAccountsClient) {
this._fxAccountsClient = new FxAccountsClient();
@ -566,6 +423,13 @@ FxAccountsInternal.prototype = {
return this._profile;
},
// A hook-point for tests who may want a mocked AccountState or mocked storage.
newAccountState(credentials) {
let storage = new FxAccountsStorageManager();
storage.initialize(credentials);
return new AccountState(this, storage);
},
/**
* Return the current time in milliseconds as an integer. Allows tests to
* manipulate the date to simulate certificate expiration.
@ -676,24 +540,23 @@ FxAccountsInternal.prototype = {
*/
setSignedInUser: function setSignedInUser(credentials) {
log.debug("setSignedInUser - aborting any existing flows");
this.abortExistingFlow();
let currentAccountState = this.currentAccountState = new AccountState(
this,
this.signedInUserStorage,
JSON.parse(JSON.stringify(credentials)) // Pass a clone of the credentials object.
);
// This promise waits for storage, but not for verification.
// We're telling the caller that this is durable now.
return currentAccountState.persistUserData().then(() => {
this.notifyObservers(ONLOGIN_NOTIFICATION);
if (!this.isUserEmailVerified(credentials)) {
this.startVerifiedCheck(credentials);
}
}).then(() => {
return currentAccountState.resolve();
});
return this.abortExistingFlow().then(() => {
let currentAccountState = this.currentAccountState = this.newAccountState(
Cu.cloneInto(credentials, {}) // Pass a clone of the credentials object.
);
// This promise waits for storage, but not for verification.
// We're telling the caller that this is durable now (although is that
// really something we should commit to? Why not let the write happen in
// the background? Already does for updateAccountData ;)
return currentAccountState.promiseInitialized.then(() => {
this.notifyObservers(ONLOGIN_NOTIFICATION);
if (!this.isUserEmailVerified(credentials)) {
this.startVerifiedCheck(credentials);
}
}).then(() => {
return currentAccountState.resolve();
});
})
},
/**
@ -749,8 +612,13 @@ FxAccountsInternal.prototype = {
clearTimeout(this.currentTimer);
this.currentTimer = 0;
}
this.currentAccountState.abort();
this.currentAccountState = new AccountState(this, this.signedInUserStorage);
if (this._profile) {
this._profile.tearDown();
this._profile = null;
}
// We "abort" the accountState and assume our caller is about to throw it
// away and replace it with a new one.
return this.currentAccountState.abort();
},
accountStatus: function accountStatus() {
@ -773,7 +641,7 @@ FxAccountsInternal.prototype = {
_destroyAllOAuthTokens: function(tokenInfos) {
// let's just destroy them all in parallel...
let promises = [];
for (let tokenInfo of tokenInfos) {
for (let [key, tokenInfo] in Iterator(tokenInfos || {})) {
promises.push(this._destroyOAuthToken(tokenInfo));
}
return Promise.all(promises);
@ -786,7 +654,7 @@ FxAccountsInternal.prototype = {
return currentState.getUserAccountData().then(data => {
// Save the session token for use in the call to signOut below.
sessionToken = data && data.sessionToken;
tokensToRevoke = currentState.getAllCachedTokens();
tokensToRevoke = data && data.oauthTokens;
return this._signOutLocal();
}).then(() => {
// FxAccountsManager calls here, then does its own call
@ -821,12 +689,12 @@ FxAccountsInternal.prototype = {
*/
_signOutLocal: function signOutLocal() {
let currentAccountState = this.currentAccountState;
if (this._profile) {
this._profile.tearDown();
this._profile = null;
}
return currentAccountState.signOut().then(() => {
this.abortExistingFlow(); // this resets this.currentAccountState.
// this "aborts" this.currentAccountState but doesn't make a new one.
return this.abortExistingFlow();
}).then(() => {
this.currentAccountState = this.newAccountState();
return this.currentAccountState.promiseInitialized;
});
},
@ -917,23 +785,24 @@ FxAccountsInternal.prototype = {
if (logPII) {
log.debug("kB_hex: " + kB_hex);
}
data.kA = CommonUtils.bytesAsHex(kA);
data.kB = CommonUtils.bytesAsHex(kB_hex);
delete data.keyFetchToken;
delete data.unwrapBKey;
log.debug("Keys Obtained: kA=" + !!data.kA + ", kB=" + !!data.kB);
if (logPII) {
log.debug("Keys Obtained: kA=" + data.kA + ", kB=" + data.kB);
let updateData = {
kA: CommonUtils.bytesAsHex(kA),
kB: CommonUtils.bytesAsHex(kB_hex),
keyFetchToken: null, // null values cause the item to be removed.
unwrapBKey: null,
}
yield currentState.setUserAccountData(data);
log.debug("Keys Obtained: kA=" + !!updateData.kA + ", kB=" + !!updateData.kB);
if (logPII) {
log.debug("Keys Obtained: kA=" + updateData.kA + ", kB=" + updateData.kB);
}
yield currentState.updateUserAccountData(updateData);
// We are now ready for business. This should only be invoked once
// per setSignedInUser(), regardless of whether we've rebooted since
// setSignedInUser() was called.
this.notifyObservers(ONVERIFIED_NOTIFICATION);
return data;
return currentState.getUserAccountData();
}.bind(this)).then(result => currentState.resolve(result));
},
@ -1070,12 +939,11 @@ FxAccountsInternal.prototype = {
.then((response) => {
log.debug("checkEmailStatus -> " + JSON.stringify(response));
if (response && response.verified) {
currentState.getUserAccountData()
.then((data) => {
data.verified = true;
return currentState.setUserAccountData(data);
currentState.updateUserAccountData({ verified: true })
.then(() => {
return currentState.getUserAccountData();
})
.then((data) => {
.then(data => {
// Now that the user is verified, we can proceed to fetch keys
if (currentState.whenVerifiedDeferred) {
currentState.whenVerifiedDeferred.resolve(data);
@ -1409,7 +1277,7 @@ FxAccountsInternal.prototype = {
let currentState = this.currentAccountState;
return this.profile.getProfile().then(
profileData => {
let profile = JSON.parse(JSON.stringify(profileData));
let profile = Cu.cloneInto(profileData, {});
return currentState.resolve(profile);
},
error => {
@ -1420,241 +1288,6 @@ FxAccountsInternal.prototype = {
},
};
/**
* JSONStorage constructor that creates instances that may set/get
* to a specified file, in a directory that will be created if it
* doesn't exist.
*
* @param options {
* filename: of the file to write to
* baseDir: directory where the file resides
* }
* @return instance
*/
function JSONStorage(options) {
this.baseDir = options.baseDir;
this.path = OS.Path.join(options.baseDir, options.filename);
this.oauthTokensPath = OS.Path.join(options.baseDir, options.oauthTokensFilename);
};
JSONStorage.prototype = {
set: function(contents) {
return OS.File.makeDir(this.baseDir, {ignoreExisting: true})
.then(CommonUtils.writeJSON.bind(null, contents, this.path));
},
get: function() {
return CommonUtils.readJSON(this.path);
},
setOAuthTokens: function(contents) {
return OS.File.makeDir(this.baseDir, {ignoreExisting: true})
.then(CommonUtils.writeJSON.bind(null, contents, this.oauthTokensPath));
},
getOAuthTokens: function(contents) {
return CommonUtils.readJSON(this.oauthTokensPath);
},
};
/**
* LoginManagerStorage constructor that creates instances that may set/get
* from a combination of a clear-text JSON file and stored securely in
* the nsILoginManager.
*
* @param options {
* filename: of the plain-text file to write to
* baseDir: directory where the file resides
* }
* @return instance
*/
function LoginManagerStorage(options) {
// we reuse the JSONStorage for writing the plain-text stuff.
this.jsonStorage = new JSONStorage(options);
}
LoginManagerStorage.prototype = {
// The fields in the credentials JSON object that are stored in plain-text
// in the profile directory. All other fields are stored in the login manager,
// and thus are only available when the master-password is unlocked.
// a hook point for testing.
get _isLoggedIn() {
return Services.logins.isLoggedIn;
},
// Clear any data from the login manager. Returns true if the login manager
// was unlocked (even if no existing logins existed) or false if it was
// locked (meaning we don't even know if it existed or not.)
_clearLoginMgrData: Task.async(function* () {
try { // Services.logins might be third-party and broken...
yield Services.logins.initializationPromise;
if (!this._isLoggedIn) {
return false;
}
let logins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null, FXA_PWDMGR_REALM);
for (let login of logins) {
Services.logins.removeLogin(login);
}
return true;
} catch (ex) {
log.error("Failed to clear login data: ${}", ex);
return false;
}
}),
set: Task.async(function* (contents) {
if (!contents) {
// User is signing out - write the null to the json file.
yield this.jsonStorage.set(contents);
// And nuke it from the login manager.
let cleared = yield this._clearLoginMgrData();
if (!cleared) {
// just log a message - we verify that the email address matches when
// we reload it, so having a stale entry doesn't really hurt.
log.info("not removing credentials from login manager - not logged in");
}
return;
}
// We are saving actual data.
// Split the data into 2 chunks - one to go to the plain-text, and the
// other to write to the login manager.
let toWriteJSON = {version: contents.version};
let accountDataJSON = toWriteJSON.accountData = {};
let toWriteLoginMgr = {version: contents.version};
let accountDataLoginMgr = toWriteLoginMgr.accountData = {};
for (let [name, value] of Iterator(contents.accountData)) {
if (FXA_PWDMGR_PLAINTEXT_FIELDS.indexOf(name) >= 0) {
accountDataJSON[name] = value;
} else {
accountDataLoginMgr[name] = value;
}
}
yield this.jsonStorage.set(toWriteJSON);
try { // Services.logins might be third-party and broken...
// and the stuff into the login manager.
yield Services.logins.initializationPromise;
// If MP is locked we silently fail - the user may need to re-auth
// next startup.
if (!this._isLoggedIn) {
log.info("not saving credentials to login manager - not logged in");
return;
}
// write the rest of the data to the login manager.
let loginInfo = new Components.Constructor(
"@mozilla.org/login-manager/loginInfo;1", Ci.nsILoginInfo, "init");
let login = new loginInfo(FXA_PWDMGR_HOST,
null, // aFormSubmitURL,
FXA_PWDMGR_REALM, // aHttpRealm,
contents.accountData.email, // aUsername
JSON.stringify(toWriteLoginMgr), // aPassword
"", // aUsernameField
"");// aPasswordField
let existingLogins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null,
FXA_PWDMGR_REALM);
if (existingLogins.length) {
Services.logins.modifyLogin(existingLogins[0], login);
} else {
Services.logins.addLogin(login);
}
} catch (ex) {
log.error("Failed to save data to the login manager: ${}", ex);
}
}),
get: Task.async(function* () {
// we need to suck some data from the .json file in the profile dir and
// some other from the login manager.
let data = yield this.jsonStorage.get();
if (!data) {
// no user logged in, nuke the storage data in case we couldn't remove
// it previously and then we are done.
yield this._clearLoginMgrData();
return null;
}
// if we have encryption keys it must have been saved before we
// used the login manager, so re-save it.
if (data.accountData.kA || data.accountData.kB || data.keyFetchToken) {
// We need to migrate, but the MP might be locked (eg, on the first run
// with this enabled, we will get here very soon after startup, so will
// certainly be locked.) This means we can't actually store the data in
// the login manager (and thus might lose it if we migrated now)
// So if the MP is locked, we *don't* migrate, but still just return
// the subset of data we now store in the JSON.
// This will cause sync to notice the lack of keys, force an unlock then
// re-fetch the account data to see if the keys are there. At *that*
// point we will end up back here, but because the MP is now unlocked
// we can actually perform the migration.
if (!this._isLoggedIn) {
// return the "safe" subset but leave the storage alone.
log.info("account data needs migration to the login manager but the MP is locked.");
let result = {
version: data.version,
accountData: {},
};
for (let fieldName of FXA_PWDMGR_PLAINTEXT_FIELDS) {
result.accountData[fieldName] = data.accountData[fieldName];
}
return result;
}
// actually migrate - just calling .set() will split everything up.
log.info("account data is being migrated to the login manager.");
yield this.set(data);
}
try { // Services.logins might be third-party and broken...
// read the data from the login manager and merge it for return.
yield Services.logins.initializationPromise;
if (!this._isLoggedIn) {
log.info("returning partial account data as the login manager is locked.");
return data;
}
let logins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null, FXA_PWDMGR_REALM);
if (logins.length == 0) {
// This could happen if the MP was locked when we wrote the data.
log.info("Can't find the rest of the credentials in the login manager");
return data;
}
let login = logins[0];
if (login.username == data.accountData.email) {
let lmData = JSON.parse(login.password);
if (lmData.version == data.version) {
// Merge the login manager data
copyObjectProperties(lmData.accountData, data.accountData);
} else {
log.info("version field in the login manager doesn't match - ignoring it");
yield this._clearLoginMgrData();
}
} else {
log.info("username in the login manager doesn't match - ignoring it");
yield this._clearLoginMgrData();
}
} catch (ex) {
log.error("Failed to get data from the login manager: ${}", ex);
}
return data;
}),
// OAuth tokens are always written to disk, so delegate to our JSON storage.
// (Bug 1013064 comments 23-25 explain why we save the sessionToken into the
// plain JSON file, and the same logic applies for oauthTokens being in JSON)
getOAuthTokens() {
return this.jsonStorage.getOAuthTokens();
},
setOAuthTokens(contents) {
return this.jsonStorage.setOAuthTokens(contents);
},
}
// A getter for the instance to export
XPCOMUtils.defineLazyGetter(this, "fxAccounts", function() {

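For context on the new storage API used throughout this file: updateUserAccountData() only patches fields on the currently signed-in user, and a null value removes the field (as the key-fetching code above relies on). A rough, hypothetical caller, assumed to run inside a Task.async generator with currentState being the active AccountState:

yield currentState.updateUserAccountData({
  verified: true,       // add or overwrite a field
  keyFetchToken: null,  // a null value removes the field from storage
});
let data = yield currentState.getUserAccountData();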
View File

@ -66,7 +66,6 @@ exports.FXACCOUNTS_PERMISSION = "firefox-accounts";
exports.DATA_FORMAT_VERSION = 1;
exports.DEFAULT_STORAGE_FILENAME = "signedInUser.json";
exports.DEFAULT_OAUTH_TOKENS_FILENAME = "signedInUserOAuthTokens.json";
// Token life times.
// Having this parameter be short has limited security value and can cause
@ -217,7 +216,8 @@ exports.ERROR_MSG_METHOD_NOT_ALLOWED = "METHOD_NOT_ALLOWED";
// The fields we save in the plaintext JSON.
// See bug 1013064 comments 23-25 for why the sessionToken is "safe"
exports.FXA_PWDMGR_PLAINTEXT_FIELDS = ["email", "verified", "authAt",
"sessionToken", "uid"];
"sessionToken", "uid", "oauthTokens",
"profile"];
// The pseudo-host we use in the login manager
exports.FXA_PWDMGR_HOST = "chrome://FirefoxAccounts";
// The realm we use in the login manager.

View File

@ -0,0 +1,540 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
this.EXPORTED_SYMBOLS = [
"FxAccountsStorageManager",
];
const {classes: Cc, interfaces: Ci, utils: Cu} = Components;
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
Cu.import("resource://gre/modules/osfile.jsm");
Cu.import("resource://services-common/utils.js");
this.FxAccountsStorageManager = function(options = {}) {
this.options = {
filename: options.filename || DEFAULT_STORAGE_FILENAME,
baseDir: options.baseDir || OS.Constants.Path.profileDir,
}
this.plainStorage = new JSONStorage(this.options);
// On b2g we have no loginManager for secure storage, and tests may want
// to pretend secure storage isn't available.
let useSecure = 'useSecure' in options ? options.useSecure : haveLoginManager;
if (useSecure) {
this.secureStorage = new LoginManagerStorage();
} else {
this.secureStorage = null;
}
this._clearCachedData();
// See .initialize() below - this protects against it not being called.
this._promiseInitialized = Promise.reject("initialize not called");
// A promise to avoid storage races - see _queueStorageOperation
this._promiseStorageComplete = Promise.resolve();
}
this.FxAccountsStorageManager.prototype = {
_initialized: false,
_needToReadSecure: true,
// An initialization routine that *looks* synchronous to the callers, but
// is actually async as everything else waits for it to complete.
initialize(accountData) {
if (this._initialized) {
throw new Error("already initialized");
}
this._initialized = true;
// If we just throw away our pre-rejected promise it is reported as an
// unhandled exception when it is GCd - so add an empty .catch handler here
// to prevent this.
this._promiseInitialized.catch(() => {});
this._promiseInitialized = this._initialize(accountData);
},
_initialize: Task.async(function* (accountData) {
log.trace("initializing new storage manager");
try {
if (accountData) {
// If accountData is passed we don't need to read any storage.
this._needToReadSecure = false;
// Split it into the 2 buckets.
for (let [name, val] of Iterator(accountData)) {
if (FXA_PWDMGR_PLAINTEXT_FIELDS.indexOf(name) >= 0) {
this.cachedPlain[name] = val;
} else {
this.cachedSecure[name] = val;
}
}
// write it out and we are done.
yield this._write();
return;
}
// So we were initialized without account data - that means we need to
// read the state from storage. We try to read plain storage first and
// only attempt to read secure storage if the plain storage had a user.
this._needToReadSecure = yield this._readPlainStorage();
if (this._needToReadSecure && this.secureStorage) {
yield this._doReadAndUpdateSecure();
}
} finally {
log.trace("initializing of new storage manager done");
}
}),
finalize() {
// We can't throw this instance away while it is still writing or we may
// end up racing with the newly created one.
log.trace("StorageManager finalizing");
return this._promiseInitialized.then(() => {
return this._promiseStorageComplete;
}).then(() => {
this._promiseStorageComplete = null;
this._promiseInitialized = null;
this._clearCachedData();
log.trace("StorageManager finalized");
})
},
// We want to make sure we don't end up doing multiple storage requests
// concurrently - which has a small window for reads if the master-password
// is locked at initialization time and becomes unlocked later, and always
// has an opportunity for updates.
// We also want to make sure we finished writing when finalizing, so we
// can't accidentally end up with the previous user's write finishing after
// a signOut has attempted to clear it.
// So all such operations "queue" themselves via this.
_queueStorageOperation(func) {
// |result| is the promise we return - it has no .catch handler, so callers
// of the storage operation still see failure as a normal rejection.
let result = this._promiseStorageComplete.then(func);
// But the promise we assign to _promiseStorageComplete *does* have a catch
// handler so that a rejection in one storage operation does not prevent
// future operations from starting (ie, _promiseStorageComplete must never
// be in a rejected state)
this._promiseStorageComplete = result.catch(err => {
log.error("${func} failed: ${err}", {func, err});
});
return result;
},
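// As a sketch (hypothetical callers, not part of this module's API surface):
//   let p1 = this._queueStorageOperation(() => this.__write());
//   let p2 = this._queueStorageOperation(() => this._doReadAndUpdateSecure());
// p2's function only runs once p1 has settled, and a rejection of p1 is still
// seen by p1's caller while leaving the queue usable for p2.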
// Get the account data by combining the plain and secure storage.
getAccountData: Task.async(function* () {
yield this._promiseInitialized;
// We know we are initialized - this means our .cachedPlain is accurate
// and doesn't need to be read (it was read if necessary by initialize).
// So if there's no uid, there's no user signed in.
if (!('uid' in this.cachedPlain)) {
return null;
}
let result = {};
for (let [name, value] of Iterator(this.cachedPlain)) {
result[name] = value;
}
// But the secure data may not have been read, so try that now.
yield this._maybeReadAndUpdateSecure();
// .cachedSecure now has as much as it possibly can (which is possibly
// nothing if (a) secure storage remains locked and (b) we've never updated
// a field to be stored in secure storage.)
for (let [name, value] of Iterator(this.cachedSecure)) {
result[name] = value;
}
return result;
}),
// Update just the specified fields. This DOES NOT allow you to change to
// a different user, nor to set the user as signed-out.
updateAccountData: Task.async(function* (newFields) {
yield this._promiseInitialized;
if (!('uid' in this.cachedPlain)) {
// If this storage instance shows no logged in user, then you can't
// update fields.
throw new Error("No user is logged in");
}
if (!newFields || 'uid' in newFields || 'email' in newFields) {
// Once we support users changing their email address this may need to
// change, but it's not clear how we would be told of such a change
// anyway...
throw new Error("Can't change uid or email address");
}
log.debug("_updateAccountData with items", Object.keys(newFields));
// Work out which bucket each field belongs in.
for (let [name, value] of Iterator(newFields)) {
if (FXA_PWDMGR_PLAINTEXT_FIELDS.indexOf(name) >= 0) {
if (value == null) {
delete this.cachedPlain[name];
} else {
this.cachedPlain[name] = value;
}
} else {
// don't do the "delete on null" thing here - we need to keep it until
// we have managed to read so we can nuke it on write.
this.cachedSecure[name] = value;
}
}
// If we haven't yet read the secure data, do so now, else we may write
// out partial data.
yield this._maybeReadAndUpdateSecure();
// Now save it - but don't wait on the _write promise - it's queued up as
// a storage operation, so .finalize() will wait for completion, but no need
// for us to.
this._write();
}),
_clearCachedData() {
this.cachedPlain = {};
// If we don't have secure storage available we make cachedPlain and
// cachedSecure the same object.
this.cachedSecure = this.secureStorage == null ? this.cachedPlain : {};
},
/* Reads the plain storage and caches the read values in this.cachedPlain.
Only ever called once and unlike the "secure" storage, is expected to never
fail (ie, plain storage is considered always available, whereas secure
storage may be unavailable if it is locked).
Returns a promise that resolves with true if valid account data was found,
false otherwise.
Note: _readPlainStorage is only called during initialize, so isn't
protected via _queueStorageOperation() nor _promiseInitialized.
*/
_readPlainStorage: Task.async(function* () {
let got;
try {
got = yield this.plainStorage.get();
} catch(err) {
// File hasn't been created yet. That will be done
// when write is called.
if (!(err instanceof OS.File.Error) || !err.becauseNoSuchFile) {
log.error("Failed to read plain storage", err);
}
// either way, we return null.
got = null;
}
if (!got || !got.accountData || !got.accountData.uid ||
got.version != DATA_FORMAT_VERSION) {
return false;
}
// We need to update our .cachedPlain, but can't just assign to it as
// it may need to be the exact same object as .cachedSecure
// As a sanity check, .cachedPlain must be empty (as we are called by init)
// XXX - this would be a good use-case for a RuntimeAssert or similar, as
// being added in bug 1080457.
if (Object.keys(this.cachedPlain).length != 0) {
throw new Error("should be impossible to have cached data already.")
}
for (let [name, value] of Iterator(got.accountData)) {
this.cachedPlain[name] = value;
}
return true;
}),
/* If we haven't managed to read the secure storage, try now, so
we can merge our cached data with the data that's already been set.
*/
_maybeReadAndUpdateSecure: Task.async(function* () {
if (this.secureStorage == null || !this._needToReadSecure) {
return;
}
return this._queueStorageOperation(() => {
if (this._needToReadSecure) { // we might have read it by now!
return this._doReadAndUpdateSecure();
}
});
}),
/* Unconditionally read the secure storage and merge our cached data (ie, data
which has already been set while the secure storage was locked) with
the read data
*/
_doReadAndUpdateSecure: Task.async(function* () {
let { uid, email } = this.cachedPlain;
try {
log.debug("reading secure storage with existing", Object.keys(this.cachedSecure));
// If we already have anything in .cachedSecure it means something has
// updated cachedSecure before we've read it. That means that after we do
// manage to read we must write back the merged data.
let needWrite = Object.keys(this.cachedSecure).length != 0;
let readSecure = yield this.secureStorage.get(uid, email);
// and update our cached data with it - anything already in .cachedSecure
// wins (including the fact it may be null or undefined; the latter means
// it will be removed from storage).
if (readSecure && readSecure.version != DATA_FORMAT_VERSION) {
log.warn("got secure data but the data format version doesn't match");
readSecure = null;
}
if (readSecure && readSecure.accountData) {
log.debug("secure read fetched items", Object.keys(readSecure.accountData));
for (let [name, value] of Iterator(readSecure.accountData)) {
if (!(name in this.cachedSecure)) {
this.cachedSecure[name] = value;
}
}
if (needWrite) {
log.debug("successfully read secure data; writing updated data back")
yield this._doWriteSecure();
}
}
this._needToReadSecure = false;
} catch (ex if ex instanceof this.secureStorage.STORAGE_LOCKED) {
log.debug("setAccountData: secure storage is locked trying to read");
} catch (ex) {
log.error("failed to read secure storage", ex);
throw ex;
}
}),
_write() {
// We don't want multiple writes happening concurrently, and we also need to
// know when an "old" storage manager is done (this.finalize() waits for this)
return this._queueStorageOperation(() => this.__write());
},
__write: Task.async(function* () {
// Write everything back - later we could track what's actually dirty,
// but for now we write it all.
log.debug("writing plain storage", Object.keys(this.cachedPlain));
let toWritePlain = {
version: DATA_FORMAT_VERSION,
accountData: this.cachedPlain,
}
yield this.plainStorage.set(toWritePlain);
// If we have no secure storage manager we are done.
if (this.secureStorage == null) {
return;
}
// and only attempt to write to secure storage if we've managed to read it,
// otherwise we might clobber data that's already there.
if (!this._needToReadSecure) {
yield this._doWriteSecure();
}
}),
/* Do the actual write of secure data. Caller is expected to check if we actually
need to write and to ensure we are in a queued storage operation.
*/
_doWriteSecure: Task.async(function* () {
// We need to remove null items here.
for (let [name, value] of Iterator(this.cachedSecure)) {
if (value == null) {
delete this.cachedSecure[name];
}
}
log.debug("writing secure storage", Object.keys(this.cachedSecure));
let toWriteSecure = {
version: DATA_FORMAT_VERSION,
accountData: this.cachedSecure,
}
try {
yield this.secureStorage.set(this.cachedPlain.email, toWriteSecure);
} catch (ex if ex instanceof this.secureStorage.STORAGE_LOCKED) {
// This shouldn't be possible as once it is unlocked it can't be
// re-locked, and we can only be here if we've previously managed to
// read.
log.error("setAccountData: secure storage is locked trying to write");
}
}),
// Delete the data for an account - ie, called on "sign out".
deleteAccountData() {
return this._queueStorageOperation(() => this._deleteAccountData());
},
_deleteAccountData: Task.async(function* () {
log.debug("removing account data");
yield this._promiseInitialized;
yield this.plainStorage.set(null);
if (this.secureStorage) {
yield this.secureStorage.set(null);
}
this._clearCachedData();
log.debug("account data reset");
}),
}
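// A rough sketch of the expected lifecycle of a storage manager (the caller
// and credentials here are illustrative, not part of this module):
//   let sm = new FxAccountsStorageManager();
//   sm.initialize();                  // or sm.initialize(credentials) on sign-in
//   let data = yield sm.getAccountData();
//   yield sm.updateAccountData({kA: "new value"});
//   yield sm.finalize();              // waits for queued writes to complete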
/**
* JSONStorage constructor that creates instances that may set/get
* to a specified file, in a directory that will be created if it
* doesn't exist.
*
* @param options {
* filename: of the file to write to
* baseDir: directory where the file resides
* }
* @return instance
*/
function JSONStorage(options) {
this.baseDir = options.baseDir;
this.path = OS.Path.join(options.baseDir, options.filename);
};
JSONStorage.prototype = {
set: function(contents) {
log.trace("starting write of json user data", contents ? Object.keys(contents.accountData) : "null");
let start = Date.now();
return OS.File.makeDir(this.baseDir, {ignoreExisting: true})
.then(CommonUtils.writeJSON.bind(null, contents, this.path))
.then(result => {
log.trace("finished write of json user data - took", Date.now()-start);
return result;
});
},
get: function() {
log.trace("starting fetch of json user data");
let start = Date.now();
return CommonUtils.readJSON(this.path).then(result => {
log.trace("finished fetch of json user data - took", Date.now()-start);
return result;
});
},
};
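// A minimal usage sketch of JSONStorage (filename and contents are
// illustrative only):
//   let storage = new JSONStorage({filename: "example.json",
//                                  baseDir: OS.Constants.Path.profileDir});
//   yield storage.set({version: DATA_FORMAT_VERSION, accountData: {uid: "x"}});
//   let read = yield storage.get(); // resolves with the object written above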
function StorageLockedError() {
}
/**
* LoginManagerStorage constructor that creates instances that set/get
* data stored securely in the nsILoginManager.
*
* @return instance
*/
function LoginManagerStorage() {
}
LoginManagerStorage.prototype = {
STORAGE_LOCKED: StorageLockedError,
// The fields in the credentials JSON object that are stored in plain-text
// in the profile directory. All other fields are stored in the login manager,
// and thus are only available when the master-password is unlocked.
// a hook point for testing.
get _isLoggedIn() {
return Services.logins.isLoggedIn;
},
// Clear any data from the login manager. Returns true if the login manager
// was unlocked (even if no logins existed) or false if it was
// locked (meaning we don't even know if it existed or not.)
_clearLoginMgrData: Task.async(function* () {
try { // Services.logins might be third-party and broken...
yield Services.logins.initializationPromise;
if (!this._isLoggedIn) {
return false;
}
let logins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null, FXA_PWDMGR_REALM);
for (let login of logins) {
Services.logins.removeLogin(login);
}
return true;
} catch (ex) {
log.error("Failed to clear login data: ${}", ex);
return false;
}
}),
set: Task.async(function* (email, contents) {
if (!contents) {
// Nuke it from the login manager.
let cleared = yield this._clearLoginMgrData();
if (!cleared) {
// just log a message - we verify that the uid matches when
// we reload it, so having a stale entry doesn't really hurt.
log.info("not removing credentials from login manager - not logged in");
}
log.trace("storage set finished clearing account data");
return;
}
// We are saving actual data.
log.trace("starting write of user data to the login manager");
try { // Services.logins might be third-party and broken...
// and stash the secure fields into the login manager.
yield Services.logins.initializationPromise;
// If the MP is locked we throw STORAGE_LOCKED - the user may need to
// re-auth next startup.
if (!this._isLoggedIn) {
log.info("not saving credentials to login manager - not logged in");
throw new this.STORAGE_LOCKED();
}
// write the data to the login manager.
let loginInfo = new Components.Constructor(
"@mozilla.org/login-manager/loginInfo;1", Ci.nsILoginInfo, "init");
let login = new loginInfo(FXA_PWDMGR_HOST,
null, // aFormSubmitURL,
FXA_PWDMGR_REALM, // aHttpRealm,
email, // aUsername
JSON.stringify(contents), // aPassword
"", // aUsernameField
"");// aPasswordField
let existingLogins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null,
FXA_PWDMGR_REALM);
if (existingLogins.length) {
Services.logins.modifyLogin(existingLogins[0], login);
} else {
Services.logins.addLogin(login);
}
log.trace("finished write of user data to the login manager");
} catch (ex if ex instanceof this.STORAGE_LOCKED) {
throw ex;
} catch (ex) {
// just log and consume the error here - it may be a 3rd party login
// manager replacement that's simply broken.
log.error("Failed to save data to the login manager", ex);
}
}),
get: Task.async(function* (uid, email) {
log.trace("starting fetch of user data from the login manager");
try { // Services.logins might be third-party and broken...
// read the data from the login manager and merge it for return.
yield Services.logins.initializationPromise;
if (!this._isLoggedIn) {
log.info("returning partial account data as the login manager is locked.");
throw new this.STORAGE_LOCKED();
}
let logins = Services.logins.findLogins({}, FXA_PWDMGR_HOST, null, FXA_PWDMGR_REALM);
if (logins.length == 0) {
// This could happen if the MP was locked when we wrote the data.
log.info("Can't find any credentials in the login manager");
return null;
}
let login = logins[0];
// Support either the uid or the email as the username - we plan to move
// to storing the uid once Fx41 hits the release channel as the code below
// that handles either first landed in 41. Bug 1183951 is to store the uid.
if (login.username == uid || login.username == email) {
return JSON.parse(login.password);
}
log.info("username in the login manager doesn't match - ignoring it");
yield this._clearLoginMgrData();
} catch (ex if ex instanceof this.STORAGE_LOCKED) {
throw ex;
} catch (ex) {
// just log and consume the error here - it may be a 3rd party login
// manager replacement that's simply broken.
log.error("Failed to get data from the login manager", ex);
}
return null;
}),
}
// A global variable to indicate if the login manager is available - it doesn't
// exist on b2g. Defined here as the use of preprocessor directives skews line
// numbers in the runtime, meaning stack-traces etc end up off by a few lines.
// Doing it at the end of the file makes that less of a pita.
let haveLoginManager =
#if defined(MOZ_B2G)
false;
#else
true;
#endif

View File

@ -12,6 +12,7 @@ XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini']
EXTRA_JS_MODULES += [
'Credentials.jsm',
'FxAccounts.jsm',
'FxAccountsClient.jsm',
'FxAccountsCommon.js',
'FxAccountsOAuthClient.jsm',
@ -22,7 +23,7 @@ EXTRA_JS_MODULES += [
]
EXTRA_PP_JS_MODULES += [
'FxAccounts.jsm',
'FxAccountsStorage.jsm',
]
# For now, we will only be using the FxA manager in B2G.

View File

@ -12,6 +12,9 @@ Cu.import("resource://gre/modules/FxAccountsOAuthGrantClient.jsm");
Cu.import("resource://gre/modules/Promise.jsm");
Cu.import("resource://gre/modules/Log.jsm");
// We grab some additional stuff via backstage passes.
let {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
const ONE_HOUR_MS = 1000 * 60 * 60;
const ONE_DAY_MS = ONE_HOUR_MS * 24;
const TWO_MINUTES_MS = 1000 * 60 * 2;
@ -47,6 +50,42 @@ Services.prefs.setCharPref("identity.fxaccounts.settings.uri", CONTENT_URL);
* We add the _verified attribute to mock the change in verification
* state on the FXA server.
*/
function MockStorageManager() {
}
MockStorageManager.prototype = {
promiseInitialized: Promise.resolve(),
initialize(accountData) {
this.accountData = accountData;
},
finalize() {
return Promise.resolve();
},
getAccountData() {
return Promise.resolve(this.accountData);
},
updateAccountData(updatedFields) {
for (let [name, value] of Iterator(updatedFields)) {
if (value == null) {
delete this.accountData[name];
} else {
this.accountData[name] = value;
}
}
return Promise.resolve();
},
deleteAccountData() {
this.accountData = null;
return Promise.resolve();
}
}
function MockFxAccountsClient() {
this._email = "nobody@example.com";
this._verified = false;
@ -96,25 +135,6 @@ MockFxAccountsClient.prototype = {
__proto__: FxAccountsClient.prototype
}
let MockStorage = function() {
this.data = null;
};
MockStorage.prototype = Object.freeze({
set: function (contents) {
this.data = contents;
return Promise.resolve(null);
},
get: function () {
return Promise.resolve(this.data);
},
getOAuthTokens() {
return Promise.resolve(null);
},
setOAuthTokens(contents) {
return Promise.resolve();
},
});
/*
* We need to mock the FxAccounts module's interfaces to external
* services, such as storage and the FxAccounts client. We also
@ -128,10 +148,15 @@ function MockFxAccounts() {
_getCertificateSigned_calls: [],
_d_signCertificate: Promise.defer(),
_now_is: new Date(),
signedInUserStorage: new MockStorage(),
now: function () {
return this._now_is;
},
newAccountState(credentials) {
// we use a real accountState but mocked storage.
let storage = new MockStorageManager();
storage.initialize(credentials);
return new AccountState(this, storage);
},
getCertificateSigned: function (sessionToken, serializedPublicKey) {
_("mock getCertificateSigned\n");
this._getCertificateSigned_calls.push([sessionToken, serializedPublicKey]);
@ -172,9 +197,13 @@ add_test(function test_non_https_remote_server_uri() {
add_task(function test_get_signed_in_user_initially_unset() {
// This test, unlike many of the rest, uses a (largely) un-mocked
// FxAccounts instance.
// We do mock the storage to keep the test fast on b2g.
let account = new FxAccounts({
signedInUserStorage: new MockStorage(),
newAccountState(credentials) {
// we use a real accountState but mocked storage.
let storage = new MockStorageManager();
storage.initialize(credentials);
return new AccountState(this, storage);
},
});
let credentials = {
email: "foo@example.com",
@ -185,9 +214,6 @@ add_task(function test_get_signed_in_user_initially_unset() {
kB: "cafe",
verified: true
};
// and a sad hack to ensure the mocked storage is used for the initial reads.
account.internal.currentAccountState.signedInUserStorage = account.internal.signedInUserStorage;
let result = yield account.getSignedInUser();
do_check_eq(result, null);
@ -221,7 +247,12 @@ add_task(function* test_getCertificate() {
// FxAccounts instance.
// We do mock the storage to keep the test fast on b2g.
let fxa = new FxAccounts({
signedInUserStorage: new MockStorage(),
newAccountState(credentials) {
// we use a real accountState but mocked storage.
let storage = new MockStorageManager();
storage.initialize(credentials);
return new AccountState(this, storage);
},
});
let credentials = {
email: "foo@example.com",
@ -232,8 +263,6 @@ add_task(function* test_getCertificate() {
kB: "cafe",
verified: true
};
// and a sad hack to ensure the mocked storage is used for the initial reads.
fxa.internal.currentAccountState.signedInUserStorage = fxa.internal.signedInUserStorage;
yield fxa.setSignedInUser(credentials);
// Test that an expired cert throws if we're offline.
@ -814,7 +843,6 @@ add_task(function* test_getOAuthTokenCachedScopeNormalization() {
do_check_eq(result, "token");
});
Services.prefs.setCharPref("identity.fxaccounts.remote.oauth.uri", "https://example.com/v1");
add_test(function test_getOAuthToken_invalid_param() {
let fxa = new MockFxAccounts();
@ -967,13 +995,13 @@ add_test(function test_getSignedInUserProfile() {
let mockProfile = {
getProfile: function () {
return Promise.resolve({ avatar: "image" });
}
},
tearDown: function() {},
};
let fxa = new FxAccounts({
_profile: mockProfile,
});
let fxa = new FxAccounts({});
fxa.setSignedInUser(alice).then(() => {
fxa.internal._profile = mockProfile;
fxa.getSignedInUserProfile()
.then(result => {
do_check_true(!!result);

View File

@ -7,6 +7,8 @@
// Stop us hitting the real auth server.
Services.prefs.setCharPref("identity.fxaccounts.auth.uri", "http://localhost");
// See verbose logging from FxAccounts.jsm
Services.prefs.setCharPref("identity.fxaccounts.loglevel", "Trace");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/FxAccounts.jsm");
@ -16,9 +18,18 @@ Cu.import("resource://gre/modules/osfile.jsm");
Cu.import("resource://services-common/utils.js");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
// Use a backstage pass to get at our LoginManagerStorage object, so we can
// mock the prototype.
let {LoginManagerStorage} = Cu.import("resource://gre/modules/FxAccountsStorage.jsm", {});
let isLoggedIn = true;
LoginManagerStorage.prototype.__defineGetter__("_isLoggedIn", () => isLoggedIn);
function setLoginMgrLoggedInState(loggedIn) {
isLoggedIn = loggedIn;
}
initTestLogging("Trace");
// See verbose logging from FxAccounts.jsm
Services.prefs.setCharPref("identity.fxaccounts.loglevel", "DEBUG");
function run_test() {
run_next_test();
@ -37,6 +48,7 @@ add_task(function test_simple() {
let fxa = new FxAccounts({});
let creds = {
uid: "abcd",
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
@ -58,7 +70,7 @@ add_task(function test_simple() {
Assert.ok(!("kB" in data.accountData), "kB not stored in clear text");
let login = getLoginMgrData();
Assert.strictEqual(login.username, creds.email, "email matches");
Assert.strictEqual(login.username, creds.email, "email used for username");
let loginData = JSON.parse(login.password);
Assert.strictEqual(loginData.version, data.version, "same version flag in both places");
Assert.strictEqual(loginData.accountData.kA, creds.kA, "correct kA in the login mgr");
@ -76,6 +88,7 @@ add_task(function test_MPLocked() {
let fxa = new FxAccounts({});
let creds = {
uid: "abcd",
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
@ -83,8 +96,9 @@ add_task(function test_MPLocked() {
verified: true
};
Assert.strictEqual(getLoginMgrData(), null, "no login mgr at the start");
// tell the storage that the MP is locked.
fxa.internal.signedInUserStorage.__defineGetter__("_isLoggedIn", () => false);
setLoginMgrLoggedInState(false);
yield fxa.setSignedInUser(creds);
// This should have stored stuff in the .json, and the login manager stuff
@ -103,123 +117,14 @@ add_task(function test_MPLocked() {
yield fxa.signOut(/* localOnly = */ true)
});
add_task(function test_migrationMPUnlocked() {
// first manually save a signedInUser.json to simulate a first-run with
// pre-migrated data.
let fxa = new FxAccounts({});
let creds = {
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
kB: "the kB value",
verified: true
};
let toWrite = {
version: fxa.version,
accountData: creds,
}
let path = OS.Path.join(OS.Constants.Path.profileDir, "signedInUser.json");
yield CommonUtils.writeJSON(toWrite, path);
// now load it - it should migrate.
let data = yield fxa.getSignedInUser();
Assert.deepEqual(data, creds, "we got all the data back");
// and verify it was actually migrated - re-read signedInUser back.
data = yield CommonUtils.readJSON(path);
Assert.strictEqual(data.accountData.email, creds.email, "correct email in the clear text");
Assert.strictEqual(data.accountData.sessionToken, creds.sessionToken, "correct sessionToken in the clear text");
Assert.strictEqual(data.accountData.verified, creds.verified, "correct verified flag");
Assert.ok(!("kA" in data.accountData), "kA not stored in clear text");
Assert.ok(!("kB" in data.accountData), "kB not stored in clear text");
let login = getLoginMgrData();
Assert.strictEqual(login.username, creds.email, "email matches");
let loginData = JSON.parse(login.password);
Assert.strictEqual(loginData.version, data.version, "same version flag in both places");
Assert.strictEqual(loginData.accountData.kA, creds.kA, "correct kA in the login mgr");
Assert.strictEqual(loginData.accountData.kB, creds.kB, "correct kB in the login mgr");
Assert.ok(!("email" in loginData), "email not stored in the login mgr json");
Assert.ok(!("sessionToken" in loginData), "sessionToken not stored in the login mgr json");
Assert.ok(!("verified" in loginData), "verified not stored in the login mgr json");
yield fxa.signOut(/* localOnly = */ true);
Assert.strictEqual(getLoginMgrData(), null, "login mgr data deleted on logout");
});
add_task(function test_migrationMPLocked() {
// first manually save a signedInUser.json to simulate a first-run with
// pre-migrated data.
let fxa = new FxAccounts({});
let creds = {
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
kB: "the kB value",
verified: true
};
let toWrite = {
version: fxa.version,
accountData: creds,
}
let path = OS.Path.join(OS.Constants.Path.profileDir, "signedInUser.json");
yield CommonUtils.writeJSON(toWrite, path);
// pretend the MP is locked.
fxa.internal.signedInUserStorage.__defineGetter__("_isLoggedIn", () => false);
// now load it - it should *not* migrate, but should only give the JSON-safe
// data back.
let data = yield fxa.getSignedInUser();
Assert.ok(!data.kA);
Assert.ok(!data.kB);
// and verify the data on disk wasn't migrated.
data = yield CommonUtils.readJSON(path);
Assert.deepEqual(data, toWrite);
// Now "unlock" and re-ask for the signedInUser - it should migrate.
fxa.internal.signedInUserStorage.__defineGetter__("_isLoggedIn", () => true);
data = yield fxa.getSignedInUser();
// this time we should have got all the data, not just the JSON-safe fields.
Assert.strictEqual(data.kA, creds.kA);
Assert.strictEqual(data.kB, creds.kB);
// And verify the data in the JSON was migrated
data = yield CommonUtils.readJSON(path);
Assert.strictEqual(data.accountData.email, creds.email, "correct email in the clear text");
Assert.strictEqual(data.accountData.sessionToken, creds.sessionToken, "correct sessionToken in the clear text");
Assert.strictEqual(data.accountData.verified, creds.verified, "correct verified flag");
Assert.ok(!("kA" in data.accountData), "kA not stored in clear text");
Assert.ok(!("kB" in data.accountData), "kB not stored in clear text");
let login = getLoginMgrData();
Assert.strictEqual(login.username, creds.email, "email matches");
let loginData = JSON.parse(login.password);
Assert.strictEqual(loginData.version, data.version, "same version flag in both places");
Assert.strictEqual(loginData.accountData.kA, creds.kA, "correct kA in the login mgr");
Assert.strictEqual(loginData.accountData.kB, creds.kB, "correct kB in the login mgr");
Assert.ok(!("email" in loginData), "email not stored in the login mgr json");
Assert.ok(!("sessionToken" in loginData), "sessionToken not stored in the login mgr json");
Assert.ok(!("verified" in loginData), "verified not stored in the login mgr json");
yield fxa.signOut(/* localOnly = */ true);
Assert.strictEqual(getLoginMgrData(), null, "login mgr data deleted on logout");
});
add_task(function test_consistentWithMPEdgeCases() {
setLoginMgrLoggedInState(true);
let fxa = new FxAccounts({});
let creds1 = {
uid: "uid1",
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
@ -228,6 +133,7 @@ add_task(function test_consistentWithMPEdgeCases() {
};
let creds2 = {
uid: "uid2",
email: "test2@example.com",
sessionToken: "sessionToken2",
kA: "the kA value2",
@ -240,7 +146,7 @@ add_task(function test_consistentWithMPEdgeCases() {
// tell the storage that the MP is locked - this will prevent logout from
// being able to clear the data.
fxa.internal.signedInUserStorage.__defineGetter__("_isLoggedIn", () => false);
setLoginMgrLoggedInState(false);
// now set the second credentials.
yield fxa.setSignedInUser(creds2);
@ -252,9 +158,9 @@ add_task(function test_consistentWithMPEdgeCases() {
Assert.strictEqual(JSON.parse(login.password).accountData.kA, creds1.kA,
"stale data still in login mgr");
// Make a new FxA instance (otherwise the values in memory will be used.)
// Because we haven't overridden _isLoggedIn for this new instance it will
// treat the MP as unlocked.
// Make a new FxA instance (otherwise the values in memory will be used)
// and we want the login manager to be unlocked.
setLoginMgrLoggedInState(true);
fxa = new FxAccounts({});
let accountData = yield fxa.getSignedInUser();
@ -264,46 +170,28 @@ add_task(function test_consistentWithMPEdgeCases() {
yield fxa.signOut(/* localOnly = */ true)
});
add_task(function test_migration() {
// manually write out the full creds data to the JSON - this will look like
// old data that needs migration.
let creds = {
email: "test@example.com",
sessionToken: "sessionToken",
kA: "the kA value",
kB: "the kB value",
verified: true
};
let toWrite = {
version: 1,
accountData: creds,
};
// A test for the fact we will accept either a UID or email when looking in
// the login manager.
add_task(function test_uidMigration() {
setLoginMgrLoggedInState(true);
Assert.strictEqual(getLoginMgrData(), null, "expect no logins at the start");
let path = OS.Path.join(OS.Constants.Path.profileDir, "signedInUser.json");
let data = yield CommonUtils.writeJSON(toWrite, path);
// create the login entry using uid as a key.
let contents = {kA: "kA"};
// Create an FxA object - and tell it to load the data.
let fxa = new FxAccounts({});
data = yield fxa.getSignedInUser();
let loginInfo = new Components.Constructor(
"@mozilla.org/login-manager/loginInfo;1", Ci.nsILoginInfo, "init");
let login = new loginInfo(FXA_PWDMGR_HOST,
null, // aFormSubmitURL,
FXA_PWDMGR_REALM, // aHttpRealm,
"uid", // aUsername
JSON.stringify(contents), // aPassword
"", // aUsernameField
"");// aPasswordField
Services.logins.addLogin(login);
Assert.deepEqual(data, creds, "we should have everything available");
// now sniff the data on disk - it should have been magically migrated.
data = yield CommonUtils.readJSON(path);
Assert.strictEqual(data.accountData.email, creds.email, "correct email in the clear text");
Assert.strictEqual(data.accountData.sessionToken, creds.sessionToken, "correct sessionToken in the clear text");
Assert.strictEqual(data.accountData.verified, creds.verified, "correct verified flag");
Assert.ok(!("kA" in data.accountData), "kA not stored in clear text");
Assert.ok(!("kB" in data.accountData), "kB not stored in clear text");
// and it should magically be in the login manager.
let login = getLoginMgrData();
Assert.strictEqual(login.username, creds.email);
// and that we do have the first kA in the login manager.
Assert.strictEqual(JSON.parse(login.password).accountData.kA, creds.kA,
"kA was migrated");
yield fxa.signOut(/* localOnly = */ true)
// ensure we read it.
let storage = new LoginManagerStorage();
let got = yield storage.get("uid", "foo@bar.com");
Assert.deepEqual(got, contents);
});

View File

@ -8,6 +8,9 @@ Cu.import("resource://gre/modules/FxAccountsClient.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
Cu.import("resource://gre/modules/osfile.jsm");
// We grab some additional stuff via backstage passes.
let {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
function promiseNotification(topic) {
return new Promise(resolve => {
let observe = () => {
@ -18,6 +21,43 @@ function promiseNotification(topic) {
});
}
// A storage manager that doesn't actually write anywhere.
function MockStorageManager() {
}
MockStorageManager.prototype = {
promiseInitialized: Promise.resolve(),
initialize(accountData) {
this.accountData = accountData;
},
finalize() {
return Promise.resolve();
},
getAccountData() {
return Promise.resolve(this.accountData);
},
updateAccountData(updatedFields) {
for (let [name, value] of Iterator(updatedFields)) {
if (value == null) {
delete this.accountData[name];
} else {
this.accountData[name] = value;
}
}
return Promise.resolve();
},
deleteAccountData() {
this.accountData = null;
return Promise.resolve();
}
}
// Just enough mocks so we can avoid hawk etc.
function MockFxAccountsClient() {
this._email = "nobody@example.com";
@ -41,6 +81,12 @@ MockFxAccountsClient.prototype = {
function MockFxAccounts() {
return new FxAccounts({
fxAccountsClient: new MockFxAccountsClient(),
newAccountState(credentials) {
// we use a real accountState but mocked storage.
let storage = new MockStorageManager();
storage.initialize(credentials);
return new AccountState(this, storage);
},
});
}
@ -82,132 +128,22 @@ add_task(function testCacheStorage() {
cas.setCachedToken(scopeArray, tokenData);
deepEqual(cas.getCachedToken(scopeArray), tokenData);
deepEqual(cas.getAllCachedTokens(), [tokenData]);
deepEqual(cas.oauthTokens, {"bar|foo": tokenData});
// wait for background write to complete.
yield promiseWritten;
// Check the token cache was written to signedInUserOAuthTokens.json.
let path = OS.Path.join(OS.Constants.Path.profileDir, DEFAULT_OAUTH_TOKENS_FILENAME);
let data = yield CommonUtils.readJSON(path);
ok(data.tokens, "the data is in the json");
equal(data.uid, "1234@lcip.org", "The user's uid is in the json");
// Check it's all in the json.
let expectedKey = "bar|foo";
let entry = data.tokens[expectedKey];
ok(entry, "our key is in the json");
deepEqual(entry, tokenData, "correct token is in the json");
// Check the token cache made it to our mocked storage.
deepEqual(cas.storageManager.accountData.oauthTokens, {"bar|foo": tokenData});
// Drop the token from the cache and ensure it is removed from the json.
promiseWritten = promiseNotification("testhelper-fxa-cache-persist-done");
yield cas.removeCachedToken("token1");
deepEqual(cas.getAllCachedTokens(), []);
deepEqual(cas.oauthTokens, {});
yield promiseWritten;
data = yield CommonUtils.readJSON(path);
ok(!data.tokens[expectedKey], "our key was removed from the json");
deepEqual(cas.storageManager.accountData.oauthTokens, {});
// sign out and the token storage should end up with null.
let storageManager = cas.storageManager; // .signOut() removes the attribute.
yield fxa.signOut( /* localOnly = */ true);
data = yield CommonUtils.readJSON(path);
ok(data === null, "data wiped on signout");
});
// Test that the tokens are available after a full read of credentials from disk.
add_task(function testCacheAfterRead() {
let fxa = yield createMockFxA();
// Hook what the impl calls to save to disk.
let cas = fxa.internal.currentAccountState;
let origPersistCached = cas._persistCachedTokens.bind(cas)
cas._persistCachedTokens = function() {
return origPersistCached().then(() => {
Services.obs.notifyObservers(null, "testhelper-fxa-cache-persist-done", null);
});
};
let promiseWritten = promiseNotification("testhelper-fxa-cache-persist-done");
let tokenData = {token: "token1", somethingelse: "something else"};
let scopeArray = ["foo", "bar"];
cas.setCachedToken(scopeArray, tokenData);
yield promiseWritten;
// trick things so the data is re-read from disk.
cas.signedInUser = null;
cas.oauthTokens = null;
yield cas.getUserAccountData();
ok(cas.oauthTokens, "token data was re-read");
deepEqual(cas.getCachedToken(scopeArray), tokenData);
});
// Test that the tokens are saved after we read user credentials from disk.
add_task(function testCacheAfterRead() {
let fxa = yield createMockFxA();
// Hook what the impl calls to save to disk.
let cas = fxa.internal.currentAccountState;
let origPersistCached = cas._persistCachedTokens.bind(cas)
// trick things so that FxAccounts is in the mode where we're reading data
// from disk each time getSignedInUser() is called (ie, where .signedInUser
// remains null)
cas.signedInUser = null;
cas.oauthTokens = null;
yield cas.getUserAccountData();
// hook our "persist" function.
cas._persistCachedTokens = function() {
return origPersistCached().then(() => {
Services.obs.notifyObservers(null, "testhelper-fxa-cache-persist-done", null);
});
};
let promiseWritten = promiseNotification("testhelper-fxa-cache-persist-done");
// save a new token - it should be persisted.
let tokenData = {token: "token1", somethingelse: "something else"};
let scopeArray = ["foo", "bar"];
cas.setCachedToken(scopeArray, tokenData);
yield promiseWritten;
// re-read the tokens directly from the storage to ensure they were persisted.
let got = yield cas.signedInUserStorage.getOAuthTokens();
ok(got, "got persisted data");
ok(got.tokens, "have tokens");
// this is internal knowledge of how scopes get turned into "keys", but that's OK
ok(got.tokens["bar|foo"], "have our scope");
equal(got.tokens["bar|foo"].token, "token1", "have our token");
});
// Test that the tokens are ignored when the token storage has an incorrect uid.
add_task(function testCacheAfterReadBadUID() {
let fxa = yield createMockFxA();
// Hook what the impl calls to save to disk.
let cas = fxa.internal.currentAccountState;
let origPersistCached = cas._persistCachedTokens.bind(cas)
cas._persistCachedTokens = function() {
return origPersistCached().then(() => {
Services.obs.notifyObservers(null, "testhelper-fxa-cache-persist-done", null);
});
};
let promiseWritten = promiseNotification("testhelper-fxa-cache-persist-done");
let tokenData = {token: "token1", somethingelse: "something else"};
let scopeArray = ["foo", "bar"];
cas.setCachedToken(scopeArray, tokenData);
yield promiseWritten;
// trick things so the data is re-read from disk.
cas.signedInUser = null;
cas.oauthTokens = null;
// re-write the tokens data with an invalid UID.
let path = OS.Path.join(OS.Constants.Path.profileDir, DEFAULT_OAUTH_TOKENS_FILENAME);
let data = yield CommonUtils.readJSON(path);
ok(data.tokens, "the data is in the json");
equal(data.uid, "1234@lcip.org", "The user's uid is in the json");
data.uid = "someone_else";
yield CommonUtils.writeJSON(data, path);
yield cas.getUserAccountData();
deepEqual(cas.oauthTokens, {}, "token data ignored due to bad uid");
equal(null, cas.getCachedToken(scopeArray), "no token available");
deepEqual(storageManager.accountData, null);
});

View File

@ -0,0 +1,407 @@
/* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
// Tests for the FxA storage manager.
Cu.import("resource://gre/modules/Task.jsm");
Cu.import("resource://gre/modules/Services.jsm");
Cu.import("resource://gre/modules/FxAccountsStorage.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
Cu.import("resource://gre/modules/Log.jsm");
initTestLogging("Trace");
log.level = Log.Level.Trace;
// A couple of mocks we can use.
function MockedPlainStorage(accountData) {
let data = null;
if (accountData) {
data = {
version: DATA_FORMAT_VERSION,
accountData: accountData,
}
}
this.data = data;
this.numReads = 0;
}
MockedPlainStorage.prototype = {
get: Task.async(function* () {
this.numReads++;
Assert.equal(this.numReads, 1, "should only ever be 1 read of acct data");
return this.data;
}),
set: Task.async(function* (data) {
this.data = data;
}),
};
function MockedSecureStorage(accountData) {
let data = null;
if (accountData) {
data = {
version: DATA_FORMAT_VERSION,
accountData: accountData,
}
}
this.data = data;
this.numReads = 0;
}
MockedSecureStorage.prototype = {
locked: false,
STORAGE_LOCKED: function() {},
get: Task.async(function* (uid, email) {
if (this.locked) {
throw new this.STORAGE_LOCKED();
}
this.numReads++;
Assert.equal(this.numReads, 1, "should only ever be 1 read of unlocked data");
return this.data;
}),
set: Task.async(function* (uid, contents) {
this.data = contents;
}),
}
function add_storage_task(testFunction) {
add_task(function* () {
print("Starting test with secure storage manager");
yield testFunction(new FxAccountsStorageManager());
});
add_task(function* () {
print("Starting test with simple storage manager");
yield testFunction(new FxAccountsStorageManager({useSecure: false}));
});
}
// initialized without account data and there's nothing to read. Not logged in.
add_storage_task(function* checkInitializedEmpty(sm) {
if (sm.secureStorage) {
sm.secureStorage = new MockedSecureStorage(null);
}
yield sm.initialize();
Assert.strictEqual((yield sm.getAccountData()), null);
Assert.rejects(sm.updateAccountData({foo: "bar"}), "No user is logged in")
});
// Initialized with account data (ie, simulating a new user being logged in).
// Should reflect the initial data and be written to storage.
add_storage_task(function* checkNewUser(sm) {
let initialAccountData = {
uid: "uid",
email: "someone@somewhere.com",
kA: "kA",
};
sm.plainStorage = new MockedPlainStorage()
if (sm.secureStorage) {
sm.secureStorage = new MockedSecureStorage(null);
}
yield sm.initialize(initialAccountData);
let accountData = yield sm.getAccountData();
Assert.equal(accountData.uid, initialAccountData.uid);
Assert.equal(accountData.email, initialAccountData.email);
Assert.equal(accountData.kA, initialAccountData.kA);
// and it should have been written to storage.
Assert.equal(sm.plainStorage.data.accountData.uid, initialAccountData.uid);
Assert.equal(sm.plainStorage.data.accountData.email, initialAccountData.email);
// check secure
if (sm.secureStorage) {
Assert.equal(sm.secureStorage.data.accountData.kA, initialAccountData.kA);
} else {
Assert.equal(sm.plainStorage.data.accountData.kA, initialAccountData.kA);
}
});
// Initialized without account data but storage has it available.
add_storage_task(function* checkEverythingRead(sm) {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
if (sm.secureStorage) {
sm.secureStorage = new MockedSecureStorage(null);
}
yield sm.initialize();
let accountData = yield sm.getAccountData();
Assert.ok(accountData, "read account data");
Assert.equal(accountData.uid, "uid");
Assert.equal(accountData.email, "someone@somewhere.com");
// Update the data - we should be able to fetch it back and it should appear
// in our storage.
yield sm.updateAccountData({verified: true, foo: "bar", kA: "kA"});
accountData = yield sm.getAccountData();
Assert.equal(accountData.foo, "bar");
Assert.equal(accountData.kA, "kA");
// Check the new value was written to storage.
yield sm._promiseStorageComplete; // storage is written in the background.
// "verified" is a plain-text field.
Assert.equal(sm.plainStorage.data.accountData.verified, true);
// "kA" and "foo" are secure
if (sm.secureStorage) {
Assert.equal(sm.secureStorage.data.accountData.kA, "kA");
Assert.equal(sm.secureStorage.data.accountData.foo, "bar");
} else {
Assert.equal(sm.plainStorage.data.accountData.kA, "kA");
Assert.equal(sm.plainStorage.data.accountData.foo, "bar");
}
});
add_storage_task(function* checkInvalidUpdates(sm) {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
if (sm.secureStorage) {
sm.secureStorage = new MockedSecureStorage(null);
}
Assert.rejects(sm.updateAccountData({uid: "another"}), "Can't change");
Assert.rejects(sm.updateAccountData({email: "someoneelse"}), "Can't change");
});
add_storage_task(function* checkNullUpdatesRemovedUnlocked(sm) {
if (sm.secureStorage) {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "kA", kB: "kB"});
} else {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com",
kA: "kA", kB: "kB"});
}
yield sm.initialize();
yield sm.updateAccountData({kA: null});
let accountData = yield sm.getAccountData();
Assert.ok(!accountData.kA);
Assert.equal(accountData.kB, "kB");
});
add_storage_task(function* checkDelete(sm) {
if (sm.secureStorage) {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "kA", kB: "kB"});
} else {
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com",
kA: "kA", kB: "kB"});
}
yield sm.initialize();
yield sm.deleteAccountData();
// Storage should have been reset to null.
Assert.equal(sm.plainStorage.data, null);
if (sm.secureStorage) {
Assert.equal(sm.secureStorage.data, null);
}
// And everything should reflect no user.
Assert.equal((yield sm.getAccountData()), null);
});
// Some tests only for the secure storage manager.
add_task(function* checkNullUpdatesRemovedLocked() {
let sm = new FxAccountsStorageManager();
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "kA", kB: "kB"});
sm.secureStorage.locked = true;
yield sm.initialize();
yield sm.updateAccountData({kA: null});
let accountData = yield sm.getAccountData();
Assert.ok(!accountData.kA);
// still no kB as we are locked.
Assert.ok(!accountData.kB);
// now unlock - should still be no kA but kB should appear.
sm.secureStorage.locked = false;
accountData = yield sm.getAccountData();
Assert.ok(!accountData.kA);
Assert.equal(accountData.kB, "kB");
// And secure storage should have been written with our previously-cached
// data.
Assert.strictEqual(sm.secureStorage.data.accountData.kA, undefined);
Assert.strictEqual(sm.secureStorage.data.accountData.kB, "kB");
});
add_task(function* checkEverythingReadSecure() {
let sm = new FxAccountsStorageManager();
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "kA"});
yield sm.initialize();
let accountData = yield sm.getAccountData();
Assert.ok(accountData, "read account data");
Assert.equal(accountData.uid, "uid");
Assert.equal(accountData.email, "someone@somewhere.com");
Assert.equal(accountData.kA, "kA");
});
add_task(function* checkLockedUpdates() {
let sm = new FxAccountsStorageManager();
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "old-kA", kB: "kB"});
sm.secureStorage.locked = true;
yield sm.initialize();
let accountData = yield sm.getAccountData();
// requesting kA and kB will fail as storage is locked.
Assert.ok(!accountData.kA);
Assert.ok(!accountData.kB);
// While locked we can still update it and see the updated value.
sm.updateAccountData({kA: "new-kA"});
accountData = yield sm.getAccountData();
Assert.equal(accountData.kA, "new-kA");
// unlock.
sm.secureStorage.locked = false;
accountData = yield sm.getAccountData();
// should reflect the value we updated and the one we didn't.
Assert.equal(accountData.kA, "new-kA");
Assert.equal(accountData.kB, "kB");
// And storage should also reflect it.
Assert.strictEqual(sm.secureStorage.data.accountData.kA, "new-kA");
Assert.strictEqual(sm.secureStorage.data.accountData.kB, "kB");
});
// Some tests for the "storage queue" functionality.
// A helper for our queued tests. It creates a StorageManager and then queues
// an unresolved promise. The tests then do additional setup and checks, then
// resolve or reject the blocked promise.
let setupStorageManagerForQueueTest = Task.async(function* () {
let sm = new FxAccountsStorageManager();
sm.plainStorage = new MockedPlainStorage({uid: "uid", email: "someone@somewhere.com"})
sm.secureStorage = new MockedSecureStorage({kA: "kA"});
sm.secureStorage.locked = true;
yield sm.initialize();
let resolveBlocked, rejectBlocked;
let blockedPromise = new Promise((resolve, reject) => {
resolveBlocked = resolve;
rejectBlocked = reject;
});
sm._queueStorageOperation(() => blockedPromise);
return {sm, blockedPromise, resolveBlocked, rejectBlocked}
});
// First the general functionality.
add_task(function* checkQueueSemantics() {
let { sm, resolveBlocked } = yield setupStorageManagerForQueueTest();
// We've one unresolved promise in the queue - add another promise.
let resolveSubsequent;
let subsequentPromise = new Promise(resolve => {
resolveSubsequent = resolve;
});
let subsequentCalled = false;
sm._queueStorageOperation(() => {
subsequentCalled = true;
resolveSubsequent();
return subsequentPromise;
});
// Our "subsequent" function should not have been called yet.
Assert.ok(!subsequentCalled);
// Release our blocked promise.
resolveBlocked();
// Our subsequent promise should end up resolved.
yield subsequentPromise;
Assert.ok(subsequentCalled);
yield sm.finalize();
});
// Check that a queued promise being rejected works correctly.
add_task(function* checkQueueSemanticsOnError() {
let { sm, blockedPromise, rejectBlocked } = yield setupStorageManagerForQueueTest();
let resolveSubsequent;
let subsequentPromise = new Promise(resolve => {
resolveSubsequent = resolve;
});
let subsequentCalled = false;
sm._queueStorageOperation(() => {
subsequentCalled = true;
resolveSubsequent();
return subsequentPromise;
});
// Our "subsequent" function should not have been called yet.
Assert.ok(!subsequentCalled);
// Reject our blocked promise - the subsequent operations should still work
// correctly.
rejectBlocked("oh no");
// Our subsequent promise should end up resolved.
yield subsequentPromise;
Assert.ok(subsequentCalled);
// But the first promise should reflect the rejection.
try {
yield blockedPromise;
Assert.ok(false, "expected this promise to reject");
} catch (ex) {
Assert.equal(ex, "oh no");
}
yield sm.finalize();
});
// And some tests for the specific operations that are queued.
add_task(function* checkQueuedReadAndUpdate() {
let { sm, resolveBlocked } = yield setupStorageManagerForQueueTest();
// Mock the underlying operations
// _doReadAndUpdateSecure is queued by _maybeReadAndUpdateSecure
let _doReadCalled = false;
sm._doReadAndUpdateSecure = () => {
_doReadCalled = true;
return Promise.resolve();
}
let resultPromise = sm._maybeReadAndUpdateSecure();
Assert.ok(!_doReadCalled);
resolveBlocked();
yield resultPromise;
Assert.ok(_doReadCalled);
yield sm.finalize();
});
add_task(function* checkQueuedWrite() {
let { sm, resolveBlocked } = yield setupStorageManagerForQueueTest();
// Mock the underlying operations
let __writeCalled = false;
sm.__write = () => {
__writeCalled = true;
return Promise.resolve();
}
let writePromise = sm._write();
Assert.ok(!__writeCalled);
resolveBlocked();
yield writePromise;
Assert.ok(__writeCalled);
yield sm.finalize();
});
add_task(function* checkQueuedDelete() {
let { sm, resolveBlocked } = yield setupStorageManagerForQueueTest();
// Mock the underlying operations
let _deleteCalled = false;
sm._deleteAccountData = () => {
_deleteCalled = true;
return Promise.resolve();
}
let resultPromise = sm.deleteAccountData();
Assert.ok(!_deleteCalled);
resolveBlocked();
yield resultPromise;
Assert.ok(_deleteCalled);
yield sm.finalize();
});
function run_test() {
run_next_test();
}

View File

@ -21,3 +21,4 @@ reason = FxAccountsManager is only available for B2G for now
[test_web_channel.js]
skip-if = (appname == 'b2g' || appname == 'thunderbird') # fxa web channels only used on desktop
[test_profile.js]
[test_storage_manager.js]

View File

@ -1603,6 +1603,15 @@ default
In other words, search engines without an ``.identifier``
are prefixed with ``other-``.
Version 2
^^^^^^^^^
Starting with Firefox 40, there is an additional optional value:
cohort
Daily cohort string identifier, recorded if the user is part of
search defaults A/B testing.
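For example, with a daily cohort of ``testcohort`` recorded, a day's data for
this measurement might look roughly like (the default engine value here is
illustrative)::

    "engines": {
      "_v": 2,
      "default": "other-Example Search",
      "cohort": "testcohort"
    }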
org.mozilla.sync.sync
---------------------

View File

@ -60,6 +60,7 @@ const DAILY_LAST_TEXT_FIELD = {type: Metrics.Storage.FIELD_DAILY_LAST_TEXT};
const DAILY_COUNTER_FIELD = {type: Metrics.Storage.FIELD_DAILY_COUNTER};
const TELEMETRY_PREF = "toolkit.telemetry.enabled";
const SEARCH_COHORT_PREF = "browser.search.cohort";
function isTelemetryEnabled(prefs) {
return prefs.get(TELEMETRY_PREF, false);
@ -1630,10 +1631,11 @@ SearchEnginesMeasurement1.prototype = Object.freeze({
__proto__: Metrics.Measurement.prototype,
name: "engines",
version: 1,
version: 2,
fields: {
default: DAILY_LAST_TEXT_FIELD,
cohort: DAILY_LAST_TEXT_FIELD,
},
});
@ -1688,6 +1690,9 @@ this.SearchesProvider.prototype = Object.freeze({
}
yield m.setDailyLastText("default", name);
if (Services.prefs.prefHasUserValue(SEARCH_COHORT_PREF))
yield m.setDailyLastText("cohort", Services.prefs.getCharPref(SEARCH_COHORT_PREF));
}.bind(this));
},

View File

@ -146,7 +146,7 @@ add_task(function* test_default_search_engine() {
let provider = new SearchesProvider();
yield provider.init(storage);
let m = provider.getMeasurement("engines", 1);
let m = provider.getMeasurement("engines", 2);
let now = new Date();
yield provider.collectDailyData();
@ -174,5 +174,14 @@ add_task(function* test_default_search_engine() {
data = yield m.getValues();
Assert.equal(data.days.getDay(now).get("default"), "other-testdefault");
// If no cohort identifier is set, we shouldn't report a cohort.
Assert.equal(data.days.getDay(now).get("cohort"), undefined);
// Set a cohort identifier and verify we record it.
Services.prefs.setCharPref("browser.search.cohort", "testcohort");
yield provider.collectDailyData();
data = yield m.getValues();
Assert.equal(data.days.getDay(now).get("cohort"), "testcohort");
yield storage.close();
});

View File

@ -16,6 +16,8 @@ this.EXPORTED_SYMBOLS = [
"waitForZeroTimer",
"Promise", // from a module import
"add_identity_test",
"MockFxaStorageManager",
"AccountState", // from a module import
];
const {utils: Cu} = Components;
@ -32,6 +34,45 @@ Cu.import("resource://gre/modules/FxAccounts.jsm");
Cu.import("resource://gre/modules/FxAccountsCommon.js");
Cu.import("resource://gre/modules/Promise.jsm");
// and grab non-exported stuff via a backstage pass.
const {AccountState} = Cu.import("resource://gre/modules/FxAccounts.jsm", {});
// A mock "storage manager" for FxAccounts that doesn't actually write anywhere.
function MockFxaStorageManager() {
}
MockFxaStorageManager.prototype = {
promiseInitialized: Promise.resolve(),
initialize(accountData) {
this.accountData = accountData;
},
finalize() {
return Promise.resolve();
},
getAccountData() {
return Promise.resolve(this.accountData);
},
updateAccountData(updatedFields) {
for (let [name, value] of Iterator(updatedFields)) {
if (value == null) {
delete this.accountData[name];
} else {
this.accountData[name] = value;
}
}
return Promise.resolve();
},
deleteAccountData() {
this.accountData = null;
return Promise.resolve();
}
}
/**
* First wait >100ms (nsITimers can take up to that much time to fire, so
* we can account for the timer in delayedAutoconnect) and then two event
@ -126,23 +167,33 @@ this.makeIdentityConfig = function(overrides) {
// config (or the default config if not specified).
this.configureFxAccountIdentity = function(authService,
config = makeIdentityConfig()) {
let MockInternal = {};
let fxa = new FxAccounts(MockInternal);
// until we get better test infrastructure for bid_identity, we set the
// signedin user's "email" to the username, simply because many tests rely on this.
config.fxaccount.user.email = config.username;
fxa.internal.currentAccountState.signedInUser = {
version: DATA_FORMAT_VERSION,
accountData: config.fxaccount.user
};
fxa.internal.currentAccountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
this.cert = {
validUntil: fxa.internal.now() + CERT_LIFETIME,
cert: "certificate",
};
return Promise.resolve(this.cert.cert);
let fxa;
let MockInternal = {
newAccountState(credentials) {
// We only expect this to be called with null indicating the (mock)
// storage should be read.
if (credentials) {
throw new Error("Not expecting to have credentials passed");
}
let storageManager = new MockFxaStorageManager();
storageManager.initialize(config.fxaccount.user);
let accountState = new AccountState(this, storageManager);
// mock getCertificate
accountState.getCertificate = function(data, keyPair, mustBeValidUntil) {
accountState.cert = {
validUntil: fxa.internal.now() + CERT_LIFETIME,
cert: "certificate",
};
return Promise.resolve(this.cert.cert);
}
return accountState;
}
};
fxa = new FxAccounts(MockInternal);
let mockTSC = { // TokenServerClient
getTokenFromBrowserIDAssertion: function(uri, assertion, cb) {
@ -154,7 +205,7 @@ this.configureFxAccountIdentity = function(authService,
authService._tokenServerClient = mockTSC;
// Set the "account" of the browserId manager to be the "email" of the
// logged in user of the mockFXA service.
authService._signedInUser = fxa.internal.currentAccountState.signedInUser.accountData;
authService._signedInUser = config.fxaccount.user;
authService._account = config.fxaccount.user.email;
}

View File

@ -264,8 +264,8 @@ add_task(function test_ensureLoggedIn() {
// arrange for no logged in user.
let fxa = browseridManager._fxaService
let signedInUser = fxa.internal.currentAccountState.signedInUser;
fxa.internal.currentAccountState.signedInUser = null;
let signedInUser = fxa.internal.currentAccountState.storageManager.accountData;
fxa.internal.currentAccountState.storageManager.accountData = null;
browseridManager.initializeWithCurrentIdentity();
Assert.ok(!browseridManager._shouldHaveSyncKeyBundle,
"_shouldHaveSyncKeyBundle should be false so we know we are testing what we think we are.");
@ -273,7 +273,8 @@ add_task(function test_ensureLoggedIn() {
yield Assert.rejects(browseridManager.ensureLoggedIn(), "expecting rejection due to no user");
Assert.ok(browseridManager._shouldHaveSyncKeyBundle,
"_shouldHaveSyncKeyBundle should always be true after ensureLogin completes.");
fxa.internal.currentAccountState.signedInUser = signedInUser;
// Restore the logged in user to what it was.
fxa.internal.currentAccountState.storageManager.accountData = signedInUser;
Status.login = LOGIN_FAILED_LOGIN_REJECTED;
yield Assert.rejects(browseridManager.ensureLoggedIn(),
"LOGIN_FAILED_LOGIN_REJECTED should have caused immediate rejection");
@ -585,7 +586,17 @@ add_task(function test_getKeysMissing() {
fetchAndUnwrapKeys: function () {
return Promise.resolve({});
},
fxAccountsClient: new MockFxAccountsClient()
fxAccountsClient: new MockFxAccountsClient(),
newAccountState(credentials) {
// We only expect this to be called with null indicating the (mock)
// storage should be read.
if (credentials) {
throw new Error("Not expecting to have credentials passed");
}
let storageManager = new MockFxaStorageManager();
storageManager.initialize(identityConfig.fxaccount.user);
return new AccountState(this, storageManager);
},
});
// Add a mock to the currentAccountState object.
@ -597,9 +608,6 @@ add_task(function test_getKeysMissing() {
return Promise.resolve(this.cert.cert);
};
// Ensure the new FxAccounts mock has a signed-in user.
fxa.internal.currentAccountState.signedInUser = browseridManager._fxaService.internal.currentAccountState.signedInUser;
browseridManager._fxaService = fxa;
yield browseridManager.initializeWithCurrentIdentity();
@ -658,11 +666,18 @@ function* initializeIdentityWithHAWKResponseFactory(config, cbGetResponse) {
fxaClient.hawk = new MockedHawkClient();
let internal = {
fxAccountsClient: fxaClient,
newAccountState(credentials) {
// We only expect this to be called with null indicating the (mock)
// storage should be read.
if (credentials) {
throw new Error("Not expecting to have credentials passed");
}
let storageManager = new MockFxaStorageManager();
storageManager.initialize(config.fxaccount.user);
return new AccountState(this, storageManager);
},
}
let fxa = new FxAccounts(internal);
fxa.internal.currentAccountState.signedInUser = {
accountData: config.fxaccount.user,
};
browseridManager._fxaService = fxa;
browseridManager._signedInUser = null;

View File

@ -195,7 +195,6 @@ user_pref("browser.download.panel.shown", true);
// Assume the about:newtab page's intro panels have been shown to not depend on
// which test runs first and happens to open about:newtab
user_pref("browser.newtabpage.introShown", true);
user_pref("browser.newtabpage.updateIntroShown", true);
// Tell the PBackground infrastructure to run a test at startup.
user_pref("pbackground.testing", true);

View File

@ -1504,6 +1504,7 @@ try {
prefs.setCharPref("media.gmp-manager.url.override", "http://%(server)s/dummy-gmp-manager.xml");
prefs.setCharPref("browser.selfsupport.url", "https://%(server)s/selfsupport-dummy/");
prefs.setCharPref("toolkit.telemetry.server", "https://%(server)s/telemetry-dummy");
prefs.setCharPref("browser.search.geoip.url", "https://%(server)s/geoip-dummy");
}
} catch (e) { }

View File

@ -4655,19 +4655,52 @@
"n_buckets": 20,
"description": "Time (ms) it takes for evicting over-quota pings"
},
"TELEMETRY_PENDING_PINGS_SIZE_MB": {
"alert_emails": ["telemetry-client-dev@mozilla.com"],
"expires_in_version": "never",
"kind": "linear",
"high": "17",
"n_buckets": 16,
"description": "The size of the Telemetry pending pings directory (MB). The special value 17 is used to indicate over quota pings."
},
"TELEMETRY_PENDING_PINGS_AGE": {
"alert_emails": ["telemetry-client-dev@mozilla.com"],
"expires_in_version": "never",
"kind": "exponential",
"high": "365",
"n_buckets": 30,
"description": "The age, in days, of the pending pings."
},
"TELEMETRY_PENDING_PINGS_EVICTED_OVER_QUOTA": {
"alert_emails": ["telemetry-client-dev@mozilla.com"],
"expires_in_version": "never",
"kind": "exponential",
"high": "100000",
"n_buckets": 100,
"description": "Number of Telemetry pings evicted from the pending pings directory during cleanup, because they were over the quota"
},
"TELEMETRY_PENDING_EVICTING_OVER_QUOTA_MS": {
"alert_emails": ["telemetry-client-dev@mozilla.com"],
"expires_in_version": "never",
"kind": "exponential",
"high": "300000",
"n_buckets": 20,
"description": "Time (ms) it takes for evicting over-quota pending pings"
},
"TELEMETRY_PENDING_CHECKING_OVER_QUOTA_MS": {
"alert_emails": ["telemetry-client-dev@mozilla.com"],
"expires_in_version": "never",
"kind": "exponential",
"high": "300000",
"n_buckets": 20,
"description": "Time (ms) it takes for checking if the pending pings are over-quota"
},
"TELEMETRY_DISCARDED_CONTENT_PINGS_COUNT": {
"alert_emails": ["perf-telemetry-alerts@mozilla.com"],
"expires_in_version": "never",
"kind": "count",
"description": "Count of discarded content payloads."
},
"TELEMETRY_FILES_EVICTED": {
"alert_emails": ["perf-telemetry-alerts@mozilla.com", "rvitillo@mozilla.com"],
"expires_in_version": "never",
"kind": "enumerated",
"n_values": 30,
"description": "Number of telemetry pings evicted at startup"
},
"TELEMETRY_COMPRESS": {
"expires_in_version": "never",
"kind": "exponential",
@ -8135,15 +8168,6 @@
"extended_statistics_ok": true,
"description": "Sanitize: Time it takes to sanitize recent downloads (ms)"
},
"FX_SANITIZE_PASSWORDS": {
"alert_emails": ["firefox-dev@mozilla.org", "gavin@mozilla.com"],
"expires_in_version": "50",
"kind": "exponential",
"high": "30000",
"n_buckets": 20,
"extended_statistics_ok": true,
"description": "Sanitize: Time it takes to sanitize saved passwords (ms)"
},
"FX_SANITIZE_SESSIONS": {
"alert_emails": ["firefox-dev@mozilla.org", "gavin@mozilla.com"],
"expires_in_version": "50",

View File

@ -153,6 +153,7 @@ const PREF_PARTNER_ID = "mozilla.partner.id";
const PREF_TELEMETRY_ENABLED = "toolkit.telemetry.enabled";
const PREF_UPDATE_ENABLED = "app.update.enabled";
const PREF_UPDATE_AUTODOWNLOAD = "app.update.auto";
const PREF_SEARCH_COHORT = "browser.search.cohort";
const EXPERIMENTS_CHANGED_TOPIC = "experiments-changed";
const SEARCH_ENGINE_MODIFIED_TOPIC = "browser-search-engine-modified";
@ -891,6 +892,10 @@ EnvironmentCache.prototype = {
this._currentEnvironment.settings.defaultSearchEngine = this._getDefaultSearchEngine();
this._currentEnvironment.settings.defaultSearchEngineData =
Services.search.getDefaultEngineInfo();
// Record the cohort identifier used for search defaults A/B testing.
if (Services.prefs.prefHasUserValue(PREF_SEARCH_COHORT))
this._currentEnvironment.settings.searchCohort = Services.prefs.getCharPref(PREF_SEARCH_COHORT);
},
/**

View File

@ -79,17 +79,9 @@ const SEND_TICK_DELAY = 1 * MS_IN_A_MINUTE;
// This exponential backoff will be reset by external ping submissions & idle-daily.
const SEND_MAXIMUM_BACKOFF_DELAY_MS = 120 * MS_IN_A_MINUTE;
// Files that have been lying around for longer than MAX_PING_FILE_AGE are
// deleted without being loaded.
const MAX_PING_FILE_AGE = 14 * 24 * 60 * MS_IN_A_MINUTE; // 2 weeks
// Files that are older than OVERDUE_PING_FILE_AGE, but younger than
// MAX_PING_FILE_AGE indicate that we need to send all of our pings ASAP.
// The age of a pending ping to be considered overdue (in milliseconds).
const OVERDUE_PING_FILE_AGE = 7 * 24 * 60 * MS_IN_A_MINUTE; // 1 week
// Maximum number of pings to save.
const MAX_LRU_PINGS = 50;
/**
* This is a policy object used to override behavior within this module.
* Tests override properties on this object to allow for control of behavior
@ -154,12 +146,6 @@ function gzipCompressString(string) {
this.TelemetrySend = {
/**
* Maximum age in ms of a pending ping file before it gets evicted.
*/
get MAX_PING_FILE_AGE() {
return MAX_PING_FILE_AGE;
},
/**
* Age in ms of a pending ping to be considered overdue.
@ -168,13 +154,6 @@ this.TelemetrySend = {
return OVERDUE_PING_FILE_AGE;
},
/**
* The maximum number of pending pings we keep in the backlog.
*/
get MAX_LRU_PINGS() {
return MAX_LRU_PINGS;
},
get pendingPingCount() {
return TelemetrySendImpl.pendingPingCount;
},
@ -212,13 +191,6 @@ this.TelemetrySend = {
return TelemetrySendImpl.submitPing(ping);
},
/**
* Count of pending pings that were discarded at startup due to being too old.
*/
get discardedPingsCount() {
return TelemetrySendImpl.discardedPingsCount;
},
/**
* Count of pending pings that were found to be overdue at startup.
*/
@ -533,8 +505,6 @@ let TelemetrySendImpl = {
// This holds pings that we currently try and haven't persisted yet.
_currentPings: new Map(),
// Count of pending pings we discarded for age on startup.
_discardedPingsCount: 0,
// Count of pending pings that were overdue.
_overduePingCount: 0,
@ -550,10 +520,6 @@ let TelemetrySendImpl = {
return this._logger;
},
get discardedPingsCount() {
return this._discardedPingsCount;
},
get overduePingsCount() {
return this._overduePingCount;
},
@ -576,8 +542,6 @@ let TelemetrySendImpl = {
this._testMode = testing;
this._sendingEnabled = true;
this._discardedPingsCount = 0;
Services.obs.addObserver(this, TOPIC_IDLE_DAILY, false);
this._server = Preferences.get(PREF_SERVER, undefined);
@ -589,6 +553,10 @@ let TelemetrySendImpl = {
this._log.error("setup - _checkPendingPings rejected", ex);
}
// Enforce the pending pings storage quota. It could take a while so don't
// block on it.
TelemetryStorage.runEnforcePendingPingsQuotaTask();
// Start sending pings, but don't block on this.
SendScheduler.triggerSendingPings(true);
}),
@ -606,45 +574,19 @@ let TelemetrySendImpl = {
return;
}
// Remove old pings that we haven't been able to send yet.
const now = new Date();
const tooOld = (info) => (now.getTime() - info.lastModificationDate) > MAX_PING_FILE_AGE;
const oldPings = infos.filter((info) => tooOld(info));
infos = infos.filter((info) => !tooOld(info));
this._log.info("_checkPendingPings - clearing out " + oldPings.length + " old pings");
for (let info of oldPings) {
try {
yield TelemetryStorage.removePendingPing(info.id);
++this._discardedPingsCount;
} catch(ex) {
this._log.error("_checkPendingPings - failed to remove old ping", ex);
}
}
// Keep only the last MAX_LRU_PINGS entries to avoid that the backlog overgrows.
const shouldEvict = infos.splice(MAX_LRU_PINGS, infos.length);
let evictedCount = 0;
this._log.info("_checkPendingPings - evicting " + shouldEvict.length + " pings to " +
"avoid overgrowing the backlog");
for (let info of shouldEvict) {
try {
yield TelemetryStorage.removePendingPing(info.id);
++evictedCount;
} catch(ex) {
this._log.error("_checkPendingPings - failed to evict ping", ex);
}
}
Services.telemetry.getHistogramById('TELEMETRY_FILES_EVICTED')
.add(evictedCount);
const now = Policy.now();
// Check for overdue pings.
const overduePings = infos.filter((info) =>
(now.getTime() - info.lastModificationDate) > OVERDUE_PING_FILE_AGE);
this._overduePingCount = overduePings.length;
// Submit the age of the pending pings.
for (let pingInfo of infos) {
const ageInDays =
Utils.millisecondsToDays(Math.abs(now.getTime() - pingInfo.lastModificationDate));
Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_AGE").add(ageInDays);
}
}),
shutdown: Task.async(function*() {
@ -678,11 +620,9 @@ let TelemetrySendImpl = {
this._currentPings = new Map();
this._overduePingCount = 0;
this._discardedPingsCount = 0;
const histograms = [
"TELEMETRY_SUCCESS",
"TELEMETRY_FILES_EVICTED",
"TELEMETRY_SEND",
"TELEMETRY_PING",
];

View File

@ -929,7 +929,6 @@ let Impl = {
}
ret.pingsOverdue = TelemetrySend.overduePingsCount;
ret.pingsDiscarded = TelemetrySend.discardedPingsCount;
return ret;
},

View File

@ -12,6 +12,7 @@ const Ci = Components.interfaces;
const Cr = Components.results;
const Cu = Components.utils;
Cu.import("resource://gre/modules/AppConstants.jsm", this);
Cu.import("resource://gre/modules/Log.jsm");
Cu.import("resource://gre/modules/Services.jsm", this);
Cu.import("resource://gre/modules/XPCOMUtils.jsm", this);
@ -49,10 +50,18 @@ const MAX_ARCHIVED_PINGS_RETENTION_MS = 180 * 24 * 60 * 60 * 1000; // 180 days
// Maximum space the archive can take on disk (in Bytes).
const ARCHIVE_QUOTA_BYTES = 120 * 1024 * 1024; // 120 MB
// Maximum space the outgoing pings can take on disk, for Desktop (in Bytes).
const PENDING_PINGS_QUOTA_BYTES_DESKTOP = 15 * 1024 * 1024; // 15 MB
// Maximum space the outgoing pings can take on disk, for Mobile (in Bytes).
const PENDING_PINGS_QUOTA_BYTES_MOBILE = 1024 * 1024; // 1 MB
// This special value is submitted when the archive is outside of the quota.
const ARCHIVE_SIZE_PROBE_SPECIAL_VALUE = 300;
// This special value is submitted when the pending pings directory is over quota, as
// we don't know the size of the pings above the quota.
const PENDING_PINGS_SIZE_PROBE_SPECIAL_VALUE = 17;
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
/**
@ -61,6 +70,9 @@ const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12
let Policy = {
now: () => new Date(),
getArchiveQuota: () => ARCHIVE_QUOTA_BYTES,
getPendingPingsQuota: () => (["android", "gonk"].indexOf(AppConstants.platform) != -1)
? PENDING_PINGS_QUOTA_BYTES_MOBILE
: PENDING_PINGS_QUOTA_BYTES_DESKTOP,
};
/**
@ -141,6 +153,15 @@ this.TelemetryStorage = {
return TelemetryStorageImpl.runCleanPingArchiveTask();
},
/**
* Run the task to enforce the pending pings quota.
*
* @return {Promise} Resolved when the cleanup task completes.
*/
runEnforcePendingPingsQuotaTask: function() {
return TelemetryStorageImpl.runEnforcePendingPingsQuotaTask();
},
/**
* Reset the storage state in tests.
*/
@ -155,6 +176,13 @@ this.TelemetryStorage = {
return (TelemetryStorageImpl._cleanArchiveTask || Promise.resolve());
},
/**
* Test method that allows waiting on the pending pings quota task to finish.
*/
testPendingQuotaTaskPromise: function() {
return (TelemetryStorageImpl._enforcePendingPingsQuotaTask || Promise.resolve());
},
/**
* Save a pending - outgoing - ping to disk and track it.
*
@ -471,6 +499,9 @@ let TelemetryStorageImpl = {
// We use this to cache info on pending pings to avoid scanning the disk more than once.
_pendingPings: new Map(),
// Track the pending pings enforce quota task.
_enforcePendingPingsQuotaTask: null,
// Track the shutdown process to bail out of the clean up task quickly.
_shutdown: false,
@ -490,9 +521,10 @@ let TelemetryStorageImpl = {
shutdown: Task.async(function*() {
this._shutdown = true;
yield this._abortedSessionSerializer.flushTasks();
// If the archive cleaning task is running, block on it. It should bail out as soon
// as possible.
// If the tasks for archive cleaning or pending ping quota are still running, block on
// them. They will bail out as soon as possible.
yield this._cleanArchiveTask;
yield this._enforcePendingPingsQuotaTask;
}),
/**
@ -687,7 +719,7 @@ let TelemetryStorageImpl = {
*/
_enforceArchiveQuota: Task.async(function*() {
this._log.trace("_enforceArchiveQuota");
const startTimeStamp = Policy.now().getTime();
let startTimeStamp = Policy.now().getTime();
// Build an ordered list, from newer to older, of archived pings.
let pingList = [for (p of this._archivedPings) {
@ -753,6 +785,7 @@ let TelemetryStorageImpl = {
this._log.info("_enforceArchiveQuota - archive size: " + archiveSizeInBytes + "bytes"
+ ", safety quota: " + SAFE_QUOTA + "bytes");
startTimeStamp = Policy.now().getTime();
let pingsToPurge = pingList.slice(lastPingIndexToKeep + 1);
// Remove all the pings older than the last one which we are safe to keep.
@ -793,6 +826,118 @@ let TelemetryStorageImpl = {
yield this._enforceArchiveQuota();
}),
/**
* Run the task to enforce the pending pings quota.
*
* @return {Promise} Resolved when the cleanup task completes.
*/
runEnforcePendingPingsQuotaTask: Task.async(function*() {
// If there's a cleaning task already running, return it.
if (this._enforcePendingPingsQuotaTask) {
return this._enforcePendingPingsQuotaTask;
}
// Since there's no quota enforcing task running, start it.
try {
this._enforcePendingPingsQuotaTask = this._enforcePendingPingsQuota();
yield this._enforcePendingPingsQuotaTask;
} finally {
this._enforcePendingPingsQuotaTask = null;
}
}),
/**
* Enforce a disk quota for the pending pings.
* @return {Promise} Resolved when the quota check is complete.
*/
_enforcePendingPingsQuota: Task.async(function*() {
this._log.trace("_enforcePendingPingsQuota");
let startTimeStamp = Policy.now().getTime();
// Build an ordered list, from newer to older, of pending pings.
let pingList = [for (p of this._pendingPings) {
id: p[0],
lastModificationDate: p[1].lastModificationDate,
}];
pingList.sort((a, b) => b.lastModificationDate - a.lastModificationDate);
// If our pending pings directory is too big, we should reduce it to reach 90% of the quota.
const SAFE_QUOTA = Policy.getPendingPingsQuota() * 0.9;
// The index of the last ping to keep. Pings older than this one will be deleted if
// the pending pings directory size exceeds the quota.
let lastPingIndexToKeep = null;
let pendingPingsSizeInBytes = 0;
// Find the disk size of the pending pings directory.
for (let i = 0; i < pingList.length; i++) {
if (this._shutdown) {
this._log.trace("_enforcePendingPingsQuota - Terminating the clean up task due to shutdown");
return;
}
let ping = pingList[i];
// Get the size for this ping.
const fileSize = yield getPendingPingSize(ping.id);
if (!fileSize) {
this._log.warn("_enforcePendingPingsQuota - Unable to find the size of ping " + ping.id);
continue;
}
pendingPingsSizeInBytes += fileSize;
if (pendingPingsSizeInBytes < SAFE_QUOTA) {
// We save the index of the last ping which is ok to keep in order to speed up ping
// pruning.
lastPingIndexToKeep = i;
} else if (pendingPingsSizeInBytes > Policy.getPendingPingsQuota()) {
// Ouch, our pending pings directory size is too big. Bail out and start pruning!
break;
}
}
// Save the time it takes to check if the pending pings are over-quota.
Telemetry.getHistogramById("TELEMETRY_PENDING_CHECKING_OVER_QUOTA_MS")
.add(Math.round(Policy.now().getTime() - startTimeStamp));
let recordHistograms = (sizeInMB, evictedPings, elapsedMs) => {
Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_SIZE_MB").add(sizeInMB);
Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_EVICTED_OVER_QUOTA").add(evictedPings);
Telemetry.getHistogramById("TELEMETRY_PENDING_EVICTING_OVER_QUOTA_MS").add(elapsedMs);
};
// Check if we're using too much space. If not, bail out.
if (pendingPingsSizeInBytes < Policy.getPendingPingsQuota()) {
recordHistograms(Math.round(pendingPingsSizeInBytes / 1024 / 1024), 0, 0);
return;
}
this._log.info("_enforcePendingPingsQuota - size: " + pendingPingsSizeInBytes + "bytes"
+ ", safety quota: " + SAFE_QUOTA + "bytes");
startTimeStamp = Policy.now().getTime();
let pingsToPurge = pingList.slice(lastPingIndexToKeep + 1);
// Remove all the pings older than the last one which we are safe to keep.
for (let ping of pingsToPurge) {
if (this._shutdown) {
this._log.trace("_enforcePendingPingsQuota - Terminating the clean up task due to shutdown");
return;
}
// This list is guaranteed to be in order, so remove the pings at its
// beginning (oldest).
yield this.removePendingPing(ping.id);
}
const endTimeStamp = Policy.now().getTime();
// We don't know the size of the pending pings directory if we are above the quota,
// since we stop scanning once we reach the quota. We use a special value to show
// this condition.
recordHistograms(PENDING_PINGS_SIZE_PROBE_SPECIAL_VALUE, pingsToPurge.length,
Math.ceil(endTimeStamp - startTimeStamp));
}),
/**
* Reset the storage state in tests.
*/
@ -991,7 +1136,8 @@ let TelemetryStorageImpl = {
this._log.trace("loadPendingPing - id: " + id);
let info = this._pendingPings.get(id);
if (!info) {
return;
this._log.trace("loadPendingPing - unknown id " + id);
return Promise.reject(new Error("TelemetryStorage.loadPendingPing - no ping with id " + id));
}
return this.loadPingFile(info.path, false);
@ -1292,6 +1438,20 @@ let getArchivedPingSize = Task.async(function*(aPingId, aDate, aType) {
return 0;
});
/**
* Get the size of the pending ping file on the disk.
* @return {Integer} The file size, in bytes, of the ping file or 0 on errors.
*/
let getPendingPingSize = Task.async(function*(aPingId) {
const path = OS.Path.join(TelemetryStorage.pingDirectoryPath, aPingId);
try {
return (yield OS.File.stat(path)).size;
} catch (e) {}
// That's odd, this ping doesn't seem to exist.
return 0;
});
/**
* Check if a directory name is in the "YYYY-MM" format.
* @param {String} aDirName The name of the pings archive directory.

View File

@ -266,3 +266,8 @@ The object contains:
For privacy, we don't record this for user-installed engines.
``loadPath`` and ``submissionURL`` are not present if ``name`` is ``NONE``.
searchCohort
~~~~~~~~~~~~
If the user has been enrolled in a search default change experiment, this contains a string identifying that experiment. Most user profiles will never be part of any search default change experiment and will not send this value.
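As a rough sketch, mirroring the environment test changes further down (the cohort value is hypothetical), the field can be checked like this:
// Sketch only: the pref name is real; "testcohort" is a made-up value.
Services.prefs.setCharPref("browser.search.cohort", "testcohort");
let env = TelemetryEnvironment.currentEnvironment;
// env.settings.searchCohort == "testcohort"; the property is simply absent
// for profiles that are not enrolled in any experiment.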

View File

@ -1023,9 +1023,7 @@ add_task(function* test_defaultSearchEngine() {
defaultBranch.setCharPref("browser.search.jarURIs", "chrome://testsearchplugin/locale/searchplugins/");
defaultBranch.setBoolPref("browser.search.loadFromJars", true);
// Initialize the search service and disable geoip lookup, so we don't get unwanted
// network connections.
Preferences.set("browser.search.geoip.url", "");
// Initialize the search service.
yield new Promise(resolve => Services.search.init(resolve));
// Our default engine from the JAR file has an identifier. Check if it is correctly
@ -1103,6 +1101,15 @@ add_task(function* test_defaultSearchEngine() {
data = TelemetryEnvironment.currentEnvironment;
checkEnvironmentData(data);
Assert.equal(data.settings.defaultSearchEngine, EXPECTED_SEARCH_ENGINE);
// Check that by default we are not sending a cohort identifier...
Assert.equal(data.settings.searchCohort, undefined);
// ... but that if a cohort identifier is set, we send it.
Services.prefs.setCharPref("browser.search.cohort", "testcohort");
Services.obs.notifyObservers(null, "browser-search-service", "init-complete");
data = TelemetryEnvironment.currentEnvironment;
Assert.equal(data.settings.searchCohort, "testcohort");
});
add_task(function*() {

View File

@ -3,11 +3,8 @@
/**
* This test case populates the profile with some fake stored
* pings, and checks that:
*
* 1) Pings that are considered "expired" are deleted and never sent.
* 2) Pings that are considered "overdue" trigger a send of all
* overdue and recent pings.
* pings, and checks that pending pings are immediately sent
* after delayed init.
*/
"use strict"
@ -27,20 +24,19 @@ XPCOMUtils.defineLazyGetter(this, "gDatareportingService",
.getService(Ci.nsISupports)
.wrappedJSObject);
const Telemetry = Cc["@mozilla.org/base/telemetry;1"].getService(Ci.nsITelemetry);
// We increment TelemetryStorage's MAX_PING_FILE_AGE and
// OVERDUE_PING_FILE_AGE by 1 minute so that our test pings exceed
// those points in time, even taking into account file system imprecision.
const ONE_MINUTE_MS = 60 * 1000;
const EXPIRED_PING_FILE_AGE = TelemetrySend.MAX_PING_FILE_AGE + ONE_MINUTE_MS;
const OVERDUE_PING_FILE_AGE = TelemetrySend.OVERDUE_PING_FILE_AGE + ONE_MINUTE_MS;
const PING_SAVE_FOLDER = "saved-telemetry-pings";
const PING_TIMEOUT_LENGTH = 5000;
const EXPIRED_PINGS = 5;
const OVERDUE_PINGS = 6;
const OLD_FORMAT_PINGS = 4;
const RECENT_PINGS = 4;
const LRU_PINGS = TelemetrySend.MAX_LRU_PINGS;
const TOTAL_EXPECTED_PINGS = OVERDUE_PINGS + RECENT_PINGS + OLD_FORMAT_PINGS;
@ -94,6 +90,15 @@ let clearPings = Task.async(function* (aPingIds) {
}
});
/**
* Fakes the pending pings storage quota.
* @param {Integer} aPendingQuota The new quota, in bytes.
*/
function fakePendingPingsQuota(aPendingQuota) {
let storage = Cu.import("resource://gre/modules/TelemetryStorage.jsm");
storage.Policy.getPendingPingsQuota = () => aPendingQuota;
}
/**
* Returns a handle for the file that a ping should be
* stored in locally.
@ -154,14 +159,6 @@ let clearPendingPings = Task.async(function*() {
}
});
/**
* Creates and returns a TelemetryController instance in "testing"
* mode.
*/
function startTelemetry() {
return TelemetryController.setup();
}
function run_test() {
PingServer.start();
PingServer.registerPingHandler(pingHandler);
@ -192,21 +189,6 @@ add_task(function* setupEnvironment() {
yield clearPendingPings();
});
/**
* Test that pings that are considered too old are just chucked out
* immediately and never sent.
*/
add_task(function* test_expired_pings_are_deleted() {
let pingTypes = [{ num: EXPIRED_PINGS, age: EXPIRED_PING_FILE_AGE }];
let expiredPings = yield createSavedPings(pingTypes);
yield TelemetryController.reset();
assertReceivedPings(0);
yield assertNotSaved(expiredPings);
yield clearPendingPings();
});
/**
* Test that really recent pings are sent on Telemetry initialization.
*/
@ -221,36 +203,6 @@ add_task(function* test_recent_pings_sent() {
yield clearPendingPings();
});
/**
* Test that only the most recent LRU_PINGS pings are kept at startup.
*/
add_task(function* test_most_recent_pings_kept() {
let pingTypes = [
{ num: LRU_PINGS },
{ num: 3, age: ONE_MINUTE_MS },
];
let pings = yield createSavedPings(pingTypes);
let head = pings.slice(0, LRU_PINGS);
let tail = pings.slice(-3);
const evictedHistogram = Services.telemetry.getHistogramById('TELEMETRY_FILES_EVICTED');
evictedHistogram.clear();
yield TelemetryController.reset();
const pending = yield TelemetryStorage.loadPendingPingList();
for (let id of tail) {
const found = pending.some(p => p.id == id);
Assert.ok(!found, "Should have discarded the oldest pings");
}
assertNotSaved(tail);
Assert.equal(evictedHistogram.snapshot().sum, tail.length,
"Should have tracked the evicted ping count");
yield TelemetrySend.shutdown();
yield clearPendingPings();
});
/**
* Create an overdue ping in the old format and try to send it.
*/
@ -322,19 +274,15 @@ add_task(function* test_overdue_old_format() {
});
/**
* Create some recent, expired and overdue pings. The overdue pings should
* trigger a send of all recent and overdue pings, but the expired pings
* should just be deleted.
* Create some recent and overdue pings and verify that they get sent.
*/
add_task(function* test_overdue_pings_trigger_send() {
let pingTypes = [
{ num: RECENT_PINGS },
{ num: EXPIRED_PINGS, age: EXPIRED_PING_FILE_AGE },
{ num: OVERDUE_PINGS, age: OVERDUE_PING_FILE_AGE },
];
let pings = yield createSavedPings(pingTypes);
let recentPings = pings.slice(0, RECENT_PINGS);
let expiredPings = pings.slice(RECENT_PINGS, RECENT_PINGS + EXPIRED_PINGS);
let overduePings = pings.slice(-OVERDUE_PINGS);
yield TelemetryController.reset();
@ -342,13 +290,10 @@ add_task(function* test_overdue_pings_trigger_send() {
assertReceivedPings(TOTAL_EXPECTED_PINGS);
yield assertNotSaved(recentPings);
yield assertNotSaved(expiredPings);
yield assertNotSaved(overduePings);
Assert.equal(TelemetrySend.overduePingsCount, overduePings.length,
"Should have tracked the correct amount of overdue pings");
Assert.equal(TelemetrySend.discardedPingsCount, expiredPings.length,
"Should have tracked the correct amount of expired pings");
yield clearPendingPings();
});
@ -401,6 +346,124 @@ add_task(function* test_overdue_old_format() {
Assert.equal(receivedPings, 1, "We must receive a ping in the old format.");
yield clearPendingPings();
PingServer.resetPingHandler();
});
add_task(function* test_pendingPingsQuota() {
const PING_TYPE = "foo";
const PREF_FHR_UPLOAD = "datareporting.healthreport.uploadEnabled";
// Disable upload so pings don't get sent and removed from the pending pings directory.
Services.prefs.setBoolPref(PREF_FHR_UPLOAD, false);
// Remove all the pending pings then startup and wait for the cleanup task to complete.
// There should be nothing to remove.
yield clearPendingPings();
yield TelemetryController.reset();
yield TelemetrySend.testWaitOnOutgoingPings();
yield TelemetryStorage.testPendingQuotaTaskPromise();
// Remove the pending deletion ping generated when flipping FHR upload off.
yield clearPendingPings();
let expectedPrunedPings = [];
let expectedNotPrunedPings = [];
let checkPendingPings = Task.async(function*() {
// Check that the pruned pings are not on disk anymore.
for (let prunedPingId of expectedPrunedPings) {
yield Assert.rejects(TelemetryStorage.loadPendingPing(prunedPingId),
"Ping " + prunedPingId + " should have been pruned.");
const pingPath = getSavePathForPingId(prunedPingId);
Assert.ok(!(yield OS.File.exists(pingPath)), "The ping should not be on the disk anymore.");
}
// Check that the expected pings are there.
for (let expectedPingId of expectedNotPrunedPings) {
Assert.ok((yield TelemetryStorage.loadPendingPing(expectedPingId)),
"Ping" + expectedPingId + " should be among the pending pings.");
}
});
let pendingPingsInfo = [];
let pingsSizeInBytes = 0;
// Create 10 pings to test the pending pings quota.
for (let days = 1; days < 11; days++) {
const date = fakeNow(2010, 1, days, 1, 1, 0);
const pingId = yield TelemetryController.addPendingPing(PING_TYPE, {}, {});
// Find the size of the ping.
const pingFilePath = getSavePathForPingId(pingId);
const pingSize = (yield OS.File.stat(pingFilePath)).size;
// Add the info at the beginning of the array, so that most recent pings come first.
pendingPingsInfo.unshift({id: pingId, size: pingSize, timestamp: date.getTime() });
// Set the last modification date.
yield OS.File.setDates(pingFilePath, null, date.getTime());
// Add it to the pending ping directory size.
pingsSizeInBytes += pingSize;
}
// We need to test the pending pings size before we hit the quota, otherwise a special
// value is recorded.
Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_SIZE_MB").clear();
Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_EVICTED_OVER_QUOTA").clear();
Telemetry.getHistogramById("TELEMETRY_PENDING_EVICTING_OVER_QUOTA_MS").clear();
yield TelemetryController.reset();
yield TelemetryStorage.testPendingQuotaTaskPromise();
// Check that the correct values for quota probes are reported when no quota is hit.
let h = Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_SIZE_MB").snapshot();
Assert.equal(h.sum, Math.round(pingsSizeInBytes / 1024 / 1024),
"Telemetry must report the correct pending pings directory size.");
h = Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_EVICTED_OVER_QUOTA").snapshot();
Assert.equal(h.sum, 0, "Telemetry must report 0 evictions if quota is not hit.");
h = Telemetry.getHistogramById("TELEMETRY_PENDING_EVICTING_OVER_QUOTA_MS").snapshot();
Assert.equal(h.sum, 0, "Telemetry must report a null elapsed time if quota is not hit.");
// Set the quota to 80% of the space.
const testQuotaInBytes = pingsSizeInBytes * 0.8;
fakePendingPingsQuota(testQuotaInBytes);
// The storage prunes pending pings until we reach 90% of the requested storage quota.
// Based on that, find how many pings should be kept.
const safeQuotaSize = Math.round(testQuotaInBytes * 0.9);
let sizeInBytes = 0;
let pingsWithinQuota = [];
let pingsOutsideQuota = [];
for (let pingInfo of pendingPingsInfo) {
sizeInBytes += pingInfo.size;
if (sizeInBytes >= safeQuotaSize) {
pingsOutsideQuota.push(pingInfo.id);
continue;
}
pingsWithinQuota.push(pingInfo.id);
}
expectedNotPrunedPings = pingsWithinQuota;
expectedPrunedPings = pingsOutsideQuota;
// Reset TelemetryController to start the pending pings cleanup.
yield TelemetryController.reset();
yield TelemetryStorage.testPendingQuotaTaskPromise();
yield checkPendingPings();
h = Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_EVICTED_OVER_QUOTA").snapshot();
Assert.equal(h.sum, pingsOutsideQuota.length,
"Telemetry must correctly report the over quota pings evicted from the pending pings directory.");
h = Telemetry.getHistogramById("TELEMETRY_PENDING_PINGS_SIZE_MB").snapshot();
Assert.equal(h.sum, 17, "Pending pings quota was hit, a special size must be reported.");
// Trigger a cleanup again and make sure we're not removing anything.
yield TelemetryController.reset();
yield TelemetryStorage.testPendingQuotaTaskPromise();
yield checkPendingPings();
Services.prefs.setBoolPref(PREF_FHR_UPLOAD, true);
});
add_task(function* teardown() {

View File

@ -1104,21 +1104,6 @@ add_task(function* test_environmentChange() {
Assert.deepEqual(ping.payload.keyedHistograms[KEYED_ID], {});
});
// Checks that an expired histogram file is deleted when loaded.
add_task(function* test_pruneOldPingFile() {
const id = generateUUID();
const path = OS.Path.join(TelemetryStorage.pingDirectoryPath, id);
yield OS.File.writeAtomic(path, "{}", {noOverwrite: false});
// fake a time 14 days & 1m earlier
const now = new Date().getTime();
const fakeMtime = now - (14 * 24 * 60 * 60 * 1000 + 60000);
OS.File.setDates(path, null, fakeMtime);
yield TelemetryController.reset();
Assert.ok(!(yield OS.File.exists(path)), "File should have been removed.");
});
add_task(function* test_savedPingsOnShutdown() {
// On desktop, we expect both "saved-session" and "shutdown" pings. We only expect
// the former on Android.

View File

@ -148,6 +148,10 @@
<property name="searchCount" readonly="true"
onget="this.initSearchNames(); return this.mSearchNames.length;"/>
<field name="shrinkDelay" readonly="true">
parseInt(this.getAttribute("shrinkdelay")) || 0
</field>
<field name="PrivateBrowsingUtils" readonly="true">
let utils = {};
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm", utils);
@ -1005,6 +1009,7 @@ extends="chrome://global/content/bindings/popup.xml#popup">
<implementation implements="nsIAutoCompletePopup">
<field name="_currentIndex">0</field>
<field name="_rowHeight">0</field>
<field name="_rlbAnimated">false</field>
<!-- =================== nsIAutoCompletePopup =================== -->
@ -1073,23 +1078,30 @@ extends="chrome://global/content/bindings/popup.xml#popup">
<method name="_invalidate">
<body>
<![CDATA[
// To get a fixed height for the popup, instead of the default
// behavior that grows and shrinks it on result change, a consumer
// can set the height attribute. In such a case, instead of adjusting
// the richlistbox height, we just need to collapse unused items.
if (!this.hasAttribute("height")) {
// collapsed if no matches
this.richlistbox.collapsed = (this._matchCount == 0);
// Dynamically update height until richlistbox.rows works (bug 401939)
// Adjust the height immediately and after the row contents update
this.adjustHeight();
setTimeout(function(self) self.adjustHeight(), 0, this);
// Update the richlistbox height.
if (this._adjustHeightTimeout) {
clearTimeout(this._adjustHeightTimeout);
}
if (this._shrinkTimeout) {
clearTimeout(this._shrinkTimeout);
}
this._adjustHeightTimeout = setTimeout(() => this.adjustHeight(), 0);
} else {
this._collapseUnusedItems();
}
// make sure to collapse any existing richlistitems
// that aren't going to be used
var existingItemsCount = this.richlistbox.childNodes.length;
for (var i = this._matchCount; i < existingItemsCount; i++)
this.richlistbox.childNodes[i].collapsed = true;
this._currentIndex = 0;
if (this._appendResultTimeout) {
clearTimeout(this._appendResultTimeout);
}
this._appendCurrentResult();
]]>
</body>
@ -1113,6 +1125,17 @@ extends="chrome://global/content/bindings/popup.xml#popup">
</getter>
</property>
<method name="_collapseUnusedItems">
<body>
<![CDATA[
let existingItemsCount = this.richlistbox.childNodes.length;
for (let i = this._matchCount; i < existingItemsCount; ++i) {
this.richlistbox.childNodes[i].collapsed = true;
}
]]>
</body>
</method>
<method name="adjustHeight">
<body>
<![CDATA[
@ -1126,16 +1149,35 @@ extends="chrome://global/content/bindings/popup.xml#popup">
if (!this._rowHeight) {
let firstRowRect = rows[0].getBoundingClientRect();
this._rowHeight = firstRowRect.height;
this._rlbAnimated = !!window.getComputedStyle(this.richlistbox).transitionProperty;
// Set a fixed max-height to avoid flicker when growing the panel.
this.richlistbox.style.maxHeight = (this._rowHeight * this.maxRows) + "px";
}
// Calculate the height to have the first row to last row shown
height = this._rowHeight * numRows;
}
// Only update the height if we have a non-zero height and if it
// changed (the richlistbox is collapsed if there are no results)
if (height && height != this.richlistbox.height)
this.richlistbox.height = height;
let currentHeight = this.richlistbox.getBoundingClientRect().height;
if (height > currentHeight) {
// Grow immediately.
this.richlistbox.style.height = height + "px";
} else {
// Delay shrinking to avoid flicker.
this._shrinkTimeout = setTimeout(() => {
this.richlistbox.style.height = height + "px";
if (this._rlbAnimated) {
let onTransitionEnd = () => {
this.removeEventListener("transitionend", onTransitionEnd, true);
this._collapseUnusedItems();
};
this.addEventListener("transitionend", onTransitionEnd, true);
} else {
this._collapseUnusedItems();
}
}, this.mInput.shrinkDelay);
}
]]>
</body>
</method>
@ -1227,7 +1269,7 @@ extends="chrome://global/content/bindings/popup.xml#popup">
if (this._currentIndex < matchCount) {
// yield after each batch of items so that typing the url bar is
// responsive
setTimeout(function (self) { self._appendCurrentResult(); }, 0, this);
this._appendResultTimeout = setTimeout(() => this._appendCurrentResult(), 0);
}
]]>
</body>