gecko/content/media/webaudio/test/webaudio.js

// Helpers for Web Audio tests
// Assert that calling |func| throws a DOMException with the given code.
function expectException(func, exceptionCode) {
  var threw = false;
  try {
    func();
  } catch (ex) {
    threw = true;
    ok(ex instanceof DOMException, "Expect a DOM exception");
    is(ex.code, exceptionCode, "Expect the correct exception code");
  }
  ok(threw, "The exception was thrown");
}

// Assert that calling |func| throws a TypeError.
function expectTypeError(func) {
  var threw = false;
  try {
    func();
  } catch (ex) {
    threw = true;
    ok(ex instanceof TypeError, "Expect a TypeError");
  }
  ok(threw, "The exception was thrown");
}

// Compare two samples with an absolute tolerance of 9e-3.
function fuzzyCompare(a, b) {
  return Math.abs(a - b) < 9e-3;
}

// Compare |buf1| and |buf2| sample by sample using fuzzyCompare and report the
// number of mismatches, the largest difference, and the first bad index.
// |offset| and |length| select the range of indices to compare; |sourceOffset|
// and |destOffset| shift the indices used in |buf1| and |buf2| respectively.
function compareBuffers(buf1, buf2,
                        /*optional*/ offset,
                        /*optional*/ length,
                        /*optional*/ sourceOffset,
                        /*optional*/ destOffset,
                        /*optional*/ skipLengthCheck) {
  if (!skipLengthCheck) {
    is(buf1.length, buf2.length, "Buffers must have the same length");
  }
  if (length == undefined) {
    length = buf1.length - (offset || 0);
  }
  sourceOffset = sourceOffset || 0;
  destOffset = destOffset || 0;
  var difference = 0;
  var maxDifference = 0;
  var firstBadIndex = -1;
  for (var i = offset || 0; i < Math.min(buf1.length, (offset || 0) + length); ++i) {
    if (!fuzzyCompare(buf1[i + sourceOffset], buf2[i + destOffset])) {
      difference++;
      maxDifference = Math.max(maxDifference, Math.abs(buf1[i + sourceOffset] - buf2[i + destOffset]));
      if (firstBadIndex == -1) {
        firstBadIndex = i;
      }
    }
  }
  is(difference, 0, "Found " + difference + " different samples, maxDifference: " +
     maxDifference + ", first bad index: " + firstBadIndex +
     " with source offset " + sourceOffset + " and destination offset " +
     destOffset);
}
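
// Illustrative usage sketch (hypothetical |rendered| and |expected| buffers,
// not part of this helper): compare 512 samples of |rendered| starting at
// index 1024 against the first 512 samples of |expected|, without requiring
// the two overall lengths to match:
//   compareBuffers(rendered, expected, 0, 512, 1024, 0, true);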

// Create a silent buffer with gTest.numberOfChannels channels and the given
// length in frames, at the context's sample rate.
function getEmptyBuffer(context, length) {
  return context.createBuffer(gTest.numberOfChannels, length, context.sampleRate);
}

/**
 * This function assumes that the test file defines a single gTest variable with
 * the following properties and methods:
 *
 * + numberOfChannels: optional property which specifies the number of channels
 *                     in the output.  The default value is 2.
 * + createGraph: mandatory method which takes a context object and does
 *                everything needed in order to set up the Web Audio graph.
 *                This function returns the node to be inspected.
 * + createGraphAsync: async version of createGraph.  This function takes
 *                     a callback which should be called with an argument
 *                     set to the node to be inspected when the callee is
 *                     ready to proceed with the test.  Either this function
 *                     or createGraph must be provided.
 * + createExpectedBuffers: optional method which takes a context object and
 *                          returns either one expected buffer or an array of
 *                          them, designating what is expected to be observed
 *                          in the output.  If omitted, the output is expected
 *                          to be silence.  All buffers must have the same
 *                          length, which must be a bufferSize supported by
 *                          ScriptProcessorNode.  This function is guaranteed
 *                          to be called before createGraph.
 * + length: property equal to the total number of frames which we are waiting
 *           to see in the output, mandatory if createExpectedBuffers is not
 *           provided, in which case it must be a bufferSize supported by
 *           ScriptProcessorNode (256, 512, 1024, 2048, 4096, 8192, or 16384).
 *           If createExpectedBuffers is provided then this must be equal to
 *           the number of expected buffers * the expected buffer length.
 * + skipOfflineContextTests: optional; when true, skips running tests on an
 *                            offline context by circumventing
 *                            testOnOfflineContext.
 */
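
/*
 * Minimal illustrative sketch, not part of this helper and not used by any
 * real test here: a hypothetical test file might define gTest along these
 * lines (routing a constant 2048-frame mono buffer through a GainNode at its
 * default unity gain and expecting the same samples back) and then call
 * runTest():
 *
 *   var gTest = {
 *     length: 2048,
 *     numberOfChannels: 1,
 *     createGraph: function(context) {
 *       var buffer = context.createBuffer(1, 2048, context.sampleRate);
 *       for (var i = 0; i < 2048; ++i) {
 *         buffer.getChannelData(0)[i] = 0.5;
 *       }
 *       var source = context.createBufferSource();
 *       source.buffer = buffer;
 *       var gain = context.createGain();
 *       source.connect(gain);
 *       source.start(0);
 *       return gain;
 *     },
 *     createExpectedBuffers: function(context) {
 *       var expected = context.createBuffer(1, 2048, context.sampleRate);
 *       for (var i = 0; i < 2048; ++i) {
 *         expected.getChannelData(0)[i] = 0.5;
 *       }
 *       return expected;
 *     },
 *   };
 *   runTest();
 */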

function runTest()
{
  function done() {
    SimpleTest.finish();
  }

  SimpleTest.waitForExplicitFinish();
  function runTestFunction() {
    if (!gTest.numberOfChannels) {
      gTest.numberOfChannels = 2; // default
    }

    var testLength;

    function runTestOnContext(context, callback, testOutput) {
      if (!gTest.createExpectedBuffers) {
        // Assume that the output is silence
        var expectedBuffers = getEmptyBuffer(context, gTest.length);
      } else {
        var expectedBuffers = gTest.createExpectedBuffers(context);
      }
      if (!(expectedBuffers instanceof Array)) {
        expectedBuffers = [expectedBuffers];
      }
      var expectedFrames = 0;
      for (var i = 0; i < expectedBuffers.length; ++i) {
        is(expectedBuffers[i].numberOfChannels, gTest.numberOfChannels,
           "Correct number of channels for expected buffer " + i);
        expectedFrames += expectedBuffers[i].length;
      }
      if (gTest.length && gTest.createExpectedBuffers) {
        is(expectedFrames, gTest.length, "Correct number of expected frames");
      }

      if (gTest.createGraphAsync) {
        gTest.createGraphAsync(context, function(nodeToInspect) {
          testOutput(nodeToInspect, expectedBuffers, callback);
        });
      } else {
        testOutput(gTest.createGraph(context), expectedBuffers, callback);
      }
    }

    function testOnNormalContext(callback) {
      function testOutput(nodeToInspect, expectedBuffers, callback) {
        testLength = 0;
        var sp = context.createScriptProcessor(expectedBuffers[0].length, gTest.numberOfChannels);
        nodeToInspect.connect(sp);
        sp.connect(context.destination);
        sp.onaudioprocess = function(e) {
          var expectedBuffer = expectedBuffers.shift();
          testLength += expectedBuffer.length;
          is(e.inputBuffer.numberOfChannels, expectedBuffer.numberOfChannels,
             "Correct number of input buffer channels");
          for (var i = 0; i < e.inputBuffer.numberOfChannels; ++i) {
            compareBuffers(e.inputBuffer.getChannelData(i), expectedBuffer.getChannelData(i));
          }
          if (expectedBuffers.length == 0) {
            sp.onaudioprocess = null;
            callback();
          }
        };
      }
      var context = new AudioContext();
      runTestOnContext(context, callback, testOutput);
    }

    function testOnOfflineContext(callback, sampleRate) {
      function testOutput(nodeToInspect, expectedBuffers, callback) {
        nodeToInspect.connect(context.destination);
        context.oncomplete = function(e) {
          var samplesSeen = 0;
          while (expectedBuffers.length) {
            var expectedBuffer = expectedBuffers.shift();
            is(e.renderedBuffer.numberOfChannels, expectedBuffer.numberOfChannels,
               "Correct number of rendered buffer channels");
            for (var i = 0; i < e.renderedBuffer.numberOfChannels; ++i) {
              compareBuffers(e.renderedBuffer.getChannelData(i),
                             expectedBuffer.getChannelData(i),
                             undefined,
                             expectedBuffer.length,
                             samplesSeen,
                             undefined,
                             true);
            }
            samplesSeen += expectedBuffer.length;
          }
          callback();
        };
        context.startRendering();
      }

      var context = new OfflineAudioContext(gTest.numberOfChannels, testLength, sampleRate);
      runTestOnContext(context, callback, testOutput);
    }

    // Run the test on a normal AudioContext first, then (unless the test opts
    // out) on OfflineAudioContexts at 48000 Hz and 44100 Hz.
    testOnNormalContext(function() {
      if (!gTest.skipOfflineContextTests) {
        testOnOfflineContext(function() {
          testOnOfflineContext(done, 44100);
        }, 48000);
      } else {
        done();
      }
    });
  }

  if (document.readyState !== 'complete') {
    addLoadEvent(runTestFunction);
  } else {
    runTestFunction();
  }
}