gecko/content/media/test/test_a4_tone.html

<!DOCTYPE HTML>
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=490705
-->
<head>
<title>Media test: simple audioAvailable event checks</title>
<script type="text/javascript" src="/MochiKit/packed.js"></script>
<script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=490705">Mozilla Bug 490705</a>
<!-- mute audio, since there is no need to hear the sound for these tests -->
<audio id='a1' onerror="event.stopPropagation();" controls></audio>
<pre id="test">
<script class="testbody" type="text/javascript">
/**
 * FFT is a class for calculating the Discrete Fourier Transform of a signal
 * with the Fast Fourier Transform algorithm.
 *
 * Source: github.com/corbanbrook/dsp.js; License: MIT; Copyright: Corban Brook
 *
 * @param {Number} bufferSize The size of the sample buffer to be computed. Must be a power of 2
 * @param {Number} sampleRate The sampleRate of the buffer (e.g. 44100)
 *
 * @constructor
 */
FFT = function(bufferSize, sampleRate) {
  this.bufferSize = bufferSize;
  this.sampleRate = sampleRate;
  this.spectrum = new Float32Array(bufferSize/2);
  this.real = new Float32Array(bufferSize);
  this.imag = new Float32Array(bufferSize);
  this.reverseTable = new Uint32Array(bufferSize);
  var limit = 1;
  var bit = bufferSize >> 1;
  while ( limit < bufferSize ) {
    for ( var i = 0; i < limit; i++ ) {
      this.reverseTable[i + limit] = this.reverseTable[i] + bit;
    }
    limit = limit << 1;
    bit = bit >> 1;
  }
  this.sinTable = new Float32Array(bufferSize);
  this.cosTable = new Float32Array(bufferSize);
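  // Entries at index 0 are NaN (-Math.PI/0), but they are never read:
  // forward() only looks up these tables at index halfSize, which starts at 1.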
  for ( var i = 0; i < bufferSize; i++ ) {
    this.sinTable[i] = Math.sin(-Math.PI/i);
    this.cosTable[i] = Math.cos(-Math.PI/i);
  }
};
/**
 * Performs a forward transform on the sample buffer.
 * Converts a time domain signal to frequency domain spectra.
 *
 * @param {Array} buffer The sample buffer. Buffer length must be a power of 2
 *
 * @returns The frequency spectrum array
 */
FFT.prototype.forward = function(buffer) {
  // Locally scope variables for speed up
  var bufferSize = this.bufferSize,
      cosTable = this.cosTable,
      sinTable = this.sinTable,
      reverseTable = this.reverseTable,
      real = this.real,
      imag = this.imag,
      spectrum = this.spectrum;
  var k = Math.floor(Math.log(bufferSize) / Math.LN2);
  if ( Math.pow(2, k) !== bufferSize ) {
    throw "Invalid buffer size, must be a power of 2.";
  }
  if ( bufferSize !== buffer.length ) {
    throw "Supplied buffer is not the same size as defined FFT. FFT Size: " + bufferSize + " Buffer Size: " + buffer.length;
  }
  for ( var i = 0; i < bufferSize; i++ ) {
    real[i] = buffer[reverseTable[i]];
    imag[i] = 0;
  }
  var halfSize = 1,
      phaseShiftStepReal,
      phaseShiftStepImag,
      currentPhaseShiftReal,
      currentPhaseShiftImag,
      off,
      tr,
      ti,
      tmpReal,
      i;
  while ( halfSize < bufferSize ) {
    phaseShiftStepReal = cosTable[halfSize];
    phaseShiftStepImag = sinTable[halfSize];
    currentPhaseShiftReal = 1;
    currentPhaseShiftImag = 0;
    for ( var fftStep = 0; fftStep < halfSize; fftStep++ ) {
      i = fftStep;
      while ( i < bufferSize ) {
        off = i + halfSize;
        tr = (currentPhaseShiftReal * real[off]) - (currentPhaseShiftImag * imag[off]);
        ti = (currentPhaseShiftReal * imag[off]) + (currentPhaseShiftImag * real[off]);
        real[off] = real[i] - tr;
        imag[off] = imag[i] - ti;
        real[i] += tr;
        imag[i] += ti;
        i += halfSize << 1;
      }
      tmpReal = currentPhaseShiftReal;
      currentPhaseShiftReal = (tmpReal * phaseShiftStepReal) - (currentPhaseShiftImag * phaseShiftStepImag);
      currentPhaseShiftImag = (tmpReal * phaseShiftStepImag) + (currentPhaseShiftImag * phaseShiftStepReal);
    }
    halfSize = halfSize << 1;
  }
  i = bufferSize/2;
  while(i--) {
    spectrum[i] = 2 * Math.sqrt(real[i] * real[i] + imag[i] * imag[i]) / bufferSize;
  }
  return spectrum;
};
/* end of FFT */
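// Illustrative usage only (not executed by the test): with this test's constants,
// `new FFT(1024, 44100)` builds a transform whose forward() maps one 1024-sample
// frame buffer to a 512-bin magnitude spectrum (spectrum.length === bufferSize / 2).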
var testFile = "file_a4_tone.ogg";
var testFileDuration = 3.0;
var testFileChannelCount = 1;
var testFileSampleRate = 44100;
var testFileFrameBufferLength = 1024;
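// The test tone is A4 (440 Hz). With a 1024-sample frame buffer at 44100 Hz, the
// tone should dominate FFT bin round(440 * 1024 / 44100) = 10, so the "signal"
// interval below expects bin 10 and the silent "noSignal" intervals expect bin 0.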
var signal = [{start:1.1, end: 1.9, fftBin: 10 } ];
var noSignal = [{start:0.1, end: 0.9 }, {start:2.1, end: 2.9 } ];
var undef;
var fft, fftBufferSize;
var currentSampleOffset = 0;
var spectrumMaxs = [];
var isTimePropertyValid = true;
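// Called for every MozAudioAvailable event: runs an FFT over the delivered frame
// buffer and records which spectrum bin dominates and at what time it occurred.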
function audioAvailable(event) {
  var buffer = event.frameBuffer;
  if(fft === undef) {
    fftBufferSize = buffer.length;
    fft = new FFT(fftBufferSize, testFileSampleRate);
  }
  fft.forward(buffer);
  var spectrum = fft.spectrum;
  // Find the peak frequency bin
  var maxIndex = 0, maxValue = spectrum[0];
  for(var i=0;i<spectrum.length;i++) {
    if(maxValue < spectrum[i]) {
      maxValue = spectrum[maxIndex = i];
    }
  }
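  // Bin maxIndex corresponds to a frequency of about maxIndex * testFileSampleRate / fftBufferSize Hz.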
  spectrumMaxs.push({ value: maxValue, index: maxIndex, time: (currentSampleOffset / testFileSampleRate) });
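  // event.time should report the start of this frame buffer in seconds, i.e. the
  // number of samples already delivered divided by the sample rate, within 10 ms.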
  if( (typeof event.time !== "number") ||
      (Math.abs(event.time - currentSampleOffset / testFileSampleRate) > 0.01) ) {
    isTimePropertyValid = false;
  }
  currentSampleOffset += buffer.length;
}
var loadedMetadataCalled = false;
function loadedMetadata() {
  loadedMetadataCalled = true;
  var a1 = document.getElementById('a1');
  is(a1.mozChannels, testFileChannelCount, "mozChannels should be " + testFileChannelCount + ".");
  is(a1.mozSampleRate, testFileSampleRate, "mozSampleRate should be " + testFileSampleRate + ".");
  is(a1.mozFrameBufferLength, testFileFrameBufferLength, "mozFrameBufferLength should be " + testFileFrameBufferLength + ".");
}
function checkResults() {
  ok(loadedMetadataCalled, "loadedmetadata event should have been dispatched.");
  ok(isTimePropertyValid, "The audioAvailable event's time attribute should be valid.");
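  // Audio is delivered in whole frame buffers, so the total number of samples seen
  // should be the file's duration in samples, rounded up to a multiple of the
  // frame buffer length.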
  var expectedOffset = Math.ceil(testFileDuration * testFileSampleRate);
  if(expectedOffset % fftBufferSize !== 0) { expectedOffset += (fftBufferSize - (expectedOffset % fftBufferSize)); }
  is(currentSampleOffset, expectedOffset, "Check amount of signal data processed");
  var i, j;
  var signalPresent = true;
  for(i=0;i<signal.length;++i) {
    var signalAnalysed = false;
    for(j=0;j<spectrumMaxs.length;++j) {
      if(signal[i].start <= spectrumMaxs[j].time && spectrumMaxs[j].time < signal[i].end) {
        signalAnalysed = true;
        signalPresent = spectrumMaxs[j].index == signal[i].fftBin;
      }
      if(!signalPresent) break;
    }
    if(!signalAnalysed) signalPresent = false;
    if(!signalPresent) break;
  }
  is(signalPresent, true, "Check signal present");
  var noSignalPresent = true;
  for(i=0;i<noSignal.length;++i) {
    var signalAnalysed = false;
    for(j=0;j<spectrumMaxs.length;++j) {
      if(noSignal[i].start <= spectrumMaxs[j].time && spectrumMaxs[j].time < noSignal[i].end) {
        signalAnalysed = true;
        noSignalPresent = spectrumMaxs[j].index == 0;
      }
      if(!noSignalPresent) break;
    }
    if(!signalAnalysed) noSignalPresent = false;
    if(!noSignalPresent) break;
  }
  is(noSignalPresent, true, "Check mute fragments present");
  SimpleTest.finish();
}
function audioEnded() {
  checkResults();
}
function initTest() {
  var a1 = document.getElementById('a1');
  a1.addEventListener("ended", audioEnded, false);
  a1.addEventListener("loadedmetadata", loadedMetadata, false);
  a1.addEventListener("MozAudioAvailable", audioAvailable, false);
  a1.src = testFile;
  a1.muted = true;
  a1.play();
}
window.addEventListener("load", function(e) {
  initTest();
}, false);
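// Defer test completion until checkResults() calls SimpleTest.finish().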
SimpleTest.waitForExplicitFinish();
</script>
</pre>
</body>
</html>