Bug 805254. Part 4: Remove FORMAT_U8 from nsAudioStream::SampleFormat. r=kinetik

We also give nsWaveReader its own separate format enum.
This commit is contained in:
Robert O'Callahan 2012-10-25 23:09:39 +13:00
parent 9daf75897d
commit 11cca22e78
6 changed files with 12 additions and 32 deletions

View File

@ -19,11 +19,6 @@ SampleToFloat(float aValue)
return aValue;
}
static float
SampleToFloat(uint8_t aValue)
{
  // U8 PCM stores samples biased by 128 (128 == silence). Re-center around
  // zero, then scale by 1/128 to land in [-1.0, 1.0). Multiplying by the
  // exact power-of-two reciprocal is bit-identical to dividing by 128.0f.
  int centered = int(aValue) - 128;
  return centered * (1.0f/128.0f);
}
static float
SampleToFloat(int16_t aValue)
{
return aValue/32768.0f;
@ -35,13 +30,6 @@ FloatToSample(float aValue, float* aOut)
*aOut = aValue;
}
static void
FloatToSample(float aValue, uint8_t* aOut)
{
  // Map the float range [-1.0, 1.0) onto [0, 256) with 128 as the zero
  // point, clamp to the representable byte range, and truncate.
  float biased = aValue*128 + 128;
  float limited = NS_MIN(255.0f, biased);
  limited = NS_MAX(0.0f, limited);
  *aOut = uint8_t(limited);
}
static void
FloatToSample(float aValue, int16_t* aOut)
{
float v = aValue*32768.0f;
@ -103,10 +91,6 @@ InterleaveAndConvertBuffer(const SrcT* aSource, int32_t aSourceLength,
InterleaveAndConvertBuffer(aSource, aSourceLength, aLength, aVolume,
aChannels, static_cast<int16_t*>(aOutput));
break;
case nsAudioStream::FORMAT_U8:
InterleaveAndConvertBuffer(aSource, aSourceLength, aLength, aVolume,
aChannels, static_cast<uint8_t*>(aOutput));
break;
}
}
@ -133,13 +117,6 @@ InterleaveAndConvertBuffer(const void* aSource, nsAudioStream::SampleFormat aSou
aChannels,
aOutput, aOutputFormat);
break;
case nsAudioStream::FORMAT_U8:
InterleaveAndConvertBuffer(static_cast<const uint8_t*>(aSource) + aOffset, aSourceLength,
aLength,
aVolume,
aChannels,
aOutput, aOutputFormat);
break;
}
}

View File

@ -67,7 +67,6 @@ public:
static int GetSampleSize(SampleFormat aFormat)
{
switch (aFormat) {
case nsAudioStream::FORMAT_U8: return 1;
case nsAudioStream::FORMAT_S16: return 2;
case nsAudioStream::FORMAT_FLOAT32: return 4;
}

View File

@ -21,8 +21,9 @@ public:
enum SampleFormat
{
FORMAT_U8,
// Native-endian signed 16-bit audio samples
FORMAT_S16,
// Signed 32-bit float samples
FORMAT_FLOAT32
};

View File

@ -176,7 +176,7 @@ bool nsWaveReader::DecodeAudioData()
AudioDataValue* s = sampleBuffer.get();
for (int i = 0; i < frames; ++i) {
for (unsigned int j = 0; j < mChannels; ++j) {
if (mSampleFormat == nsAudioStream::FORMAT_U8) {
if (mSampleFormat == FORMAT_U8) {
uint8_t v = ReadUint8(&d);
#if defined(MOZ_SAMPLE_TYPE_S16)
*s++ = (v * (1.F/UINT8_MAX)) * UINT16_MAX + INT16_MIN;
@ -184,7 +184,7 @@ bool nsWaveReader::DecodeAudioData()
*s++ = (v * (1.F/UINT8_MAX)) * 2.F - 1.F;
#endif
}
else if (mSampleFormat == nsAudioStream::FORMAT_S16) {
else if (mSampleFormat == FORMAT_S16) {
int16_t v = ReadInt16LE(&d);
#if defined(MOZ_SAMPLE_TYPE_S16)
*s++ = v;
@ -451,9 +451,9 @@ nsWaveReader::LoadFormatChunk()
mChannels = channels;
mFrameSize = frameSize;
if (sampleFormat == 8) {
mSampleFormat = nsAudioStream::FORMAT_U8;
mSampleFormat = FORMAT_U8;
} else {
mSampleFormat = nsAudioStream::FORMAT_S16;
mSampleFormat = FORMAT_S16;
}
return true;
}

View File

@ -80,8 +80,12 @@ private:
// (interleaved).
uint32_t mFrameSize;
// The sample format of the PCM data.
nsAudioStream::SampleFormat mSampleFormat;
// The sample format of the PCM data. nsAudioStream::SampleFormat doesn't
// support U8.
enum {
FORMAT_U8,
FORMAT_S16
} mSampleFormat;
// Size of PCM data stored in the WAVE as reported by the data chunk in
// the media.

View File

@ -552,7 +552,6 @@ void MediaPipelineTransmit::ProcessAudioChunk(AudioSessionConduit *conduit,
if (chunk.mBuffer) {
switch(chunk.mBufferFormat) {
case nsAudioStream::FORMAT_U8:
case nsAudioStream::FORMAT_FLOAT32:
MOZ_MTLOG(PR_LOG_ERROR, "Can't process audio except in 16-bit PCM yet");
MOZ_ASSERT(PR_FALSE);