#include <stagefright/DataSource.h>
#include <stagefright/MediaSource.h>
#include <utils/RefBase.h>

#include "GonkNativeWindow.h"
#include "GonkNativeWindowClient.h"
#include "GonkIOSurfaceImage.h"
#include "MPAPI.h"
#include "MediaResource.h"
#include "AbstractMediaDecoder.h"

namespace android {
class OmxDecoder;
};

namespace mozilla {
namespace layers {
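
// Wraps the MediaBuffer behind a gralloc-backed video frame so the buffer
// stays alive while the compositor holds the frame. Unlock() is expected to
// hand the buffer back to the owning OmxDecoder (see
// OmxDecoder::ReleaseVideoBuffer below); the exact hand-back path lives in
// the .cpp.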
class VideoGraphicBuffer : public GraphicBufferLocked {
  // XXX change this to an actual smart pointer at some point
  android::MediaBuffer *mMediaBuffer;
  android::wp<android::OmxDecoder> mOmxDecoder;
public:
  VideoGraphicBuffer(const android::wp<android::OmxDecoder> aOmxDecoder,
                     android::MediaBuffer *aBuffer,
                     SurfaceDescriptor *aDescriptor);
  ~VideoGraphicBuffer();
  void Unlock();
};

} // namespace layers
} // namespace mozilla

namespace android {

// MediaStreamSource is a DataSource that reads from an MPAPI media stream.
class MediaStreamSource : public DataSource {
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;

  MediaResource *mResource;
  AbstractMediaDecoder *mDecoder;
public:
  MediaStreamSource(MediaResource *aResource,
                    AbstractMediaDecoder *aDecoder);

  virtual status_t initCheck() const;
  virtual ssize_t readAt(off64_t offset, void *data, size_t size);
  virtual ssize_t readAt(off_t offset, void *data, size_t size) {
    return readAt(static_cast<off64_t>(offset), data, size);
  }
  virtual status_t getSize(off_t *size) {
    off64_t size64;
    status_t status = getSize(&size64);
    *size = size64;
    return status;
  }
  virtual status_t getSize(off64_t *size);
  virtual uint32_t flags() {
    return kWantsPrefetching;
  }

  virtual ~MediaStreamSource();

private:
  MediaStreamSource(const MediaStreamSource &);
  MediaStreamSource &operator=(const MediaStreamSource &);
};
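
// A sketch (not part of this header) of how a DataSource subclass such as
// MediaStreamSource is typically handed to stagefright; the real wiring lives
// in the .cpp and the local names below are illustrative only:
//
//   sp<DataSource> source = new MediaStreamSource(resource, decoder);
//   sp<MediaExtractor> extractor = MediaExtractor::Create(source);
//   for (size_t i = 0; i < extractor->countTracks(); i++) {
//     sp<MetaData> meta = extractor->getTrackMetaData(i);
//     // Pick the audio/video tracks by MIME type, then wrap them with
//     // OMXCodec to obtain the MediaSources held by OmxDecoder below.
//   }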

class OmxDecoder : public RefBase {
  typedef MPAPI::AudioFrame AudioFrame;
  typedef MPAPI::VideoFrame VideoFrame;
  typedef mozilla::MediaResource MediaResource;
  typedef mozilla::AbstractMediaDecoder AbstractMediaDecoder;

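  // Codec selection flags; the values mirror stagefright's
  // OMXCodec::CreationFlags and are intended for OMXCodec::Create().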
  enum {
    kPreferSoftwareCodecs = 1,
    kSoftwareCodecsOnly = 8,
    kHardwareCodecsOnly = 16,
  };

  AbstractMediaDecoder *mDecoder;
  MediaResource *mResource;
  sp<GonkNativeWindow> mNativeWindow;
  sp<GonkNativeWindowClient> mNativeWindowClient;
  sp<MediaSource> mVideoTrack;
  sp<MediaSource> mVideoSource;
  sp<MediaSource> mAudioTrack;
  sp<MediaSource> mAudioSource;
  int32_t mVideoWidth;
  int32_t mVideoHeight;
  int32_t mVideoColorFormat;
  int32_t mVideoStride;
  int32_t mVideoSliceHeight;
  int32_t mVideoRotation;
  int32_t mAudioChannels;
  int32_t mAudioSampleRate;
  int64_t mDurationUs;
  VideoFrame mVideoFrame;
  AudioFrame mAudioFrame;

  // The lifetime of these buffers is managed by OMXCodec, as long as we
  // release them after use; see ReleaseVideoBuffer() and ReleaseAudioBuffer().
  MediaBuffer *mVideoBuffer;
  MediaBuffer *mAudioBuffer;

  // Holds the video MediaBuffers that are released during a video seek; they
  // are released for real shortly after the seek completes. OMXCodec does not
  // accept MediaBuffers while it is seeking, and asserts if one is returned
  // to it during that time.
  Vector<MediaBuffer *> mPendingVideoBuffers;
  // True while OMXCodec is seeking.
  bool mIsVideoSeeking;
  // Protects the pending release() operations on video MediaBuffers, which
  // can be triggered from multiple threads; the pending operations happen
  // only during a video seek. Holding mSeekLock for long can stall video
  // rendering, so keep the hold time as short as possible.
  Mutex mSeekLock;

  // 'true' if a read from the audio stream was done while reading the metadata.
  bool mAudioMetadataRead;

  void ReleaseVideoBuffer();
  void ReleaseAudioBuffer();
  // Call with mSeekLock held.
  void ReleaseAllPendingVideoBuffersLocked();

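  // Helpers that wrap a decoded buffer (aData/aSize at aTimeUs) into an
  // MPAPI::VideoFrame for the color formats a codec may report.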
  void PlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void CbYCrYFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYUV420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  void SemiPlanarYVU420Frame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToVideoFrame(VideoFrame *aFrame, int64_t aTimeUs, void *aData, size_t aSize, bool aKeyFrame);
  bool ToAudioFrame(AudioFrame *aFrame, int64_t aTimeUs, void *aData, size_t aDataOffset, size_t aSize,
                    int32_t aAudioChannels, int32_t aAudioSampleRate);

public:
  OmxDecoder(MediaResource *aResource, AbstractMediaDecoder *aDecoder);
  ~OmxDecoder();

  bool Init();
  bool SetVideoFormat();
  bool SetAudioFormat();

  void GetDuration(int64_t *durationUs) {
    *durationUs = mDurationUs;
  }

  void GetVideoParameters(int32_t *width, int32_t *height) {
    *width = mVideoWidth;
    *height = mVideoHeight;
  }

  void GetAudioParameters(int32_t *numChannels, int32_t *sampleRate) {
    *numChannels = mAudioChannels;
    *sampleRate = mAudioSampleRate;
  }

  bool HasVideo() {
    return mVideoSource != nullptr;
  }

  bool HasAudio() {
    return mAudioSource != nullptr;
  }

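  // Read the next decoded frame into aFrame. When aDoSeek is true, the video
  // source first seeks to aSeekTimeUs; aKeyframeSkip asks ReadVideo to discard
  // frames until the next keyframe (see the .cpp for the exact semantics).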
  bool ReadVideo(VideoFrame *aFrame, int64_t aSeekTimeUs,
                 bool aKeyframeSkip = false,
                 bool aDoSeek = false);
  bool ReadAudio(AudioFrame *aFrame, int64_t aSeekTimeUs);

  MediaResource *GetResource() {
    return mResource;
  }
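
  // Returns aBuffer to the decoder once its consumer is done with it. During a
  // video seek the buffer is expected to be queued on mPendingVideoBuffers and
  // released after the seek completes (see the comments above).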
  bool ReleaseVideoBuffer(MediaBuffer *aBuffer);
};
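
// A minimal usage sketch (not part of this header), using only the methods
// declared above; `resource`, `decoder`, the treatment of a negative
// aSeekTimeUs as "no seek", and the elided error handling are illustrative
// assumptions only:
//
//   sp<OmxDecoder> omx = new OmxDecoder(resource, decoder);
//   if (omx->Init() && omx->SetVideoFormat() && omx->SetAudioFormat()) {
//     int64_t durationUs;
//     omx->GetDuration(&durationUs);
//     MPAPI::VideoFrame frame;
//     while (omx->ReadVideo(&frame, /* aSeekTimeUs */ -1)) {
//       // Hand the frame to the caller / image container.
//     }
//   }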

} // namespace android