Backed out 9 changesets (bug 1014614) for Android mochitest bustage on a CLOSED TREE

Backed out changeset 40f99ba7f616 (bug 1014614)
Backed out changeset 8fbc3c85adfc (bug 1014614)
Backed out changeset 40dbd7c6ce65 (bug 1014614)
Backed out changeset f2b504bdd7c5 (bug 1014614)
Backed out changeset c878e29fbef9 (bug 1014614)
Backed out changeset 511dcc817f5d (bug 1014614)
Backed out changeset 2b72e71f1fdf (bug 1014614)
Backed out changeset 8b530a9a2f99 (bug 1014614)
Backed out changeset 7fa1b78de684 (bug 1014614)
Wes Kocher 2014-10-17 16:34:01 -07:00
parent b9f04fe4f1
commit a074f0af18
39 changed files with 394 additions and 3970 deletions

View File

@ -5301,15 +5301,8 @@ fi;
dnl ========================================================
dnl = Built-in fragmented MP4 support.
dnl ========================================================
if test "$OS_TARGET" = Android; then
MOZ_FMP4=1
fi
if test -n "$MOZ_WMF" -o -n "$MOZ_FFMPEG" -o -n "$MOZ_APPLEMEDIA"; then
dnl Enable fragmented MP4 parser on Windows by default.
dnl We will also need to enable it on other platforms as we implement
dnl platform decoder support there too.
dnl Enable fragmented MP4 parser on platforms with decoder support.
MOZ_FMP4=1
fi

View File

@ -23,9 +23,6 @@
#ifdef MOZ_APPLEMEDIA
#include "apple/AppleDecoderModule.h"
#endif
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidBridge.h"
#endif
namespace mozilla {
@ -170,10 +167,6 @@ HavePlatformMPEGDecoders()
#ifdef XP_WIN
// We have H.264/AAC platform decoders on Windows Vista and up.
IsVistaOrLater() ||
#endif
#ifdef MOZ_WIDGET_ANDROID
// We need android.media.MediaCodec which exists in API level 16 and higher.
(AndroidBridge::Bridge()->GetAPIVersion() >= 16) ||
#endif
IsFFmpegAvailable() ||
IsAppleAvailable() ||

View File

@ -17,9 +17,6 @@
#ifdef MOZ_GONK_MEDIACODEC
#include "GonkDecoderModule.h"
#endif
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidDecoderModule.h"
#endif
#include "mozilla/Preferences.h"
#ifdef MOZ_EME
@ -36,8 +33,6 @@ extern PlatformDecoderModule* CreateBlankDecoderModule();
bool PlatformDecoderModule::sUseBlankDecoder = false;
bool PlatformDecoderModule::sFFmpegDecoderEnabled = false;
bool PlatformDecoderModule::sGonkDecoderEnabled = false;
bool PlatformDecoderModule::sAndroidMCDecoderEnabled = false;
bool PlatformDecoderModule::sAndroidMCDecoderPreferred = false;
/* static */
void
@ -54,18 +49,10 @@ PlatformDecoderModule::Init()
"media.fragmented-mp4.use-blank-decoder");
Preferences::AddBoolVarCache(&sFFmpegDecoderEnabled,
"media.fragmented-mp4.ffmpeg.enabled", false);
#ifdef MOZ_GONK_MEDIACODEC
Preferences::AddBoolVarCache(&sGonkDecoderEnabled,
"media.fragmented-mp4.gonk.enabled", false);
#endif
#ifdef MOZ_WIDGET_ANDROID
Preferences::AddBoolVarCache(&sAndroidMCDecoderEnabled,
"media.fragmented-mp4.android-media-codec.enabled", false);
Preferences::AddBoolVarCache(&sAndroidMCDecoderPreferred,
"media.fragmented-mp4.android-media-codec.preferred", false);
#endif
#ifdef XP_WIN
WMFDecoderModule::Init();
#endif
@ -135,11 +122,6 @@ PlatformDecoderModule::Create()
// Note: This runs on the decode thread.
MOZ_ASSERT(!NS_IsMainThread());
#ifdef MOZ_WIDGET_ANDROID
if (sAndroidMCDecoderPreferred && sAndroidMCDecoderEnabled) {
return new AndroidDecoderModule();
}
#endif
if (sUseBlankDecoder) {
return CreateBlankDecoderModule();
}
@ -167,11 +149,6 @@ PlatformDecoderModule::Create()
if (sGonkDecoderEnabled) {
return new GonkDecoderModule();
}
#endif
#ifdef MOZ_WIDGET_ANDROID
if (sAndroidMCDecoderEnabled) {
return new AndroidDecoderModule();
}
#endif
return nullptr;
}

View File

@ -131,8 +131,6 @@ protected:
static bool sUseBlankDecoder;
static bool sFFmpegDecoderEnabled;
static bool sGonkDecoderEnabled;
static bool sAndroidMCDecoderPreferred;
static bool sAndroidMCDecoderEnabled;
};
// A callback used by MediaDataDecoder to return output/errors to the

View File

@ -1,494 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AndroidDecoderModule.h"
#include "PlatformDecoderModule.h"
#include "GeneratedJNIWrappers.h"
#include "GeneratedSDKWrappers.h"
#include "AndroidBridge.h"
#include "MediaTaskQueue.h"
#include "SharedThreadPool.h"
#include "TexturePoolOGL.h"
#include "GLImages.h"
#include "MediaData.h"
#include "mp4_demuxer/AnnexB.h"
#include "mp4_demuxer/DecoderData.h"
#include "nsThreadUtils.h"
#include "nsAutoPtr.h"
#include <jni.h>
using namespace mozilla;
using namespace mozilla::gl;
using namespace mozilla::widget::android;
static MediaCodec* CreateDecoder(JNIEnv* aEnv, const char* aMimeType)
{
if (!aMimeType) {
return nullptr;
}
nsAutoString mimeType;
mimeType.AssignASCII(aMimeType);
jobject decoder = MediaCodec::CreateDecoderByType(mimeType);
return new MediaCodec(decoder, aEnv);
}
class VideoDataDecoder : public MediaCodecDataDecoder {
public:
VideoDataDecoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
MediaFormat* aFormat, MediaDataDecoderCallback* aCallback,
layers::ImageContainer* aImageContainer)
: MediaCodecDataDecoder(MediaData::Type::VIDEO_FRAME, aConfig.mime_type, aFormat, aCallback)
, mImageContainer(aImageContainer)
, mConfig(aConfig)
{
}
nsresult Init() MOZ_OVERRIDE {
mSurfaceTexture = AndroidSurfaceTexture::Create();
if (!mSurfaceTexture) {
printf_stderr("Failed to create SurfaceTexture for video decode\n");
return NS_ERROR_FAILURE;
}
return InitDecoder(mSurfaceTexture->JavaSurface());
}
virtual nsresult Input(mp4_demuxer::MP4Sample* aSample) MOZ_OVERRIDE {
mp4_demuxer::AnnexB::ConvertSample(aSample, mConfig.annex_b);
return MediaCodecDataDecoder::Input(aSample);
}
virtual nsresult PostOutput(BufferInfo* aInfo, Microseconds aDuration) MOZ_OVERRIDE {
VideoInfo videoInfo;
videoInfo.mDisplay = nsIntSize(mConfig.display_width, mConfig.display_height);
bool isSync = false;
if (MediaCodec::getBUFFER_FLAG_SYNC_FRAME() & aInfo->getFlags()) {
isSync = true;
}
nsRefPtr<layers::Image> img = mImageContainer->CreateImage(ImageFormat::SURFACE_TEXTURE);
layers::SurfaceTextureImage::Data data;
data.mSurfTex = mSurfaceTexture.get();
data.mSize = gfx::IntSize(mConfig.display_width, mConfig.display_height);
data.mInverted = true;
layers::SurfaceTextureImage* typedImg = static_cast<layers::SurfaceTextureImage*>(img.get());
typedImg->SetData(data);
mCallback->Output(VideoData::CreateFromImage(videoInfo, mImageContainer, aInfo->getOffset(),
aInfo->getPresentationTimeUs(),
aDuration,
img, isSync,
aInfo->getPresentationTimeUs(),
gfx::IntRect(0, 0,
mConfig.display_width,
mConfig.display_height)));
return NS_OK;
}
protected:
layers::ImageContainer* mImageContainer;
const mp4_demuxer::VideoDecoderConfig& mConfig;
nsRefPtr<AndroidSurfaceTexture> mSurfaceTexture;
};
class AudioDataDecoder : public MediaCodecDataDecoder {
public:
AudioDataDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaFormat* aFormat, MediaDataDecoderCallback* aCallback)
: MediaCodecDataDecoder(MediaData::Type::AUDIO_SAMPLES, aConfig.mime_type, aFormat, aCallback)
, mConfig(aConfig)
{
MOZ_ASSERT(mConfig.bits_per_sample == 16, "We only support 16-bit audio");
}
nsresult Output(BufferInfo* aInfo, void* aBuffer, Microseconds aDuration) {
// The output on Android is always 16-bit signed
uint32_t numChannels = mConfig.channel_count;
uint32_t numFrames = (aInfo->getSize() / numChannels) / 2;
AudioDataValue* audio = new AudioDataValue[aInfo->getSize()];
PodCopy(audio, static_cast<AudioDataValue*>(aBuffer), aInfo->getSize());
mCallback->Output(new AudioData(aInfo->getOffset(), aInfo->getPresentationTimeUs(),
aDuration,
numFrames,
audio,
numChannels,
mConfig.samples_per_second));
return NS_OK;
}
protected:
const mp4_demuxer::AudioDecoderConfig& mConfig;
};
bool AndroidDecoderModule::SupportsAudioMimeType(const char* aMimeType) {
JNIEnv* env = GetJNIForThread();
MediaCodec* decoder = CreateDecoder(env, aMimeType);
bool supports = (decoder != nullptr);
delete decoder;
return supports;
}
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateH264Decoder(
const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback)
{
nsAutoString mimeType;
mimeType.AssignASCII(aConfig.mime_type);
jobject jFormat = MediaFormat::CreateVideoFormat(mimeType,
aConfig.display_width,
aConfig.display_height);
if (!jFormat) {
return nullptr;
}
MediaFormat* format = MediaFormat::Wrap(jFormat);
if (!format) {
return nullptr;
}
nsRefPtr<MediaDataDecoder> decoder =
new VideoDataDecoder(aConfig, format, aCallback, aImageContainer);
return decoder.forget();
}
already_AddRefed<MediaDataDecoder>
AndroidDecoderModule::CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback)
{
nsAutoString mimeType;
mimeType.AssignASCII(aConfig.mime_type);
jobject jFormat = MediaFormat::CreateAudioFormat(mimeType,
aConfig.samples_per_second,
aConfig.channel_count);
if (jFormat == nullptr)
return nullptr;
MediaFormat* format = MediaFormat::Wrap(jFormat);
if (format == nullptr)
return nullptr;
JNIEnv* env = GetJNIForThread();
if (!format->GetByteBuffer(NS_LITERAL_STRING("csd-0"))) {
uint8_t* csd0 = new uint8_t[2];
csd0[0] = aConfig.audio_specific_config[0];
csd0[1] = aConfig.audio_specific_config[1];
jobject buffer = env->NewDirectByteBuffer(csd0, 2);
format->SetByteBuffer(NS_LITERAL_STRING("csd-0"), buffer);
env->DeleteLocalRef(buffer);
}
if (mimeType.EqualsLiteral("audio/mp4a-latm")) {
format->SetInteger(NS_LITERAL_STRING("is-adts"), 1);
}
nsRefPtr<MediaDataDecoder> decoder =
new AudioDataDecoder(aConfig, format, aCallback);
return decoder.forget();
}
nsresult AndroidDecoderModule::Shutdown()
{
return NS_OK;
}
MediaCodecDataDecoder::MediaCodecDataDecoder(MediaData::Type aType,
const char* aMimeType,
MediaFormat* aFormat,
MediaDataDecoderCallback* aCallback)
: mType(aType)
, mMimeType(strdup(aMimeType))
, mFormat(aFormat)
, mCallback(aCallback)
, mInputBuffers(nullptr)
, mOutputBuffers(nullptr)
, mMonitor("MediaCodecDataDecoder::mMonitor")
, mDraining(false)
, mStopping(false)
{
}
MediaCodecDataDecoder::~MediaCodecDataDecoder()
{
JNIEnv* env = GetJNIForThread();
Shutdown();
if (mInputBuffers) {
env->DeleteGlobalRef(mInputBuffers);
mInputBuffers = nullptr;
}
if (mOutputBuffers) {
env->DeleteGlobalRef(mOutputBuffers);
mOutputBuffers = nullptr;
}
}
nsresult MediaCodecDataDecoder::Init()
{
return InitDecoder();
}
nsresult MediaCodecDataDecoder::InitDecoder(jobject aSurface)
{
JNIEnv* env = GetJNIForThread();
mDecoder = CreateDecoder(env, mMimeType);
if (!mDecoder) {
mCallback->Error();
return NS_ERROR_FAILURE;
}
mDecoder->Configure(mFormat->wrappedObject(), aSurface, nullptr, 0);
mDecoder->Start();
ResetInputBuffers();
ResetOutputBuffers();
NS_NewNamedThread("MC Decoder", getter_AddRefs(mThread),
NS_NewRunnableMethod(this, &MediaCodecDataDecoder::DecoderLoop));
return NS_OK;
}
// This is in usec, so that's 10ms
#define DECODER_TIMEOUT 10000
void MediaCodecDataDecoder::DecoderLoop()
{
bool outputDone = false;
JNIEnv* env = GetJNIForThread();
mp4_demuxer::MP4Sample* sample = nullptr;
for (;;) {
{
MonitorAutoLock lock(mMonitor);
while (!mStopping && !mDraining && mQueue.empty()) {
if (mQueue.empty()) {
// We could be waiting here forever if we don't signal that we need more input
mCallback->InputExhausted();
}
lock.Wait();
}
if (mStopping) {
// Get out of the loop. This is the only exit point.
break;
}
if (mDraining) {
mDecoder->Flush();
ClearQueue();
mDraining = false;
lock.Notify();
continue;
}
// We're not stopping or draining, so try to get a sample
if (!mQueue.empty()) {
sample = mQueue.front();
}
}
if (sample) {
// We have a sample, try to feed it to the decoder
int inputIndex = mDecoder->DequeueInputBuffer(DECODER_TIMEOUT);
if (inputIndex >= 0) {
jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
void* directBuffer = env->GetDirectBufferAddress(buffer);
// We're feeding this to the decoder, so remove it from the queue
mMonitor.Lock();
mQueue.pop();
mMonitor.Unlock();
MOZ_ASSERT(env->GetDirectBufferCapacity(buffer) >= sample->size,
"Decoder buffer is not large enough for sample");
PodCopy((uint8_t*)directBuffer, sample->data, sample->size);
mDecoder->QueueInputBuffer(inputIndex, 0, sample->size, sample->composition_timestamp, 0);
mDurations.push(sample->duration);
delete sample;
sample = nullptr;
outputDone = false;
env->DeleteLocalRef(buffer);
}
}
if (!outputDone) {
BufferInfo bufferInfo;
int outputStatus = mDecoder->DequeueOutputBuffer(bufferInfo.wrappedObject(), DECODER_TIMEOUT);
if (outputStatus == MediaCodec::getINFO_TRY_AGAIN_LATER()) {
// We might want to call mCallback->InputExhausted() here, but there seems to be
// some possible bad interactions here with the threading
} else if (outputStatus == MediaCodec::getINFO_OUTPUT_BUFFERS_CHANGED()) {
ResetOutputBuffers();
} else if (outputStatus == MediaCodec::getINFO_OUTPUT_FORMAT_CHANGED()) {
// Don't care, we use SurfaceTexture for video
} else if (outputStatus < 0) {
printf_stderr("unknown error from decoder! %d\n", outputStatus);
mCallback->Error();
} else {
// We have a valid buffer index >= 0 here
if (bufferInfo.getFlags() & MediaCodec::getBUFFER_FLAG_END_OF_STREAM()) {
outputDone = true;
}
MOZ_ASSERT(!mDurations.empty(), "Should have had a duration queued");
Microseconds duration = 0;
if (!mDurations.empty()) {
duration = mDurations.front();
mDurations.pop();
}
jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputStatus);
if (buffer) {
// The buffer will be null on Android L if we are decoding to a Surface
void* directBuffer = env->GetDirectBufferAddress(buffer);
Output(&bufferInfo, directBuffer, duration);
}
// The Surface will be updated at this point (for video)
mDecoder->ReleaseOutputBuffer(outputStatus, true);
PostOutput(&bufferInfo, duration);
if (buffer) {
env->DeleteLocalRef(buffer);
}
}
}
}
// We're done
mMonitor.Lock();
mStopping = false;
mMonitor.Notify();
mMonitor.Unlock();
}
void MediaCodecDataDecoder::ClearQueue()
{
mMonitor.AssertCurrentThreadOwns();
while (!mQueue.empty()) {
delete mQueue.front();
mQueue.pop();
}
while (!mDurations.empty()) {
mDurations.pop();
}
}
nsresult MediaCodecDataDecoder::Input(mp4_demuxer::MP4Sample* aSample) {
MonitorAutoLock lock(mMonitor);
mQueue.push(aSample);
lock.NotifyAll();
return NS_OK;
}
void MediaCodecDataDecoder::ResetInputBuffers()
{
JNIEnv* env = GetJNIForThread();
if (mInputBuffers) {
env->DeleteGlobalRef(mInputBuffers);
}
mInputBuffers = (jobjectArray) env->NewGlobalRef(mDecoder->GetInputBuffers());
}
void MediaCodecDataDecoder::ResetOutputBuffers()
{
JNIEnv* env = GetJNIForThread();
if (mOutputBuffers) {
env->DeleteGlobalRef(mOutputBuffers);
}
mOutputBuffers = (jobjectArray) env->NewGlobalRef(mDecoder->GetOutputBuffers());
}
nsresult MediaCodecDataDecoder::Flush() {
Drain();
return NS_OK;
}
nsresult MediaCodecDataDecoder::Drain() {
MonitorAutoLock lock(mMonitor);
mDraining = true;
lock.Notify();
while (mDraining) {
lock.Wait();
}
mCallback->DrainComplete();
return NS_OK;
}
nsresult MediaCodecDataDecoder::Shutdown() {
MonitorAutoLock lock(mMonitor);
if (!mThread || mStopping) {
// Already shutdown or in the process of doing so
return NS_OK;
}
mStopping = true;
lock.Notify();
while (mStopping) {
lock.Wait();
}
mThread->Shutdown();
mThread = nullptr;
mDecoder->Stop();
mDecoder->Release();
return NS_OK;
}

View File

@ -1,109 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef AndroidDecoderModule_h_
#define AndroidDecoderModule_h_
#include "PlatformDecoderModule.h"
#include "AndroidJavaWrappers.h"
#include "AndroidSurfaceTexture.h"
#include "GeneratedSDKWrappers.h"
#include "mozilla/Monitor.h"
#include <queue>
namespace mozilla {
typedef std::queue<mp4_demuxer::MP4Sample*> SampleQueue;
namespace widget {
namespace android {
class MediaCodec;
class MediaFormat;
class ByteBuffer;
}
}
class MediaCodecDataDecoder;
class AndroidDecoderModule : public PlatformDecoderModule {
public:
virtual nsresult Shutdown() MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder>
CreateH264Decoder(const mp4_demuxer::VideoDecoderConfig& aConfig,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer,
MediaTaskQueue* aVideoTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
virtual already_AddRefed<MediaDataDecoder>
CreateAudioDecoder(const mp4_demuxer::AudioDecoderConfig& aConfig,
MediaTaskQueue* aAudioTaskQueue,
MediaDataDecoderCallback* aCallback) MOZ_OVERRIDE;
AndroidDecoderModule() {}
virtual ~AndroidDecoderModule() {}
virtual bool SupportsAudioMimeType(const char* aMimeType) MOZ_OVERRIDE;
};
class MediaCodecDataDecoder : public MediaDataDecoder {
public:
MediaCodecDataDecoder(MediaData::Type aType,
const char* aMimeType,
mozilla::widget::android::MediaFormat* aFormat,
MediaDataDecoderCallback* aCallback);
virtual ~MediaCodecDataDecoder();
virtual nsresult Init() MOZ_OVERRIDE;
virtual nsresult Flush() MOZ_OVERRIDE;
virtual nsresult Drain() MOZ_OVERRIDE;
virtual nsresult Shutdown() MOZ_OVERRIDE;
virtual nsresult Input(mp4_demuxer::MP4Sample* aSample);
protected:
friend class AndroidDecoderModule;
MediaData::Type mType;
nsAutoPtr<char> mMimeType;
nsAutoPtr<mozilla::widget::android::MediaFormat> mFormat;
MediaDataDecoderCallback* mCallback;
nsAutoPtr<mozilla::widget::android::MediaCodec> mDecoder;
jobjectArray mInputBuffers;
jobjectArray mOutputBuffers;
nsCOMPtr<nsIThread> mThread;
// Only these members are protected by mMonitor.
Monitor mMonitor;
bool mDraining;
bool mStopping;
SampleQueue mQueue;
std::queue<Microseconds> mDurations;
virtual nsresult InitDecoder(jobject aSurface = nullptr);
virtual nsresult Output(mozilla::widget::android::BufferInfo* aInfo, void* aBuffer, Microseconds aDuration) { return NS_OK; }
virtual nsresult PostOutput(mozilla::widget::android::BufferInfo* aInfo, Microseconds aDuration) { return NS_OK; }
void ResetInputBuffers();
void ResetOutputBuffers();
void DecoderLoop();
virtual void ClearQueue();
};
} // namespace mozilla
#endif

View File

@ -25,7 +25,7 @@ if CONFIG['MOZ_WMF']:
if CONFIG['MOZ_EME']:
DIRS += ['eme']
if CONFIG['MOZ_FFMPEG']:
EXPORTS += [
'ffmpeg/FFmpegRuntimeLinker.h',
@ -67,14 +67,6 @@ if CONFIG['ANDROID_VERSION'] >= '18' and CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gonk':
include('/ipc/chromium/chromium-config.mozbuild')
if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android':
EXPORTS += [
'android/AndroidDecoderModule.h',
]
UNIFIED_SOURCES += [
'android/AndroidDecoderModule.cpp',
]
FINAL_LIBRARY = 'xul'
FAIL_ON_WARNINGS = True

View File

@ -952,7 +952,7 @@ void nsNPAPIPluginInstance::ReleaseContentTexture(nsNPAPIPluginInstance::Texture
mContentTexture->Release(aTextureInfo);
}
AndroidSurfaceTexture* nsNPAPIPluginInstance::CreateSurfaceTexture()
nsSurfaceTexture* nsNPAPIPluginInstance::CreateSurfaceTexture()
{
if (!EnsureGLContext())
return nullptr;
@ -961,8 +961,7 @@ AndroidSurfaceTexture* nsNPAPIPluginInstance::CreateSurfaceTexture()
if (!texture)
return nullptr;
AndroidSurfaceTexture* surface = AndroidSurfaceTexture::Create(TexturePoolOGL::GetGLContext(),
texture);
nsSurfaceTexture* surface = nsSurfaceTexture::Create(texture);
if (!surface)
return nullptr;
@ -986,7 +985,7 @@ void* nsNPAPIPluginInstance::AcquireContentWindow()
return nullptr;
}
return mContentSurface->NativeWindow()->Handle();
return mContentSurface->GetNativeWindow();
}
EGLImage
@ -998,7 +997,7 @@ nsNPAPIPluginInstance::AsEGLImage()
return mContentTexture->CreateEGLImage();
}
AndroidSurfaceTexture*
nsSurfaceTexture*
nsNPAPIPluginInstance::AsSurfaceTexture()
{
if (!mContentSurface)
@ -1009,13 +1008,13 @@ nsNPAPIPluginInstance::AsSurfaceTexture()
void* nsNPAPIPluginInstance::AcquireVideoWindow()
{
AndroidSurfaceTexture* surface = CreateSurfaceTexture();
nsSurfaceTexture* surface = CreateSurfaceTexture();
if (!surface)
return nullptr;
VideoInfo* info = new VideoInfo(surface);
void* window = info->mSurfaceTexture->NativeWindow()->Handle();
void* window = info->mSurfaceTexture->GetNativeWindow();
mVideos.insert(std::pair<void*, VideoInfo*>(window, info));
return window;

View File

@ -21,7 +21,7 @@
#include "nsAutoPtr.h"
#include "nsIRunnable.h"
#include "GLContextTypes.h"
#include "AndroidSurfaceTexture.h"
#include "nsSurfaceTexture.h"
#include "AndroidBridge.h"
#include <map>
class PluginEventRunnable;
@ -192,12 +192,12 @@ public:
void* AcquireContentWindow();
EGLImage AsEGLImage();
mozilla::gl::AndroidSurfaceTexture* AsSurfaceTexture();
nsSurfaceTexture* AsSurfaceTexture();
// For ANPVideo
class VideoInfo {
public:
VideoInfo(mozilla::gl::AndroidSurfaceTexture* aSurfaceTexture) :
VideoInfo(nsSurfaceTexture* aSurfaceTexture) :
mSurfaceTexture(aSurfaceTexture)
{
}
@ -207,7 +207,7 @@ public:
mSurfaceTexture = nullptr;
}
nsRefPtr<mozilla::gl::AndroidSurfaceTexture> mSurfaceTexture;
nsRefPtr<nsSurfaceTexture> mSurfaceTexture;
gfxRect mDimensions;
};
@ -334,7 +334,7 @@ protected:
bool mInverted;
nsRefPtr<SharedPluginTexture> mContentTexture;
nsRefPtr<mozilla::gl::AndroidSurfaceTexture> mContentSurface;
nsRefPtr<nsSurfaceTexture> mContentSurface;
#endif
enum {
@ -383,7 +383,7 @@ private:
#ifdef MOZ_WIDGET_ANDROID
void EnsureSharedTexture();
mozilla::gl::AndroidSurfaceTexture* CreateSurfaceTexture();
nsSurfaceTexture* CreateSurfaceTexture();
std::map<void*, VideoInfo*> mVideos;
bool mOnScreen;

View File

@ -185,7 +185,7 @@ AttachToContainerAsSurfaceTexture(ImageContainer* container,
MOZ_ASSERT(out_image);
MOZ_ASSERT(!*out_image);
mozilla::gl::AndroidSurfaceTexture* surfTex = instance->AsSurfaceTexture();
nsSurfaceTexture* surfTex = instance->AsSurfaceTexture();
if (!surfTex) {
return;
}

View File

@ -1,281 +0,0 @@
#ifdef MOZ_WIDGET_ANDROID
#include "AndroidNativeWindow.h"
#include "prlink.h"
// #define ANDROID_NATIVE_WINDOW_DEBUG
#if defined(ANDROID_NATIVE_WINDOW_DEBUG) || defined(DEBUG)
#define ALOG(args...) __android_log_print(ANDROID_LOG_INFO, "AndroidNativeWindow" , ## args)
#else
#define ALOG(args...) ((void)0)
#endif
using namespace mozilla::gfx;
using namespace mozilla::gl;
using namespace mozilla;
class NativeWindowLibrary
{
public:
NativeWindowLibrary()
: fANativeWindow_fromSurface(nullptr)
, fANativeWindow_release(nullptr)
, fANativeWindow_setBuffersGeometry(nullptr)
, fANativeWindow_lock(nullptr)
, fANativeWindow_unlockAndPost(nullptr)
, fANativeWindow_getFormat(nullptr)
, fANativeWindow_getWidth(nullptr)
, fANativeWindow_getHeight(nullptr)
{
PRLibrary* lib = PR_LoadLibrary("libandroid.so");
fANativeWindow_fromSurface = (pfnANativeWindow_fromSurface)PR_FindSymbol(lib, "ANativeWindow_fromSurface");
fANativeWindow_release = (pfnANativeWindow_release)PR_FindSymbol(lib, "ANativeWindow_release");
fANativeWindow_setBuffersGeometry = (pfnANativeWindow_setBuffersGeometry)PR_FindSymbol(lib, "ANativeWindow_setBuffersGeometry");
fANativeWindow_lock = (pfnANativeWindow_lock)PR_FindSymbol(lib, "ANativeWindow_lock");
fANativeWindow_unlockAndPost = (pfnANativeWindow_unlockAndPost)PR_FindSymbol(lib, "ANativeWindow_unlockAndPost");
fANativeWindow_getFormat = (pfnANativeWindow_getFormat)PR_FindSymbol(lib, "ANativeWindow_getFormat");
fANativeWindow_getWidth = (pfnANativeWindow_getWidth)PR_FindSymbol(lib, "ANativeWindow_getWidth");
fANativeWindow_getHeight = (pfnANativeWindow_getHeight)PR_FindSymbol(lib, "ANativeWindow_getHeight");
}
void* ANativeWindow_fromSurface(JNIEnv* aEnv, jobject aSurface) {
ALOG("%s: env=%p, surface=%p\n", __PRETTY_FUNCTION__, aEnv, aSurface);
if (!Initialized()) {
return nullptr;
}
return fANativeWindow_fromSurface(aEnv, aSurface);
}
void ANativeWindow_release(void* aWindow) {
ALOG("%s: window=%p\n", __PRETTY_FUNCTION__, aWindow);
if (!Initialized()) {
return;
}
fANativeWindow_release(aWindow);
}
bool ANativeWindow_setBuffersGeometry(void* aWindow, int32_t aWidth, int32_t aHeight, int32_t aFormat) {
ALOG("%s: window=%p, width=%d, height=%d, format=%d\n", __PRETTY_FUNCTION__, aWindow, aWidth, aHeight, aFormat);
if (!Initialized()) {
return false;
}
return fANativeWindow_setBuffersGeometry(aWindow, aWidth, aHeight, (int32_t)aFormat) == 0;
}
bool ANativeWindow_lock(void* aWindow, void* out_buffer, void* in_out_dirtyBounds) {
ALOG("%s: window=%p, out_buffer=%p, in_out_dirtyBounds=%p\n", __PRETTY_FUNCTION__,
aWindow, out_buffer, in_out_dirtyBounds);
if (!Initialized()) {
return false;
}
return fANativeWindow_lock(aWindow, out_buffer, in_out_dirtyBounds) == 0;
}
bool ANativeWindow_unlockAndPost(void* aWindow) {
ALOG("%s: window=%p\n", __PRETTY_FUNCTION__, aWindow);
if (!Initialized()) {
return false;
}
return fANativeWindow_unlockAndPost(aWindow) == 0;
}
AndroidWindowFormat ANativeWindow_getFormat(void* aWindow) {
ALOG("%s: window=%p\n", __PRETTY_FUNCTION__, aWindow);
if (!Initialized()) {
return AndroidWindowFormat::Unknown;
}
return (AndroidWindowFormat)fANativeWindow_getFormat(aWindow);
}
int32_t ANativeWindow_getWidth(void* aWindow) {
ALOG("%s: window=%p\n", __PRETTY_FUNCTION__, aWindow);
if (!Initialized()) {
return -1;
}
return fANativeWindow_getWidth(aWindow);
}
int32_t ANativeWindow_getHeight(void* aWindow) {
ALOG("%s: window=%p\n", __PRETTY_FUNCTION__, aWindow);
if (!Initialized()) {
return -1;
}
return fANativeWindow_getHeight(aWindow);
}
bool Initialized() {
return fANativeWindow_fromSurface && fANativeWindow_release && fANativeWindow_setBuffersGeometry
&& fANativeWindow_lock && fANativeWindow_unlockAndPost && fANativeWindow_getFormat && fANativeWindow_getWidth
&& fANativeWindow_getHeight;
}
private:
typedef void* (*pfnANativeWindow_fromSurface)(JNIEnv* env, jobject surface);
pfnANativeWindow_fromSurface fANativeWindow_fromSurface;
typedef void (*pfnANativeWindow_release)(void* window);
pfnANativeWindow_release fANativeWindow_release;
typedef int32_t (*pfnANativeWindow_setBuffersGeometry)(void* window, int32_t width, int32_t height, int32_t format);
pfnANativeWindow_setBuffersGeometry fANativeWindow_setBuffersGeometry;
typedef int32_t (*pfnANativeWindow_lock)(void *window, void *out_buffer, void *in_out_dirtyBounds);
pfnANativeWindow_lock fANativeWindow_lock;
typedef int32_t (*pfnANativeWindow_unlockAndPost)(void *window);
pfnANativeWindow_unlockAndPost fANativeWindow_unlockAndPost;
typedef AndroidWindowFormat (*pfnANativeWindow_getFormat)(void* window);
pfnANativeWindow_getFormat fANativeWindow_getFormat;
typedef int32_t (*pfnANativeWindow_getWidth)(void* window);
pfnANativeWindow_getWidth fANativeWindow_getWidth;
typedef int32_t (*pfnANativeWindow_getHeight)(void* window);
pfnANativeWindow_getHeight fANativeWindow_getHeight;
};
static NativeWindowLibrary* sLibrary = nullptr;
static bool
EnsureInit()
{
static bool initialized = false;
if (!initialized) {
if (!sLibrary) {
sLibrary = new NativeWindowLibrary();
}
initialized = sLibrary->Initialized();
}
return initialized;
}
namespace mozilla {
/* static */ AndroidNativeWindow*
AndroidNativeWindow::CreateFromSurface(JNIEnv* aEnv, jobject aSurface)
{
if (!EnsureInit()) {
ALOG("Not initialized");
return nullptr;
}
void* window = sLibrary->ANativeWindow_fromSurface(aEnv, aSurface);
if (!window) {
ALOG("Failed to create window from surface");
return nullptr;
}
return new AndroidNativeWindow(window);
}
AndroidNativeWindow::~AndroidNativeWindow()
{
if (EnsureInit() && mWindow) {
sLibrary->ANativeWindow_release(mWindow);
mWindow = nullptr;
}
}
IntSize
AndroidNativeWindow::Size()
{
MOZ_ASSERT(mWindow);
if (!EnsureInit()) {
return IntSize(0, 0);
}
return IntSize(sLibrary->ANativeWindow_getWidth(mWindow), sLibrary->ANativeWindow_getHeight(mWindow));
}
AndroidWindowFormat
AndroidNativeWindow::Format()
{
MOZ_ASSERT(mWindow);
if (!EnsureInit()) {
return AndroidWindowFormat::Unknown;
}
return sLibrary->ANativeWindow_getFormat(mWindow);
}
bool
AndroidNativeWindow::SetBuffersGeometry(int32_t aWidth, int32_t aHeight, AndroidWindowFormat aFormat)
{
MOZ_ASSERT(mWindow);
if (!EnsureInit())
return false;
return sLibrary->ANativeWindow_setBuffersGeometry(mWindow, aWidth, aHeight, (int32_t)aFormat);
}
bool
AndroidNativeWindow::Lock(void** out_bits, int32_t* out_width, int32_t* out_height,
int32_t* out_stride, AndroidWindowFormat* out_format)
{
/* Copied from native_window.h in Android NDK (platform-9) */
typedef struct ANativeWindow_Buffer {
// The number of pixels that are shown horizontally.
int32_t width;
// The number of pixels that are shown vertically.
int32_t height;
// The number of *pixels* that a line in the buffer takes in
// memory. This may be >= width.
int32_t stride;
// The format of the buffer. One of WINDOW_FORMAT_*
int32_t format;
// The actual bits.
void* bits;
// Do not touch.
uint32_t reserved[6];
} ANativeWindow_Buffer;
ANativeWindow_Buffer buffer;
if (!sLibrary->ANativeWindow_lock(mWindow, &buffer, nullptr)) {
ALOG("Failed to lock");
return false;
}
*out_bits = buffer.bits;
*out_width = buffer.width;
*out_height = buffer.height;
*out_stride = buffer.stride;
*out_format = (AndroidWindowFormat)buffer.format;
return true;
}
bool
AndroidNativeWindow::UnlockAndPost()
{
if (!EnsureInit()) {
ALOG("Not initialized");
return false;
}
return sLibrary->ANativeWindow_unlockAndPost(mWindow);
}
}
#endif // MOZ_WIDGET_ANDROID

View File

@ -1,68 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
// vim:set ts=2 sts=2 sw=2 et cin:
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef AndroidNativeWindow_h__
#define AndroidNativeWindow_h__
#ifdef MOZ_WIDGET_ANDROID
#include <jni.h>
#include "GLDefs.h"
#include "nsISupports.h"
#include "mozilla/TypedEnum.h"
#include "mozilla/gfx/2D.h"
namespace mozilla {
namespace gl {
MOZ_BEGIN_ENUM_CLASS(AndroidWindowFormat)
Unknown = -1,
RGBA_8888 = 1,
RGBX_8888 = 1 << 1,
RGB_565 = 1 << 2
MOZ_END_ENUM_CLASS(AndroidWindowFormat)
/**
* This class is a wrapper around Android's ANativeWindow. It gives native
* code access to the window backing a Java Surface so the window can be
* queried, locked, drawn to, and posted.
*/
class AndroidNativeWindow {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AndroidNativeWindow)
public:
static AndroidNativeWindow* CreateFromSurface(JNIEnv* aEnv, jobject aSurface);
gfx::IntSize Size();
AndroidWindowFormat Format();
bool SetBuffersGeometry(int32_t aWidth, int32_t aHeight, AndroidWindowFormat aFormat);
bool Lock(void** out_bits, int32_t* out_width, int32_t* out_height, int32_t* out_stride, AndroidWindowFormat* out_format);
bool UnlockAndPost();
void* Handle() { return mWindow; }
protected:
AndroidNativeWindow(void* aWindow)
: mWindow(aWindow)
{
}
virtual ~AndroidNativeWindow();
void* mWindow;
};
}
}
#endif
#endif

View File

@ -1,409 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
// vim:set ts=2 sts=2 sw=2 et cin:
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef MOZ_WIDGET_ANDROID
#include <set>
#include <map>
#include <android/log.h>
#include "AndroidSurfaceTexture.h"
#include "gfxImageSurface.h"
#include "AndroidBridge.h"
#include "nsThreadUtils.h"
#include "mozilla/gfx/Matrix.h"
#include "GeneratedJNIWrappers.h"
#include "GLContext.h"
using namespace mozilla;
using namespace mozilla::widget::android;
// UGH
static std::map<int, AndroidSurfaceTexture*> sInstances;
static int sNextID = 0;
static bool
IsDetachSupported()
{
return AndroidBridge::Bridge()->GetAPIVersion() >= 16; /* Jelly Bean */
}
static bool
IsSTSupported()
{
return AndroidBridge::Bridge()->GetAPIVersion() >= 14; /* ICS */
}
static class JNIFunctions {
public:
JNIFunctions() : mInitialized(false)
{
}
bool EnsureInitialized()
{
if (mInitialized) {
return true;
}
if (!IsSTSupported()) {
return false;
}
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
jSurfaceTextureClass = (jclass)env->NewGlobalRef(env->FindClass("android/graphics/SurfaceTexture"));
jSurfaceTexture_Ctor = env->GetMethodID(jSurfaceTextureClass, "<init>", "(I)V");
jSurfaceTexture_updateTexImage = env->GetMethodID(jSurfaceTextureClass, "updateTexImage", "()V");
jSurfaceTexture_getTransformMatrix = env->GetMethodID(jSurfaceTextureClass, "getTransformMatrix", "([F)V");
jSurfaceTexture_setDefaultBufferSize = env->GetMethodID(jSurfaceTextureClass, "setDefaultBufferSize", "(II)V");
if (IsDetachSupported()) {
jSurfaceTexture_attachToGLContext = env->GetMethodID(jSurfaceTextureClass, "attachToGLContext", "(I)V");
jSurfaceTexture_detachFromGLContext = env->GetMethodID(jSurfaceTextureClass, "detachFromGLContext", "()V");
} else {
jSurfaceTexture_attachToGLContext = jSurfaceTexture_detachFromGLContext = 0;
}
jSurfaceClass = (jclass)env->NewGlobalRef(env->FindClass("android/view/Surface"));
jSurface_Ctor = env->GetMethodID(jSurfaceClass, "<init>", "(Landroid/graphics/SurfaceTexture;)V");
mInitialized = true;
return true;
}
jobject CreateSurfaceTexture(GLuint aTexture)
{
if (!EnsureInitialized())
return nullptr;
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
return env->NewGlobalRef(env->NewObject(jSurfaceTextureClass, jSurfaceTexture_Ctor, (int) aTexture));
}
jobject CreateSurface(jobject aSurfaceTexture)
{
if (!EnsureInitialized())
return nullptr;
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
return env->NewGlobalRef(env->NewObject(jSurfaceClass, jSurface_Ctor, aSurfaceTexture));
}
void ReleaseSurfaceTexture(jobject aSurfaceTexture)
{
JNIEnv* env = GetJNIForThread();
env->DeleteGlobalRef(aSurfaceTexture);
}
void UpdateTexImage(jobject aSurfaceTexture)
{
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_updateTexImage);
}
bool GetTransformMatrix(jobject aSurfaceTexture, gfx::Matrix4x4& aMatrix)
{
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
jfloatArray jarray = env->NewFloatArray(16);
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_getTransformMatrix, jarray);
jfloat* array = env->GetFloatArrayElements(jarray, nullptr);
aMatrix._11 = array[0];
aMatrix._12 = array[1];
aMatrix._13 = array[2];
aMatrix._14 = array[3];
aMatrix._21 = array[4];
aMatrix._22 = array[5];
aMatrix._23 = array[6];
aMatrix._24 = array[7];
aMatrix._31 = array[8];
aMatrix._32 = array[9];
aMatrix._33 = array[10];
aMatrix._34 = array[11];
aMatrix._41 = array[12];
aMatrix._42 = array[13];
aMatrix._43 = array[14];
aMatrix._44 = array[15];
env->ReleaseFloatArrayElements(jarray, array, 0);
return false;
}
void SetDefaultBufferSize(jobject aSurfaceTexture, int32_t width, int32_t height)
{
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_setDefaultBufferSize, width, height);
}
void AttachToGLContext(jobject aSurfaceTexture, int32_t texName)
{
MOZ_ASSERT(jSurfaceTexture_attachToGLContext);
JNIEnv* env = GetJNIForThread();
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_attachToGLContext, texName);
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
}
}
void DetachFromGLContext(jobject aSurfaceTexture)
{
MOZ_ASSERT(jSurfaceTexture_detachFromGLContext);
JNIEnv* env = GetJNIForThread();
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_detachFromGLContext);
if (env->ExceptionCheck()) {
env->ExceptionDescribe();
env->ExceptionClear();
}
}
private:
bool mInitialized;
jclass jSurfaceTextureClass;
jmethodID jSurfaceTexture_Ctor;
jmethodID jSurfaceTexture_updateTexImage;
jmethodID jSurfaceTexture_getTransformMatrix;
jmethodID jSurfaceTexture_setDefaultBufferSize;
jmethodID jSurfaceTexture_attachToGLContext;
jmethodID jSurfaceTexture_detachFromGLContext;
jclass jSurfaceClass;
jmethodID jSurface_Ctor;
} sJNIFunctions;
AndroidSurfaceTexture*
AndroidSurfaceTexture::Create()
{
return Create(nullptr, 0);
}
AndroidSurfaceTexture*
AndroidSurfaceTexture::Create(GLContext* aContext, GLuint aTexture)
{
if (!IsSTSupported()) {
return nullptr;
}
AndroidSurfaceTexture* st = new AndroidSurfaceTexture();
if (!st->Init(aContext, aTexture)) {
printf_stderr("Failed to initialize AndroidSurfaceTexture");
delete st;
st = nullptr;
}
return st;
}
AndroidSurfaceTexture*
AndroidSurfaceTexture::Find(int id)
{
std::map<int, AndroidSurfaceTexture*>::iterator it;
it = sInstances.find(id);
if (it == sInstances.end())
return nullptr;
return it->second;
}
bool
AndroidSurfaceTexture::Check()
{
return sJNIFunctions.EnsureInitialized();
}
bool
AndroidSurfaceTexture::Attach(GLContext* aContext, PRIntervalTime aTimeout)
{
MonitorAutoLock lock(mMonitor);
if (mAttachedContext == aContext) {
NS_WARNING("Tried to attach same GLContext to AndroidSurfaceTexture");
return true;
}
if (!IsDetachSupported()) {
return false;
}
while (mAttachedContext) {
// Wait until it's detached (or we time out)
if (NS_FAILED(lock.Wait(aTimeout))) {
return false;
}
}
MOZ_ASSERT(aContext->IsOwningThreadCurrent(), "Trying to attach GLContext from different thread");
mAttachedContext = aContext;
mAttachedContext->MakeCurrent();
aContext->fGenTextures(1, &mTexture);
sJNIFunctions.AttachToGLContext(mSurfaceTexture, mTexture);
return true;
}
bool
AndroidSurfaceTexture::Detach()
{
MonitorAutoLock lock(mMonitor);
if (!IsDetachSupported() ||
!mAttachedContext || !mAttachedContext->IsOwningThreadCurrent()) {
return false;
}
mAttachedContext->MakeCurrent();
// This call takes care of deleting the texture
sJNIFunctions.DetachFromGLContext(mSurfaceTexture);
mTexture = 0;
mAttachedContext = nullptr;
lock.NotifyAll();
return true;
}
bool
AndroidSurfaceTexture::Init(GLContext* aContext, GLuint aTexture)
{
if (!aTexture && !IsDetachSupported()) {
// We have no texture and cannot initialize detached, bail out
return false;
}
if (!sJNIFunctions.EnsureInitialized())
return false;
JNIEnv* env = GetJNIForThread();
mSurfaceTexture = sJNIFunctions.CreateSurfaceTexture(aTexture);
if (!mSurfaceTexture) {
return false;
}
if (!aTexture) {
sJNIFunctions.DetachFromGLContext(mSurfaceTexture);
}
mAttachedContext = aContext;
mSurface = sJNIFunctions.CreateSurface(mSurfaceTexture);
if (!mSurface) {
return false;
}
mNativeWindow = AndroidNativeWindow::CreateFromSurface(env, mSurface);
mID = ++sNextID;
sInstances.insert(std::pair<int, AndroidSurfaceTexture*>(mID, this));
return true;
}
AndroidSurfaceTexture::AndroidSurfaceTexture()
: mTexture(0)
, mSurfaceTexture(nullptr)
, mSurface(nullptr)
, mMonitor("AndroidSurfaceTexture::mContextMonitor")
, mAttachedContext(nullptr)
{
}
AndroidSurfaceTexture::~AndroidSurfaceTexture()
{
sInstances.erase(mID);
mFrameAvailableCallback = nullptr;
JNIEnv* env = GetJNIForThread();
if (mSurfaceTexture) {
GeckoAppShell::UnregisterSurfaceTextureFrameListener(mSurfaceTexture);
env->DeleteGlobalRef(mSurfaceTexture);
mSurfaceTexture = nullptr;
}
if (mSurface) {
env->DeleteGlobalRef(mSurface);
mSurface = nullptr;
}
}
void
AndroidSurfaceTexture::UpdateTexImage()
{
sJNIFunctions.UpdateTexImage(mSurfaceTexture);
}
bool
AndroidSurfaceTexture::GetTransformMatrix(gfx::Matrix4x4& aMatrix)
{
return sJNIFunctions.GetTransformMatrix(mSurfaceTexture, aMatrix);
}
void
AndroidSurfaceTexture::SetFrameAvailableCallback(nsIRunnable* aRunnable)
{
if (aRunnable) {
GeckoAppShell::RegisterSurfaceTextureFrameListener(mSurfaceTexture, mID);
} else {
GeckoAppShell::UnregisterSurfaceTextureFrameListener(mSurfaceTexture);
}
mFrameAvailableCallback = aRunnable;
}
void
AndroidSurfaceTexture::SetDefaultSize(mozilla::gfx::IntSize size)
{
sJNIFunctions.SetDefaultBufferSize(mSurfaceTexture, size.width, size.height);
}
void
AndroidSurfaceTexture::NotifyFrameAvailable()
{
if (mFrameAvailableCallback) {
// Proxy to main thread if we aren't on it
if (!NS_IsMainThread()) {
// Proxy to main thread
nsCOMPtr<nsIRunnable> event = NS_NewRunnableMethod(this, &AndroidSurfaceTexture::NotifyFrameAvailable);
NS_DispatchToMainThread(event);
} else {
mFrameAvailableCallback->Run();
}
}
}
#endif // MOZ_WIDGET_ANDROID

View File

@ -1,117 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
// vim:set ts=2 sts=2 sw=2 et cin:
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef AndroidSurfaceTexture_h__
#define AndroidSurfaceTexture_h__
#ifdef MOZ_WIDGET_ANDROID
#include <jni.h>
#include "nsIRunnable.h"
#include "gfxPlatform.h"
#include "GLDefs.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/Monitor.h"
#include "AndroidNativeWindow.h"
class gfxASurface;
namespace mozilla {
namespace gfx {
class Matrix4x4;
}
}
namespace mozilla {
namespace gl {
class GLContext;
/**
* This class is a wrapper around Android's SurfaceTexture class.
* Usage is pretty much exactly like the Java class, so see
* the Android documentation for details.
*/
class AndroidSurfaceTexture {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AndroidSurfaceTexture)
public:
// The SurfaceTexture is created in an attached state. This method requires
// Android Ice Cream Sandwich.
static AndroidSurfaceTexture* Create(GLContext* aGLContext, GLuint aTexture);
// Here the SurfaceTexture will be created in a detached state. You must call
// Attach() with the GLContext you wish to composite with. It must be done
// on the thread where that GLContext is current. This method requires
// Android Jelly Bean.
static AndroidSurfaceTexture* Create();
static AndroidSurfaceTexture* Find(int id);
// Returns with reasonable certainty whether or not we'll
// be able to create and use a SurfaceTexture
static bool Check();
// If we are on Jelly Bean, the SurfaceTexture can be detached and reattached
// to allow consumption from different GLContexts. It is recommended to only
// attach while you are consuming in order to allow this.
//
// Only one GLContext may be attached at any given time. If another is already
// attached, we try to wait for it to become detached.
bool Attach(GLContext* aContext, PRIntervalTime aTimeout = PR_INTERVAL_NO_TIMEOUT);
// This is a noop on ICS, and will always fail
bool Detach();
GLContext* GetAttachedContext() { return mAttachedContext; }
AndroidNativeWindow* NativeWindow() {
return mNativeWindow;
}
// This attaches the updated data to the TEXTURE_EXTERNAL target
void UpdateTexImage();
bool GetTransformMatrix(mozilla::gfx::Matrix4x4& aMatrix);
int ID() { return mID; }
void SetDefaultSize(mozilla::gfx::IntSize size);
// The callback is guaranteed to be called on the main thread even
// if the upstream callback is received on a different thread
void SetFrameAvailableCallback(nsIRunnable* aRunnable);
// Only should be called by AndroidJNI when we get a
// callback from the underlying SurfaceTexture instance
void NotifyFrameAvailable();
GLuint Texture() { return mTexture; }
jobject JavaSurface() { return mSurface; }
private:
AndroidSurfaceTexture();
~AndroidSurfaceTexture();
bool Init(GLContext* aContext, GLuint aTexture);
GLuint mTexture;
jobject mSurfaceTexture;
jobject mSurface;
Monitor mMonitor;
GLContext* mAttachedContext;
RefPtr<AndroidNativeWindow> mNativeWindow;
int mID;
nsRefPtr<nsIRunnable> mFrameAvailableCallback;
};
}
}
#endif
#endif

View File

@ -10,17 +10,12 @@
#include "mozilla/Preferences.h"
#include "ImageContainer.h"
#include "HeapCopyOfStackArray.h"
#include "mozilla/gfx/Matrix.h"
#ifdef MOZ_WIDGET_GONK
#include "GrallocImages.h"
#include "GLLibraryEGL.h"
#endif
#ifdef MOZ_WIDGET_ANDROID
#include "GLImages.h"
#endif
using mozilla::layers::PlanarYCbCrImage;
using mozilla::layers::PlanarYCbCrData;
@ -149,7 +144,6 @@ GLBlitHelper::GLBlitHelper(GLContext* gl)
, mTex2DBlit_Program(0)
, mTex2DRectBlit_Program(0)
, mYFlipLoc(-1)
, mTextureTransformLoc(-1)
, mTexExternalBlit_FragShader(0)
, mTexYUVPlanarBlit_FragShader(0)
, mTexExternalBlit_Program(0)
@ -244,23 +238,21 @@ GLBlitHelper::InitTexQuadProgram(BlitType target)
vTexCoord * uTexCoordMult); \n\
} \n\
";
#ifdef ANDROID /* MOZ_WIDGET_ANDROID || MOZ_WIDGET_GONK */
#ifdef MOZ_WIDGET_GONK
const char kTexExternalBlit_FragShaderSource[] = "\
#extension GL_OES_EGL_image_external : require \n\
#ifdef GL_FRAGMENT_PRECISION_HIGH \n\
precision highp float; \n\
#else \n\
precision mediump float; \n\
#endif \n\
varying vec2 vTexCoord; \n\
uniform mat4 uTextureTransform; \n\
uniform samplerExternalOES uTexUnit; \n\
\n\
void main() \n\
{ \n\
gl_FragColor = texture2D(uTexUnit, \n\
(uTextureTransform * vec4(vTexCoord, 0.0, 1.0)).xy); \n\
} \n\
#extension GL_OES_EGL_image_external : require \n\
#ifdef GL_FRAGMENT_PRECISION_HIGH \n\
precision highp float; \n\
#else \n\
precision mediump float; \n\
#endif \n\
varying vec2 vTexCoord; \n\
uniform samplerExternalOES uTexUnit; \n\
\n\
void main() \n\
{ \n\
gl_FragColor = texture2D(uTexUnit, vTexCoord); \n\
} \n\
";
#endif
/* From Rec601:
@ -314,8 +306,7 @@ GLBlitHelper::InitTexQuadProgram(BlitType target)
fragShaderPtr = &mTex2DRectBlit_FragShader;
fragShaderSource = kTex2DRectBlit_FragShaderSource;
break;
#ifdef ANDROID
case ConvertSurfaceTexture:
#ifdef MOZ_WIDGET_GONK
case ConvertGralloc:
programPtr = &mTexExternalBlit_Program;
fragShaderPtr = &mTexExternalBlit_FragShader;
@ -450,9 +441,8 @@ GLBlitHelper::InitTexQuadProgram(BlitType target)
switch (target) {
case BlitTex2D:
case BlitTexRect:
case ConvertSurfaceTexture:
case ConvertGralloc: {
#ifdef ANDROID
#ifdef MOZ_WIDGET_GONK
GLint texUnitLoc = mGL->fGetUniformLocation(program, "uTexUnit");
MOZ_ASSERT(texUnitLoc != -1, "uniform uTexUnit not found");
mGL->fUniform1i(texUnitLoc, 0);
@ -482,12 +472,6 @@ GLBlitHelper::InitTexQuadProgram(BlitType target)
MOZ_ASSERT(mGL->fGetAttribLocation(program, "aPosition") == 0);
mYFlipLoc = mGL->fGetUniformLocation(program, "uYflip");
MOZ_ASSERT(mYFlipLoc != -1, "uniform: uYflip not found");
mTextureTransformLoc = mGL->fGetUniformLocation(program, "uTextureTransform");
if (mTextureTransformLoc >= 0) {
// Set identity matrix as default
gfx::Matrix4x4 identity;
mGL->fUniformMatrix4fv(mTextureTransformLoc, 1, false, &identity._11);
}
success = true;
} while (false);
@ -716,42 +700,6 @@ GLBlitHelper::BlitGrallocImage(layers::GrallocImage* grallocImage, bool yFlip)
}
#endif
#ifdef MOZ_WIDGET_ANDROID
bool
GLBlitHelper::BlitSurfaceTextureImage(layers::SurfaceTextureImage* stImage)
{
AndroidSurfaceTexture* surfaceTexture = stImage->GetData()->mSurfTex;
bool yFlip = stImage->GetData()->mInverted;
ScopedBindTextureUnit boundTU(mGL, LOCAL_GL_TEXTURE0);
mGL->fClear(LOCAL_GL_COLOR_BUFFER_BIT);
if (!surfaceTexture->Attach(mGL)) {
return false;
}
// UpdateTexImage() changes the EXTERNAL binding, so save it here
// so we can restore it after.
int oldBinding = 0;
mGL->fGetIntegerv(LOCAL_GL_TEXTURE_BINDING_EXTERNAL, &oldBinding);
surfaceTexture->UpdateTexImage();
Matrix4x4 transform;
surfaceTexture->GetTransformMatrix(transform);
mGL->fUniformMatrix4fv(mTextureTransformLoc, 1, false, &transform._11);
mGL->fUniform1f(mYFlipLoc, yFlip ? 1.0f : 0.0f);
mGL->fDrawArrays(LOCAL_GL_TRIANGLE_STRIP, 0, 4);
surfaceTexture->Detach();
mGL->fBindTexture(LOCAL_GL_TEXTURE_EXTERNAL, oldBinding);
return true;
}
#endif
bool
GLBlitHelper::BlitPlanarYCbCrImage(layers::PlanarYCbCrImage* yuvImage, bool yFlip)
{
@ -790,14 +738,15 @@ GLBlitHelper::BlitPlanarYCbCrImage(layers::PlanarYCbCrImage* yuvImage, bool yFli
}
bool
GLBlitHelper::BlitImageToFramebuffer(layers::Image* srcImage,
const gfx::IntSize& destSize,
GLuint destFB,
bool yFlip,
GLuint xoffset,
GLuint yoffset,
GLuint cropWidth,
GLuint cropHeight)
GLBlitHelper::BlitImageToTexture(layers::Image* srcImage,
const gfx::IntSize& destSize,
GLuint destTex,
GLenum destTarget,
bool yFlip,
GLuint xoffset,
GLuint yoffset,
GLuint cropWidth,
GLuint cropHeight)
{
ScopedGLDrawState autoStates(mGL);
@ -810,11 +759,6 @@ GLBlitHelper::BlitImageToFramebuffer(layers::Image* srcImage,
#ifdef MOZ_WIDGET_GONK
type = ConvertGralloc;
break;
#endif
#ifdef MOZ_WIDGET_ANDROID
case ImageFormat::SURFACE_TEXTURE:
type = ConvertSurfaceTexture;
break;
#endif
default:
return false;
@ -825,7 +769,12 @@ GLBlitHelper::BlitImageToFramebuffer(layers::Image* srcImage,
return false;
}
ScopedBindFramebuffer boundFB(mGL, destFB);
if (!mFBO) {
mGL->fGenFramebuffers(1, &mFBO);
}
ScopedBindFramebuffer boundFB(mGL, mFBO);
mGL->fFramebufferTexture2D(LOCAL_GL_FRAMEBUFFER, LOCAL_GL_COLOR_ATTACHMENT0, destTarget, destTex, 0);
mGL->fColorMask(LOCAL_GL_TRUE, LOCAL_GL_TRUE, LOCAL_GL_TRUE, LOCAL_GL_TRUE);
mGL->fViewport(0, 0, destSize.width, destSize.height);
if (xoffset != 0 && yoffset != 0 && cropWidth != 0 && cropHeight != 0) {
@ -844,39 +793,10 @@ GLBlitHelper::BlitImageToFramebuffer(layers::Image* srcImage,
PlanarYCbCrImage* yuvImage = static_cast<PlanarYCbCrImage*>(srcImage);
return BlitPlanarYCbCrImage(yuvImage, yFlip);
}
#ifdef MOZ_WIDGET_ANDROID
if (type == ConvertSurfaceTexture) {
layers::SurfaceTextureImage* stImage = static_cast<layers::SurfaceTextureImage*>(srcImage);
return BlitSurfaceTextureImage(stImage);
}
#endif
return false;
}
bool
GLBlitHelper::BlitImageToTexture(layers::Image* srcImage,
const gfx::IntSize& destSize,
GLuint destTex,
GLenum destTarget,
bool yFlip,
GLuint xoffset,
GLuint yoffset,
GLuint cropWidth,
GLuint cropHeight)
{
ScopedGLDrawState autoStates(mGL);
if (!mFBO) {
mGL->fGenFramebuffers(1, &mFBO);
}
ScopedBindFramebuffer boundFB(mGL, mFBO);
mGL->fFramebufferTexture2D(LOCAL_GL_FRAMEBUFFER, LOCAL_GL_COLOR_ATTACHMENT0, destTarget, destTex, 0);
return BlitImageToFramebuffer(srcImage, destSize, mFBO, yFlip, xoffset, yoffset,
cropWidth, cropHeight);
}
void
GLBlitHelper::BlitTextureToFramebuffer(GLuint srcTex, GLuint destFB,
const gfx::IntSize& srcSize,

View File

@ -19,7 +19,6 @@ namespace layers {
class Image;
class PlanarYCbCrImage;
class GrallocImage;
class SurfaceTextureImage;
}
namespace gl {
@ -98,7 +97,6 @@ class GLBlitHelper MOZ_FINAL
BlitTexRect,
ConvertGralloc,
ConvertPlanarYCbCr,
ConvertSurfaceTexture
};
// The GLContext is the sole owner of the GLBlitHelper.
GLContext* mGL;
@ -112,8 +110,6 @@ class GLBlitHelper MOZ_FINAL
GLint mYFlipLoc;
GLint mTextureTransformLoc;
// Data for image blit path
GLuint mTexExternalBlit_FragShader;
GLuint mTexYUVPlanarBlit_FragShader;
@ -146,9 +142,6 @@ class GLBlitHelper MOZ_FINAL
bool BlitGrallocImage(layers::GrallocImage* grallocImage, bool yFlip = false);
#endif
bool BlitPlanarYCbCrImage(layers::PlanarYCbCrImage* yuvImage, bool yFlip = false);
#ifdef MOZ_WIDGET_ANDROID
bool BlitSurfaceTextureImage(layers::SurfaceTextureImage* stImage);
#endif
public:
@ -182,9 +175,6 @@ public:
const gfx::IntSize& destSize,
GLenum srcTarget = LOCAL_GL_TEXTURE_2D,
GLenum destTarget = LOCAL_GL_TEXTURE_2D);
bool BlitImageToFramebuffer(layers::Image* srcImage, const gfx::IntSize& destSize,
GLuint destFB, bool yFlip = false, GLuint xoffset = 0,
GLuint yoffset = 0, GLuint width = 0, GLuint height = 0);
bool BlitImageToTexture(layers::Image* srcImage, const gfx::IntSize& destSize,
GLuint destTex, GLenum destTarget, bool yFlip = false, GLuint xoffset = 0,
GLuint yoffset = 0, GLuint width = 0, GLuint height = 0);

View File

@ -25,6 +25,7 @@
/* from widget */
#if defined(MOZ_WIDGET_ANDROID)
#include "AndroidBridge.h"
#include "nsSurfaceTexture.h"
#endif
#include <android/log.h>

View File

@ -26,8 +26,6 @@ if CONFIG['MOZ_GL_PROVIDER']:
gl_provider = CONFIG['MOZ_GL_PROVIDER']
EXPORTS += [
'AndroidNativeWindow.h',
'AndroidSurfaceTexture.h',
'DecomposeIntoNoRepeatTriangles.h',
'EGLUtils.h',
'ForceDiscreteGPUHelperCGL.h',
@ -116,8 +114,6 @@ else:
]
UNIFIED_SOURCES += [
'AndroidNativeWindow.cpp',
'AndroidSurfaceTexture.cpp',
'DecomposeIntoNoRepeatTriangles.cpp',
'EGLUtils.cpp',
'GfxTexturesReporter.cpp',

View File

@ -1,60 +0,0 @@
#ifdef MOZ_WIDGET_ANDROID
#include "GLImages.h"
#include "GLContext.h"
#include "GLContextProvider.h"
#include "ScopedGLHelpers.h"
#include "GLImages.h"
#include "GLBlitHelper.h"
#include "GLReadTexImageHelper.h"
#include "AndroidSurfaceTexture.h"
using namespace mozilla;
using namespace mozilla::gl;
static Mutex sSnapshotMutex("SurfaceTextureImage::sSnapshotMutex");
static nsRefPtr<GLContext> sSnapshotContext;
TemporaryRef<gfx::SourceSurface>
SurfaceTextureImage::GetAsSourceSurface()
{
MutexAutoLock lock(sSnapshotMutex);
MOZ_ASSERT(NS_IsMainThread(), "Should be on the main thread");
if (!sSnapshotContext) {
SurfaceCaps caps = SurfaceCaps::ForRGBA();
sSnapshotContext = GLContextProvider::CreateOffscreen(gfxIntSize(16, 16), caps);
if (!sSnapshotContext) {
return nullptr;
}
}
sSnapshotContext->MakeCurrent();
ScopedTexture scopedTex(sSnapshotContext);
ScopedBindTexture boundTex(sSnapshotContext, scopedTex.Texture());
sSnapshotContext->fTexImage2D(LOCAL_GL_TEXTURE_2D, 0, LOCAL_GL_RGBA,
mData.mSize.width, mData.mSize.height, 0,
LOCAL_GL_RGBA,
LOCAL_GL_UNSIGNED_BYTE,
nullptr);
ScopedFramebufferForTexture fb(sSnapshotContext, scopedTex.Texture());
GLBlitHelper helper(sSnapshotContext);
helper.BlitImageToFramebuffer(this, mData.mSize, fb.FB(), false);
ScopedBindFramebuffer bind(sSnapshotContext, fb.FB());
RefPtr<DataSourceSurface> source =
Factory::CreateDataSourceSurface(mData.mSize, gfx::SurfaceFormat::B8G8R8A8);
if (NS_WARN_IF(!source)) {
return nullptr;
}
ReadPixelsIntoDataSurface(sSnapshotContext, source);
return source.forget();
}
#endif

View File

@ -12,10 +12,9 @@
#include "nsCOMPtr.h" // for already_AddRefed
#include "mozilla/gfx/Point.h" // for IntSize
class nsSurfaceTexture;
namespace mozilla {
namespace gl {
class AndroidSurfaceTexture;
}
namespace layers {
class EGLImageImage : public Image {
@ -47,7 +46,7 @@ private:
class SurfaceTextureImage : public Image {
public:
struct Data {
mozilla::gl::AndroidSurfaceTexture* mSurfTex;
nsSurfaceTexture* mSurfTex;
gfx::IntSize mSize;
bool mInverted;
};
@ -57,7 +56,10 @@ public:
gfx::IntSize GetSize() { return mData.mSize; }
virtual TemporaryRef<gfx::SourceSurface> GetAsSourceSurface() MOZ_OVERRIDE;
virtual TemporaryRef<gfx::SourceSurface> GetAsSourceSurface() MOZ_OVERRIDE
{
return nullptr;
}
SurfaceTextureImage() : Image(nullptr, ImageFormat::SURFACE_TEXTURE) {}

View File

@ -288,7 +288,6 @@ UNIFIED_SOURCES += [
'Compositor.cpp',
'CopyableCanvasLayer.cpp',
'Effects.cpp',
'GLImages.cpp',
'ImageDataSerializer.cpp',
'ImageLayers.cpp',
'ipc/AsyncTransactionTracker.cpp',

View File

@ -78,7 +78,7 @@ EGLImageTextureClient::Unlock()
#ifdef MOZ_WIDGET_ANDROID
SurfaceTextureClient::SurfaceTextureClient(TextureFlags aFlags,
AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::IntSize aSize,
bool aInverted)
: TextureClient(aFlags)

View File

@ -13,7 +13,7 @@
#include "mozilla/layers/CompositorTypes.h"
#include "mozilla/layers/LayersSurfaces.h" // for SurfaceDescriptor
#include "mozilla/layers/TextureClient.h" // for TextureClient, etc
#include "AndroidSurfaceTexture.h"
#include "nsSurfaceTexture.h"
namespace mozilla {
namespace layers {
@ -74,7 +74,7 @@ class SurfaceTextureClient : public TextureClient
{
public:
SurfaceTextureClient(TextureFlags aFlags,
AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::IntSize aSize,
bool aInverted);
@ -113,7 +113,7 @@ public:
}
protected:
const RefPtr<AndroidSurfaceTexture> mSurfTex;
const nsRefPtr<nsSurfaceTexture> mSurfTex;
const gfx::IntSize mSize;
bool mIsLocked;
};

View File

@ -23,7 +23,7 @@
#include "mozilla/layers/GrallocTextureHost.h"
#include "nsPoint.h" // for nsIntPoint
#include "nsRegion.h" // for nsIntRegion
#include "AndroidSurfaceTexture.h"
#include "nsSurfaceTexture.h"
#include "GfxTexturesReporter.h" // for GfxTexturesReporter
#include "GLBlitTextureImageHelper.h"
#ifdef XP_MACOSX
@ -58,7 +58,7 @@ CreateTextureHostOGL(const SurfaceDescriptor& aDesc,
case SurfaceDescriptor::TSurfaceTextureDescriptor: {
const SurfaceTextureDescriptor& desc = aDesc.get_SurfaceTextureDescriptor();
result = new SurfaceTextureHost(aFlags,
(AndroidSurfaceTexture*)desc.surfTex(),
(nsSurfaceTexture*)desc.surfTex(),
desc.size());
break;
}
@ -429,7 +429,7 @@ GLTextureSource::gl() const
#ifdef MOZ_WIDGET_ANDROID
SurfaceTextureSource::SurfaceTextureSource(CompositorOGL* aCompositor,
AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::SurfaceFormat aFormat,
GLenum aTarget,
GLenum aWrapMode,
@ -450,8 +450,10 @@ SurfaceTextureSource::BindTexture(GLenum aTextureUnit, gfx::Filter aFilter)
NS_WARNING("Trying to bind a texture without a GLContext");
return;
}
GLuint tex = mCompositor->GetTemporaryTexture(GetTextureTarget(), aTextureUnit);
gl()->fActiveTexture(aTextureUnit);
gl()->fBindTexture(mTextureTarget, tex);
#ifndef DEBUG
// SurfaceTexture spams us if there are any existing GL errors, so
// we'll clear them here in order to avoid that.
@ -496,7 +498,7 @@ SurfaceTextureSource::GetTextureTransform()
////////////////////////////////////////////////////////////////////////
SurfaceTextureHost::SurfaceTextureHost(TextureFlags aFlags,
AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::IntSize aSize)
: TextureHost(aFlags)
, mSurfTex(aSurfTex)
@ -534,15 +536,12 @@ SurfaceTextureHost::Lock()
mSize);
}
mSurfTex->Attach(gl());
return true;
}
void
SurfaceTextureHost::Unlock()
{
mSurfTex->Detach();
}
void

View File

@ -39,6 +39,7 @@
class gfxReusableSurfaceWrapper;
class nsIntRegion;
class nsSurfaceTexture;
struct nsIntPoint;
struct nsIntRect;
struct nsIntSize;
@ -48,10 +49,6 @@ namespace gfx {
class DataSourceSurface;
}
namespace gl {
class AndroidSurfaceTexture;
}
namespace layers {
class Compositor;
@ -342,7 +339,7 @@ class SurfaceTextureSource : public TextureSource
{
public:
SurfaceTextureSource(CompositorOGL* aCompositor,
mozilla::gl::AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::SurfaceFormat aFormat,
GLenum aTarget,
GLenum aWrapMode,
@ -373,7 +370,7 @@ public:
protected:
RefPtr<CompositorOGL> mCompositor;
mozilla::gl::AndroidSurfaceTexture* const mSurfTex;
nsSurfaceTexture* const mSurfTex;
const gfx::SurfaceFormat mFormat;
const GLenum mTextureTarget;
const GLenum mWrapMode;
@ -384,7 +381,7 @@ class SurfaceTextureHost : public TextureHost
{
public:
SurfaceTextureHost(TextureFlags aFlags,
mozilla::gl::AndroidSurfaceTexture* aSurfTex,
nsSurfaceTexture* aSurfTex,
gfx::IntSize aSize);
virtual ~SurfaceTextureHost();
@ -417,7 +414,7 @@ public:
virtual const char* Name() { return "SurfaceTextureHost"; }
protected:
mozilla::gl::AndroidSurfaceTexture* const mSurfTex;
nsSurfaceTexture* const mSurfTex;
const gfx::IntSize mSize;
RefPtr<CompositorOGL> mCompositor;
RefPtr<SurfaceTextureSource> mTextureSource;

View File

@ -102,11 +102,6 @@ void TexturePoolOGL::Fill(GLContext* aContext)
sMonitor->NotifyAll();
}
GLContext* TexturePoolOGL::GetGLContext()
{
return sActiveContext;
}
void TexturePoolOGL::Init()
{
sMonitor = new Monitor("TexturePoolOGL.sMonitor");

View File

@ -12,7 +12,7 @@ namespace gl {
// A texture pool for the on-screen GLContext. The main purpose of this class
// is to provide the ability to easily allocate an on-screen texture from the
// content thread. The unfortunate nature of the SurfaceTexture API (see AndroidSurfaceTexture)
// content thread. The unfortunate nature of the SurfaceTexture API (see nsSurfaceTexture)
// necessitates this.
class TexturePoolOGL
{
@ -25,8 +25,6 @@ public:
// the pool
static void Fill(GLContext* aContext);
static GLContext* GetGLContext();
// Initializes the pool, but does not fill it. Called by gfxPlatform init.
static void Init();
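
For context, a minimal illustrative sketch (not part of this patch) of the TexturePoolOGL lifecycle described by the comments above. Only Init() and Fill() are taken from the header; the two call sites are hypothetical.

#include "GLContext.h"        // mozilla::gl::GLContext
#include "TexturePoolOGL.h"

using namespace mozilla::gl;

// Hypothetical gfxPlatform-time hook: set up the pool without allocating textures.
void OnGfxPlatformInit()
{
  TexturePoolOGL::Init();
}

// Hypothetical hook for when the on-screen GLContext exists: allocate textures on it
// and wake any content threads waiting to grab one.
void OnScreenContextReady(GLContext* aContext)
{
  TexturePoolOGL::Fill(aContext);
}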

View File

@ -49,6 +49,7 @@ EXPORTS += [
'gfxUserFontSet.h',
'gfxUtils.h',
'GraphicsFilter.h',
'nsSurfaceTexture.h',
'RoundedRect.h',
]
@ -241,6 +242,7 @@ UNIFIED_SOURCES += [
'gfxTextRun.cpp',
'gfxUserFontSet.cpp',
'gfxUtils.cpp',
'nsSurfaceTexture.cpp',
'nsUnicodeRange.cpp',
]

View File

@ -0,0 +1,247 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
// vim:set ts=2 sts=2 sw=2 et cin:
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifdef MOZ_WIDGET_ANDROID
#include <set>
#include <map>
#include <android/log.h>
#include "nsSurfaceTexture.h"
#include "AndroidBridge.h"
#include "nsThreadUtils.h"
#include "mozilla/gfx/Matrix.h"
using namespace mozilla;
// UGH
static std::map<int, nsSurfaceTexture*> sInstances;
static int sNextID = 0;
static class JNIFunctions {
public:
JNIFunctions() : mInitialized(false)
{
}
bool EnsureInitialized()
{
if (mInitialized)
return true;
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
jSurfaceTextureClass = (jclass)env->NewGlobalRef(env->FindClass("android/graphics/SurfaceTexture"));
jSurfaceTexture_Ctor = env->GetMethodID(jSurfaceTextureClass, "<init>", "(I)V");
jSurfaceTexture_updateTexImage = env->GetMethodID(jSurfaceTextureClass, "updateTexImage", "()V");
jSurfaceTexture_getTransformMatrix = env->GetMethodID(jSurfaceTextureClass, "getTransformMatrix", "([F)V");
mInitialized = true;
return true;
}
jobject CreateSurfaceTexture(GLuint aTexture)
{
if (!EnsureInitialized())
return nullptr;
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
return env->NewGlobalRef(env->NewObject(jSurfaceTextureClass, jSurfaceTexture_Ctor, (int) aTexture));
}
void ReleaseSurfaceTexture(jobject aSurfaceTexture)
{
JNIEnv* env = GetJNIForThread();
env->DeleteGlobalRef(aSurfaceTexture);
}
void UpdateTexImage(jobject aSurfaceTexture)
{
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_updateTexImage);
}
bool GetTransformMatrix(jobject aSurfaceTexture, gfx::Matrix4x4& aMatrix)
{
JNIEnv* env = GetJNIForThread();
AutoLocalJNIFrame jniFrame(env);
jfloatArray jarray = env->NewFloatArray(16);
env->CallVoidMethod(aSurfaceTexture, jSurfaceTexture_getTransformMatrix, jarray);
jfloat* array = env->GetFloatArrayElements(jarray, nullptr);
aMatrix._11 = array[0];
aMatrix._12 = array[1];
aMatrix._13 = array[2];
aMatrix._14 = array[3];
aMatrix._21 = array[4];
aMatrix._22 = array[5];
aMatrix._23 = array[6];
aMatrix._24 = array[7];
aMatrix._31 = array[8];
aMatrix._32 = array[9];
aMatrix._33 = array[10];
aMatrix._34 = array[11];
aMatrix._41 = array[12];
aMatrix._42 = array[13];
aMatrix._43 = array[14];
aMatrix._44 = array[15];
env->ReleaseFloatArrayElements(jarray, array, 0);
return false;
}
private:
bool mInitialized;
jclass jSurfaceTextureClass;
jmethodID jSurfaceTexture_Ctor;
jmethodID jSurfaceTexture_updateTexImage;
jmethodID jSurfaceTexture_getTransformMatrix;
} sJNIFunctions;
nsSurfaceTexture*
nsSurfaceTexture::Create(GLuint aTexture)
{
// Right now we only support creating this on the main thread because
// of the JNIEnv assumptions in JNIHelper and elsewhere
if (!NS_IsMainThread())
return nullptr;
nsSurfaceTexture* st = new nsSurfaceTexture();
if (!st->Init(aTexture)) {
printf_stderr("Failed to initialize nsSurfaceTexture");
delete st;
st = nullptr;
}
return st;
}
nsSurfaceTexture*
nsSurfaceTexture::Find(int id)
{
std::map<int, nsSurfaceTexture*>::iterator it;
it = sInstances.find(id);
if (it == sInstances.end())
return nullptr;
return it->second;
}
bool
nsSurfaceTexture::Check()
{
return sJNIFunctions.EnsureInitialized();
}
bool
nsSurfaceTexture::Init(GLuint aTexture)
{
if (!sJNIFunctions.EnsureInitialized())
return false;
JNIEnv* env = GetJNIForThread();
mSurfaceTexture = sJNIFunctions.CreateSurfaceTexture(aTexture);
if (!mSurfaceTexture)
return false;
mNativeWindow = AndroidBridge::Bridge()->AcquireNativeWindowFromSurfaceTexture(env, mSurfaceTexture);
mID = ++sNextID;
sInstances.insert(std::pair<int, nsSurfaceTexture*>(mID, this));
return true;
}
nsSurfaceTexture::nsSurfaceTexture()
: mSurfaceTexture(nullptr), mNativeWindow(nullptr)
{
}
nsSurfaceTexture::~nsSurfaceTexture()
{
sInstances.erase(mID);
mFrameAvailableCallback = nullptr;
if (mNativeWindow) {
AndroidBridge::Bridge()->ReleaseNativeWindowForSurfaceTexture(mSurfaceTexture);
mNativeWindow = nullptr;
}
JNIEnv* env = GetJNIForThread();
if (mSurfaceTexture) {
mozilla::widget::android::GeckoAppShell::UnregisterSurfaceTextureFrameListener(mSurfaceTexture);
env->DeleteGlobalRef(mSurfaceTexture);
mSurfaceTexture = nullptr;
}
}
void*
nsSurfaceTexture::GetNativeWindow()
{
return mNativeWindow;
}
void
nsSurfaceTexture::UpdateTexImage()
{
sJNIFunctions.UpdateTexImage(mSurfaceTexture);
}
bool
nsSurfaceTexture::GetTransformMatrix(gfx::Matrix4x4& aMatrix)
{
return sJNIFunctions.GetTransformMatrix(mSurfaceTexture, aMatrix);
}
void
nsSurfaceTexture::SetFrameAvailableCallback(nsIRunnable* aRunnable)
{
if (aRunnable)
mozilla::widget::android::GeckoAppShell::RegisterSurfaceTextureFrameListener(mSurfaceTexture, mID);
else
mozilla::widget::android::GeckoAppShell::UnregisterSurfaceTextureFrameListener(mSurfaceTexture);
mFrameAvailableCallback = aRunnable;
}
void
nsSurfaceTexture::NotifyFrameAvailable()
{
if (mFrameAvailableCallback) {
// Proxy to main thread if we aren't on it
if (!NS_IsMainThread()) {
// Proxy to main thread
nsCOMPtr<nsIRunnable> event = NS_NewRunnableMethod(this, &nsSurfaceTexture::NotifyFrameAvailable);
NS_DispatchToCurrentThread(event);
} else {
mFrameAvailableCallback->Run();
}
}
}
#endif // MOZ_WIDGET_ANDROID

View File

@ -0,0 +1,70 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
// vim:set ts=2 sts=2 sw=2 et cin:
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef nsSurfaceTexture_h__
#define nsSurfaceTexture_h__
#ifdef MOZ_WIDGET_ANDROID
#include <jni.h>
#include "nsIRunnable.h"
#include "gfxPlatform.h"
#include "GLDefs.h"
namespace mozilla {
namespace gfx {
class Matrix4x4;
}
}
/**
* This class is a wrapper around Android's SurfaceTexture class.
* Usage is pretty much exactly like the Java class, so see
* the Android documentation for details.
*/
class nsSurfaceTexture MOZ_FINAL {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(nsSurfaceTexture)
public:
static nsSurfaceTexture* Create(GLuint aTexture);
static nsSurfaceTexture* Find(int id);
// Returns with reasonable certainty whether or not we'll
// be able to create and use a SurfaceTexture
static bool Check();
// This is an ANativeWindow. Use AndroidBridge::LockWindow and
// friends for manipulating it.
void* GetNativeWindow();
// This attaches the updated data to the TEXTURE_EXTERNAL target
void UpdateTexImage();
bool GetTransformMatrix(mozilla::gfx::Matrix4x4& aMatrix);
int ID() { return mID; }
// The callback is guaranteed to be called on the main thread even
// if the upstream callback is received on a different thread
void SetFrameAvailableCallback(nsIRunnable* aRunnable);
// Only should be called by AndroidJNI when we get a
// callback from the underlying SurfaceTexture instance
void NotifyFrameAvailable();
private:
nsSurfaceTexture();
// Private destructor, to discourage deletion outside of Release():
~nsSurfaceTexture();
bool Init(GLuint aTexture);
jobject mSurfaceTexture;
void* mNativeWindow;
int mID;
nsRefPtr<nsIRunnable> mFrameAvailableCallback;
};
#endif
#endif
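
An illustrative consumer-side sketch (not part of this patch) of the nsSurfaceTexture API restored above. The texture name and the frame-available runnable are assumed to be supplied by the caller.

#include "nsSurfaceTexture.h"
#include "mozilla/gfx/Matrix.h"

// aTexName: a GL texture intended for the TEXTURE_EXTERNAL target.
// aCallback: caller-owned nsIRunnable to run when a new frame arrives.
void SetupSurfaceTexture(GLuint aTexName, nsIRunnable* aCallback)
{
  if (!nsSurfaceTexture::Check()) {
    return;                                  // SurfaceTexture not usable here
  }

  // Create() currently only works on the main thread (see nsSurfaceTexture.cpp above).
  nsRefPtr<nsSurfaceTexture> st = nsSurfaceTexture::Create(aTexName);
  if (!st) {
    return;
  }

  // The callback is always delivered on the main thread.
  st->SetFrameAvailableCallback(aCallback);

  // Once a frame has been produced (e.g. from the callback):
  st->UpdateTexImage();                      // latch the frame onto TEXTURE_EXTERNAL
  mozilla::gfx::Matrix4x4 transform;
  st->GetTransformMatrix(transform);         // texture-coordinate transform from Android
}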

View File

@ -560,12 +560,6 @@ pref("media.preload.auto", 2); // preload metadata if preload=auto
// of at least 4.
pref("media.video-queue.default-size", 3);
// Enable the MediaCodec PlatformDecoderModule by default.
pref("media.fragmented-mp4.exposed", true);
pref("media.fragmented-mp4.enabled", true);
pref("media.fragmented-mp4.android-media-codec.enabled", true);
pref("media.fragmented-mp4.android-media-codec.preferred", true);
// optimize images memory usage
pref("image.mem.decodeondraw", true);
pref("image.mem.min_discard_timeout_ms", 10000);

View File

@ -2547,7 +2547,7 @@ public class GeckoAppShell
getGeckoInterface().notifyWakeLockChanged(topic, state);
}
@WrapElementForJNI(allowMultithread = true)
@WrapElementForJNI
public static void registerSurfaceTextureFrameListener(Object surfaceTexture, final int id) {
((SurfaceTexture)surfaceTexture).setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override

View File

@ -41,7 +41,6 @@
#include "nsContentUtils.h"
#include "nsIScriptError.h"
#include "nsIHttpChannel.h"
#include "GeneratedSDKWrappers.h"
using namespace mozilla;
using namespace mozilla::widget::android;
@ -221,11 +220,6 @@ AndroidBridge::Init(JNIEnv *jEnv)
InitAndroidJavaWrappers(jEnv);
if (mAPIVersion >= 16 /* Jelly Bean */) {
// We only use this for MediaCodec right now
InitSDKStubs(jEnv);
}
// jEnv should NOT be cached here by anything -- the jEnv here
// is not valid for the real gecko main thread, which is set
// at SetMainThread time.

View File

@ -40,7 +40,7 @@
#include "mozilla/layers/APZCTreeManager.h"
#include "nsIMobileMessageDatabaseService.h"
#include "nsPluginInstanceOwner.h"
#include "AndroidSurfaceTexture.h"
#include "nsSurfaceTexture.h"
#include "GeckoProfiler.h"
#include "nsMemoryPressure.h"
@ -862,9 +862,9 @@ Java_org_mozilla_gecko_GeckoAppShell_getNextMessageFromQueue(JNIEnv* jenv, jclas
NS_EXPORT void JNICALL
Java_org_mozilla_gecko_GeckoAppShell_onSurfaceTextureFrameAvailable(JNIEnv* jenv, jclass, jobject surfaceTexture, jint id)
{
mozilla::gl::AndroidSurfaceTexture* st = mozilla::gl::AndroidSurfaceTexture::Find(id);
nsSurfaceTexture* st = nsSurfaceTexture::Find(id);
if (!st) {
__android_log_print(ANDROID_LOG_ERROR, "GeckoJNI", "Failed to find AndroidSurfaceTexture with id %d", id);
__android_log_print(ANDROID_LOG_ERROR, "GeckoJNI", "Failed to find nsSurfaceTexture with id %d", id);
return;
}

View File

@ -11,7 +11,6 @@
#include "mozilla/BasicEvents.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/TouchEvents.h"
#include "GeneratedSDKWrappers.h"
using namespace mozilla;
using namespace mozilla::dom;

View File

@ -1088,7 +1088,7 @@ bool GeckoAppShell::PumpMessageLoop() {
}
void GeckoAppShell::RegisterSurfaceTextureFrameListener(jobject a0, int32_t a1) {
JNIEnv *env = GetJNIForThread();
JNIEnv *env = AndroidBridge::GetJNIEnv();
if (env->PushLocalFrame(1) != 0) {
AndroidBridge::HandleUncaughtException(env);
MOZ_CRASH("Exception should have caused crash.");

File diff suppressed because it is too large

View File

@ -1,332 +0,0 @@
// GENERATED CODE
// NOTE: This code has been doctored. The JarClassProcessor is still a work in progress,
// and so additions and deletions have been made to make this file valid.
// Generated by the Java program at /build/jarClassProcessors at compile time from
// a given set of jars and a set of requested methods. To update, change the annotations
// on the corresponding Java methods and rerun the build. Manually updating this file
// will cause your build to fail.
#ifndef GeneratedSDKWrappers_h__
#define GeneratedSDKWrappers_h__
#include "nsXPCOMStrings.h"
#include "AndroidJavaWrappers.h"
namespace mozilla {
namespace widget {
namespace android {
void InitSDKStubs(JNIEnv *jEnv);
class MediaCodec : public AutoGlobalWrappedJavaObject {
public:
static void InitStubs(JNIEnv *jEnv);
static MediaCodec* Wrap(jobject obj);
MediaCodec(jobject obj, JNIEnv* env) : AutoGlobalWrappedJavaObject(obj, env) {};
void Configure(jobject a0, jobject a1, jobject a2, int32_t a3);
static jobject CreateByCodecName(const nsAString& a0);
static jobject CreateDecoderByType(const nsAString& a0);
static jobject CreateEncoderByType(const nsAString& a0);
int32_t DequeueInputBuffer(int64_t a0);
int32_t DequeueOutputBuffer(jobject a0, int64_t a1);
void Finalize();
void Flush();
jobjectArray GetInputBuffers();
jobjectArray GetOutputBuffers();
jobject GetOutputFormat();
void QueueInputBuffer(int32_t a0, int32_t a1, int32_t a2, int64_t a3, int32_t a4);
void QueueSecureInputBuffer(int32_t a0, int32_t a1, jobject a2, int64_t a3, int32_t a4);
void Release();
void ReleaseOutputBuffer(int32_t a0, bool a1);
void SetVideoScalingMode(int32_t a0);
void Start();
void Stop();
static int32_t getBUFFER_FLAG_CODEC_CONFIG();
static int32_t getBUFFER_FLAG_END_OF_STREAM();
static int32_t getBUFFER_FLAG_SYNC_FRAME();
static int32_t getCONFIGURE_FLAG_ENCODE();
static int32_t getCRYPTO_MODE_AES_CTR();
static int32_t getCRYPTO_MODE_UNENCRYPTED();
static int32_t getINFO_OUTPUT_BUFFERS_CHANGED();
static int32_t getINFO_OUTPUT_FORMAT_CHANGED();
static int32_t getINFO_TRY_AGAIN_LATER();
static int32_t getVIDEO_SCALING_MODE_SCALE_TO_FIT();
static int32_t getVIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING();
MediaCodec() : AutoGlobalWrappedJavaObject() {};
protected:
static jclass mMediaCodecClass;
static jmethodID jConfigure;
static jmethodID jCreateByCodecName;
static jmethodID jCreateDecoderByType;
static jmethodID jCreateEncoderByType;
static jmethodID jDequeueInputBuffer;
static jmethodID jDequeueOutputBuffer;
static jmethodID jFinalize;
static jmethodID jFlush;
static jmethodID jGetInputBuffers;
static jmethodID jGetOutputBuffers;
static jmethodID jGetOutputFormat;
static jmethodID jQueueInputBuffer;
static jmethodID jQueueSecureInputBuffer;
static jmethodID jRelease;
static jmethodID jReleaseOutputBuffer;
static jmethodID jSetVideoScalingMode;
static jmethodID jStart;
static jmethodID jStop;
static jfieldID jBUFFER_FLAG_CODEC_CONFIG;
static jfieldID jBUFFER_FLAG_END_OF_STREAM;
static jfieldID jBUFFER_FLAG_SYNC_FRAME;
static jfieldID jCONFIGURE_FLAG_ENCODE;
static jfieldID jCRYPTO_MODE_AES_CTR;
static jfieldID jCRYPTO_MODE_UNENCRYPTED;
static jfieldID jINFO_OUTPUT_BUFFERS_CHANGED;
static jfieldID jINFO_OUTPUT_FORMAT_CHANGED;
static jfieldID jINFO_TRY_AGAIN_LATER;
static jfieldID jVIDEO_SCALING_MODE_SCALE_TO_FIT;
static jfieldID jVIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING;
};
class MediaFormat : public AutoGlobalWrappedJavaObject {
public:
static void InitStubs(JNIEnv *jEnv);
static MediaFormat* Wrap(jobject obj);
MediaFormat(jobject obj, JNIEnv* env) : AutoGlobalWrappedJavaObject(obj, env) {};
MediaFormat();
bool ContainsKey(const nsAString& a0);
static jobject CreateAudioFormat(const nsAString& a0, int32_t a1, int32_t a2);
static jobject CreateVideoFormat(const nsAString& a0, int32_t a1, int32_t a2);
jobject GetByteBuffer(const nsAString& a0);
jfloat GetFloat(const nsAString& a0);
int32_t GetInteger(const nsAString& a0);
int64_t GetLong(const nsAString& a0);
jstring GetString(const nsAString& a0);
void SetByteBuffer(const nsAString& a0, jobject a1);
void SetFloat(const nsAString& a0, jfloat a1);
void SetInteger(const nsAString& a0, int32_t a1);
void SetLong(const nsAString& a0, int64_t a1);
void SetString(const nsAString& a0, const nsAString& a1);
jstring ToString();
static jstring getKEY_AAC_PROFILE();
static jstring getKEY_BIT_RATE();
static jstring getKEY_CHANNEL_COUNT();
static jstring getKEY_CHANNEL_MASK();
static jstring getKEY_COLOR_FORMAT();
static jstring getKEY_DURATION();
static jstring getKEY_FLAC_COMPRESSION_LEVEL();
static jstring getKEY_FRAME_RATE();
static jstring getKEY_HEIGHT();
static jstring getKEY_IS_ADTS();
static jstring getKEY_I_FRAME_INTERVAL();
static jstring getKEY_MAX_INPUT_SIZE();
static jstring getKEY_MIME();
static jstring getKEY_SAMPLE_RATE();
static jstring getKEY_WIDTH();
protected:
static jclass mMediaFormatClass;
static jmethodID jMediaFormat;
static jmethodID jContainsKey;
static jmethodID jCreateAudioFormat;
static jmethodID jCreateVideoFormat;
static jmethodID jGetByteBuffer;
static jmethodID jGetFloat;
static jmethodID jGetInteger;
static jmethodID jGetLong;
static jmethodID jGetString;
static jmethodID jSetByteBuffer;
static jmethodID jSetFloat;
static jmethodID jSetInteger;
static jmethodID jSetLong;
static jmethodID jSetString;
static jmethodID jToString;
static jfieldID jKEY_AAC_PROFILE;
static jfieldID jKEY_BIT_RATE;
static jfieldID jKEY_CHANNEL_COUNT;
static jfieldID jKEY_CHANNEL_MASK;
static jfieldID jKEY_COLOR_FORMAT;
static jfieldID jKEY_DURATION;
static jfieldID jKEY_FLAC_COMPRESSION_LEVEL;
static jfieldID jKEY_FRAME_RATE;
static jfieldID jKEY_HEIGHT;
static jfieldID jKEY_IS_ADTS;
static jfieldID jKEY_I_FRAME_INTERVAL;
static jfieldID jKEY_MAX_INPUT_SIZE;
static jfieldID jKEY_MIME;
static jfieldID jKEY_SAMPLE_RATE;
static jfieldID jKEY_WIDTH;
};
class ByteBuffer : public AutoGlobalWrappedJavaObject {
public:
static void InitStubs(JNIEnv *jEnv);
static ByteBuffer* Wrap(jobject obj);
ByteBuffer(jobject obj, JNIEnv* env) : AutoGlobalWrappedJavaObject(obj, env) {};
int8_t _get(int32_t a0);
void _put(int32_t a0, int8_t a1);
static jobject Allocate(int32_t a0);
static jobject AllocateDirect(int32_t a0);
jobject Array();
jbyteArray Array1();
int32_t ArrayOffset();
jstring AsCharBuffer();
jobject AsDoubleBuffer();
jobject AsFloatBuffer();
jobject AsIntBuffer();
jobject AsLongBuffer();
jobject AsReadOnlyBuffer();
jobject AsShortBuffer();
jobject Compact();
int32_t CompareTo(jobject a0);
int32_t CompareTo1(jobject a0);
jobject Duplicate();
bool Equals(jobject a0);
int8_t Get();
int8_t Get1(int32_t a0);
jobject Get1(jbyteArray a0);
jobject Get1(jbyteArray a0, int32_t a1, int32_t a2);
uint16_t GetChar();
uint16_t GetChar1(int32_t a0);
jdouble GetDouble();
jdouble GetDouble1(int32_t a0);
jfloat GetFloat();
jfloat GetFloat1(int32_t a0);
int32_t GetInt();
int32_t GetInt1(int32_t a0);
int64_t GetLong();
int64_t GetLong1(int32_t a0);
int16_t GetShort();
int16_t GetShort1(int32_t a0);
bool HasArray();
int32_t HashCode();
bool IsDirect();
jobject Order();
jobject Order1(jobject a0);
jobject Put(int8_t a0);
jobject Put1(int32_t a0, int8_t a1);
jobject Put1(jobject a0);
jobject Put1(jbyteArray a0);
jobject Put1(jbyteArray a0, int32_t a1, int32_t a2);
jobject PutChar(uint16_t a0);
jobject PutChar1(int32_t a0, uint16_t a1);
jobject PutDouble(jdouble a0);
jobject PutDouble1(int32_t a0, jdouble a1);
jobject PutFloat(jfloat a0);
jobject PutFloat1(int32_t a0, jfloat a1);
jobject PutInt(int32_t a0);
jobject PutInt1(int32_t a0, int32_t a1);
jobject PutLong(int32_t a0, int64_t a1);
jobject PutLong1(int64_t a0);
jobject PutShort(int32_t a0, int16_t a1);
jobject PutShort1(int16_t a0);
jobject Slice();
jstring ToString();
static jobject Wrap1(jbyteArray a0);
static jobject Wrap2(jbyteArray a0, int32_t a1, int32_t a2);
bool getBigEndian();
void setBigEndian(bool a0);
jbyteArray getHb();
bool getIsReadOnly();
void setIsReadOnly(bool a0);
bool getNativeByteOrder();
void setNativeByteOrder(bool a0);
int32_t getOffset();
ByteBuffer() : AutoGlobalWrappedJavaObject() {};
protected:
static jclass mByteBufferClass;
static jmethodID j_get;
static jmethodID j_put;
static jmethodID jAllocate;
static jmethodID jAllocateDirect;
static jmethodID jArray;
static jmethodID jArray1;
static jmethodID jArrayOffset;
static jmethodID jAsCharBuffer;
static jmethodID jAsDoubleBuffer;
static jmethodID jAsFloatBuffer;
static jmethodID jAsIntBuffer;
static jmethodID jAsLongBuffer;
static jmethodID jAsReadOnlyBuffer;
static jmethodID jAsShortBuffer;
static jmethodID jCompact;
static jmethodID jCompareTo;
static jmethodID jCompareTo1;
static jmethodID jDuplicate;
static jmethodID jEquals;
static jmethodID jGet;
static jmethodID jGet1;
static jmethodID jGet10;
static jmethodID jGet11;
static jmethodID jGetChar;
static jmethodID jGetChar1;
static jmethodID jGetDouble;
static jmethodID jGetDouble1;
static jmethodID jGetFloat;
static jmethodID jGetFloat1;
static jmethodID jGetInt;
static jmethodID jGetInt1;
static jmethodID jGetLong;
static jmethodID jGetLong1;
static jmethodID jGetShort;
static jmethodID jGetShort1;
static jmethodID jHasArray;
static jmethodID jHashCode;
static jmethodID jIsDirect;
static jmethodID jOrder;
static jmethodID jOrder1;
static jmethodID jPut;
static jmethodID jPut1;
static jmethodID jPut12;
static jmethodID jPut13;
static jmethodID jPut14;
static jmethodID jPutChar;
static jmethodID jPutChar1;
static jmethodID jPutDouble;
static jmethodID jPutDouble1;
static jmethodID jPutFloat;
static jmethodID jPutFloat1;
static jmethodID jPutInt;
static jmethodID jPutInt1;
static jmethodID jPutLong;
static jmethodID jPutLong1;
static jmethodID jPutShort;
static jmethodID jPutShort1;
static jmethodID jSlice;
static jmethodID jToString;
static jmethodID jWrap;
static jmethodID jWrap1;
static jfieldID jBigEndian;
static jfieldID jHb;
static jfieldID jIsReadOnly;
static jfieldID jNativeByteOrder;
static jfieldID jOffset;
};
class BufferInfo : public AutoGlobalWrappedJavaObject {
public:
static void InitStubs(JNIEnv *jEnv);
static BufferInfo* Wrap(jobject obj);
BufferInfo(jobject obj, JNIEnv* env) : AutoGlobalWrappedJavaObject(obj, env) {};
BufferInfo();
void Set(int32_t a0, int32_t a1, int64_t a2, int32_t a3);
int32_t getFlags();
void setFlags(int32_t a0);
int32_t getOffset();
void setOffset(int32_t a0);
int64_t getPresentationTimeUs();
void setPresentationTimeUs(int64_t a0);
int32_t getSize();
void setSize(int32_t a0);
protected:
static jclass mBufferInfoClass;
static jmethodID jBufferInfo;
static jmethodID jSet;
static jfieldID jFlags;
static jfieldID jOffset;
static jfieldID jPresentationTimeUs;
static jfieldID jSize;
};
} /* android */
} /* widget */
} /* mozilla */
#endif
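
For reference, an illustrative sketch (not part of this patch) of how the doctored MediaCodec/MediaFormat wrappers deleted here were meant to be driven from C++. The mime type, dimensions, timeout and the wrappedObject() accessor inherited from AutoGlobalWrappedJavaObject are assumptions; only the wrapper method names come from the header above.

#include "GeneratedSDKWrappers.h"
#include "nsString.h"

using namespace mozilla::widget::android;

// Hypothetical H.264 decoder setup; error handling and ownership are omitted.
void CreateAvcDecoderSketch()
{
  MediaFormat* format = MediaFormat::Wrap(
      MediaFormat::CreateVideoFormat(NS_LITERAL_STRING("video/avc"), 1280, 720));
  MediaCodec* codec = MediaCodec::Wrap(
      MediaCodec::CreateDecoderByType(NS_LITERAL_STRING("video/avc")));
  if (!format || !codec) {
    return;
  }

  // Configure(format, surface, crypto, flags) mirrors android.media.MediaCodec.configure().
  codec->Configure(format->wrappedObject(), nullptr, nullptr, 0);
  codec->Start();

  int32_t inputIndex = codec->DequeueInputBuffer(10000 /* microseconds */);
  if (inputIndex >= 0) {
    // Copy a compressed sample into GetInputBuffers()[inputIndex], then queue it
    // (size and presentation time are placeholders).
    codec->QueueInputBuffer(inputIndex, 0, /* size */ 0, /* pts us */ 0, 0);
  }

  codec->Stop();
  codec->Release();
}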

View File

@ -15,7 +15,6 @@ EXPORTS += [
'AndroidJavaWrappers.h',
'AndroidJNIWrapper.h',
'GeneratedJNIWrappers.h',
'GeneratedSDKWrappers.h',
]
SOURCES += [
@ -27,7 +26,6 @@ SOURCES += [
'AndroidJNIWrapper.cpp',
'APZCCallbackHandler.cpp',
'GeneratedJNIWrappers.cpp',
'GeneratedSDKWrappers.cpp',
'GfxInfo.cpp',
'NativeJSContainer.cpp',
'nsAndroidProtocolHandler.cpp',