diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
index 1d46fb57314..f25edc6b826 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
@@ -54,16 +54,13 @@ DeviceInfoAndroid::~DeviceInfoAndroid() {
 }
 
 uint32_t DeviceInfoAndroid::NumberOfDevices() {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return 0;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return 0;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                "%s GetMethodId", __FUNCTION__);
@@ -78,7 +75,6 @@ uint32_t DeviceInfoAndroid::NumberOfDevices() {
                  "%s Calling Number of devices", __FUNCTION__);
     numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
   }
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
 
   if (numberOfDevices > 0)
     return numberOfDevices;
@@ -94,17 +90,14 @@ int32_t DeviceInfoAndroid::GetDeviceName(
     char* /*productUniqueIdUTF8*/,
     uint32_t /*productUniqueIdUTF8Length*/) {
 
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
   int32_t result = 0;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached)!= 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
   // get the method ID for the Android Java GetDeviceUniqueName name.
   jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
                                    "GetDeviceUniqueName",
@@ -153,8 +146,6 @@ int32_t DeviceInfoAndroid::GetDeviceName(
     result = -1;
   }
 
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: result %d", __FUNCTION__, (int) result);
   return result;
@@ -169,21 +160,17 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
     _captureCapabilities.Erase(item);
   }
 
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   // Find the capability class
   jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass);
   if (javaCapClass == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: SetAndroidCaptureClasses must be called first!",
                  __FUNCTION__);
@@ -196,7 +183,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
                                    "GetCapabilityArray",
                                    "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
   if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
     return -1;
@@ -205,7 +191,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
 
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
   if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't create string for method GetCapabilityArray.",
                  __FUNCTION__);
@@ -215,7 +200,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
   jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
                                                       cid, capureIdString);
   if (!javaCapabilitiesObj) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Failed to call java GetCapabilityArray.",
                  __FUNCTION__);
@@ -226,7 +210,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
   jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
   jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
   if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Failed to get field Id.", __FUNCTION__);
     return -1;
@@ -261,7 +244,6 @@ int32_t DeviceInfoAndroid::CreateCapabilityMap(
 
   env->DeleteGlobalRef(javaCapClass);
 
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
   WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                "CreateCapabilityMap %d", _captureCapabilities.Size());
 
@@ -271,22 +253,18 @@ int32_t DeviceInfoAndroid::GetOrientation(
     const char* deviceUniqueIdUTF8,
     VideoCaptureRotation& orientation) {
 
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   // get the method ID for the Android Java GetOrientation .
   jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
                                    "(Ljava/lang/String;)I");
   if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't find method GetOrientation.", __FUNCTION__);
     return -1;
@@ -294,7 +272,6 @@ int32_t DeviceInfoAndroid::GetOrientation(
   // Create a jstring so we can pass the deviceUniquName to the java method.
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
   if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't create string for method GetCapabilityArray.",
                  __FUNCTION__);
@@ -303,7 +280,6 @@ int32_t DeviceInfoAndroid::GetOrientation(
   // Call the java class and get the orientation.
   jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
                                          capureIdString);
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
 
   int32_t retValue = 0;
   switch (jorientation) {
diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
index c066384fd02..01290f3a517 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
@@ -216,62 +216,6 @@ int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
   return 0;
 }
 
-int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-    JNIEnv*& env,
-    jclass& javaCmDevInfoClass,
-    jobject& javaCmDevInfoObject,
-    bool& attached) {
-
-  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-               "%s: AttachAndUseAndroidDeviceInfoObj.",
-               __FUNCTION__);
-
-  // get the JNI env for this thread
-  if (!g_jvm) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                 "%s: SetAndroidObjects not called with a valid JVM.",
-                 __FUNCTION__);
-    return -1;
-  }
-  attached = false;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-      return -1;
-    }
-    attached = true;
-    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                 "%s: attach success", __FUNCTION__);
-  } else {
-    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                 "%s: did not attach because JVM Env present", __FUNCTION__);
-  }
-  MOZ_ASSERT(g_javaCmDevInfoClass != nullptr);
-  MOZ_ASSERT(g_javaCmDevInfoObject != nullptr);
-  javaCmDevInfoClass = g_javaCmDevInfoClass;
-  javaCmDevInfoObject = g_javaCmDevInfoObject;
-  return 0;
-
-}
-
-int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
-    bool attached) {
-  if (attached && g_jvm->DetachCurrentThread() < 0) {
-    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
-                 "%s: Could not detach thread from JVM", __FUNCTION__);
-    return -1;
-  } else if (!attached) {
-    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                 "%s: not attached, no detach", __FUNCTION__);
-  }
-  return 0;
-}
-
 /*
  * JNI callback from Java class. Called
  * when the camera has a new frame to deliver
@@ -355,34 +299,25 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
                  "%s: Not a valid Java VM pointer", __FUNCTION__);
     return -1;
   }
-  // get the JNI env for this thread
-  JNIEnv *env;
-  bool isAttached = false;
-  int32_t rotation = 0;
-  // get the JNI env for this thread
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
       return -1;
-    }
-    isAttached = true;
-  }
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
+
+  int32_t rotation = 0;
 
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                "get method id");
 
-  // get the method ID for the Android Java
   // CaptureDeviceInfoClass AllocateCamera factory method.
   char signature[256];
   sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
-  jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "AllocateCamera",
                                    signature);
   if (cid == NULL) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -392,7 +327,7 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
 
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
   // construct the object by calling the static constructor object
-  jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
+  jobject javaCameraObjLocal = env->CallObjectMethod(javaCmDevInfoObject,
                                                      cid, (jint) id,
                                                      (jlong) this,
                                                      capureIdString);
@@ -412,17 +347,6 @@ int32_t VideoCaptureAndroid::Init(const int32_t id,
     return -1;
   }
 
-  // Delete local object ref, we only use the global ref
-  env->DeleteLocalRef(javaCameraObjLocal);
-
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
-
   return 0;
 }
 
@@ -434,57 +358,30 @@ VideoCaptureAndroid::~VideoCaptureAndroid() {
                  "%s: Nothing to clean", __FUNCTION__);
   }
   else {
-    bool isAttached = false;
-    // get the JNI env for this thread
-    JNIEnv *env;
-    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-      // try to attach the thread and get the env
-      // Attach this thread to JVM
-      jint res = g_jvm->AttachCurrentThread(&env, NULL);
-      if ((res < 0) || !env) {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-                     _id,
-                     "%s: Could not attach thread to JVM (%d, %p)",
-                     __FUNCTION__, res, env);
-      }
-      else {
-        isAttached = true;
-      }
-    }
+    AutoLocalJNIFrame jniFrame;
+    JNIEnv* env = jniFrame.GetEnv();
+    if (!env)
+      return;
 
-    if (env) {
-      // get the method ID for the Android Java CaptureClass static
-      // DeleteVideoCaptureAndroid method. Call this to release the camera so
-      // another application can use it.
-      jmethodID cid = env->GetStaticMethodID(
-          g_javaCmClass,
-          "DeleteVideoCaptureAndroid",
-          "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
-      if (cid != NULL) {
-        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-                     "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
-        // Close the camera by calling the static destruct function.
-        env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
+    // get the method ID for the Android Java CaptureClass static
+    // DeleteVideoCaptureAndroid method. Call this to release the camera so
+    // another application can use it.
+    jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
+                                           "DeleteVideoCaptureAndroid",
+                                           "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+    if (cid != NULL) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                   "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
+      // Close the camera by calling the static destruct function.
+      env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
 
-        // Delete global object ref to the camera.
-        env->DeleteGlobalRef(_javaCaptureObj);
-
-        _javaCaptureObj = NULL;
-      }
-      else {
+      // Delete global object ref to the camera.
+      env->DeleteGlobalRef(_javaCaptureObj);
+      _javaCaptureObj = NULL;
+    } else {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                      "%s: Failed to find DeleteVideoCaptureAndroid id",
                      __FUNCTION__);
-      }
-    }
-
-    // Detach this thread if it was attached
-    if (isAttached) {
-      if (g_jvm->DetachCurrentThread() < 0) {
-        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
-                     _id, "%s: Could not detach thread from JVM",
-                     __FUNCTION__);
-      }
    }
  }
 }
@@ -495,24 +392,13 @@ int32_t VideoCaptureAndroid::StartCapture(
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: ", __FUNCTION__);
 
-  bool isAttached = false;
   int32_t result = 0;
   int32_t rotation = 0;
-  // get the JNI env for this thread
-  JNIEnv *env;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-    }
-    else {
-      isAttached = true;
-    }
-  }
+
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return -1;
 
   if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
                                         _frameInfo) < 0) {
@@ -544,14 +430,6 @@ int32_t VideoCaptureAndroid::StartCapture(
                  "%s: Failed to find StartCapture id", __FUNCTION__);
   }
 
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
-
   if (result == 0) {
     _requestedCapability = capability;
     _captureStarted = true;
@@ -566,23 +444,12 @@ int32_t VideoCaptureAndroid::StopCapture() {
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: ", __FUNCTION__);
 
-  bool isAttached = false;
   int32_t result = 0;
-  // get the JNI env for this thread
-  JNIEnv *env = NULL;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-    }
-    else {
-      isAttached = true;
-    }
-  }
+
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+    return -1;
 
   memset(&_requestedCapability, 0, sizeof(_requestedCapability));
   memset(&_frameInfo, 0, sizeof(_frameInfo));
@@ -600,13 +467,6 @@ int32_t VideoCaptureAndroid::StopCapture() {
                  "%s: Failed to find StopCapture id", __FUNCTION__);
   }
 
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
   _captureStarted = false;
 
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
diff --git a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
index 4ebcffe028f..473ca2ae6ed 100644
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
@@ -12,6 +12,8 @@
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
 
 #include <jni.h>
+#include <assert.h>
+#include "trace.h"
 
 #include "device_info_android.h"
 #include "../video_capture_impl.h"
@@ -23,17 +25,9 @@ namespace videocapturemodule {
 class VideoCaptureAndroid : public VideoCaptureImpl {
  public:
   static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
-  static int32_t AttachAndUseAndroidDeviceInfoObjects(
-      JNIEnv*& env,
-      jclass& javaCmDevInfoClass,
-      jobject& javaCmDevInfoObject,
-      bool& attached);
-  static int32_t ReleaseAndroidDeviceInfoObjects(bool attached);
-
   VideoCaptureAndroid(const int32_t id);
   virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
-
   virtual int32_t StartCapture(
       const VideoCaptureCapability& capability);
   virtual int32_t StopCapture();
@@ -41,6 +35,8 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
   virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
   virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
 
+  friend class AutoLocalJNIFrame;
+
  protected:
   virtual ~VideoCaptureAndroid();
   static void JNICALL ProvideCameraFrame (JNIEnv * env,
@@ -53,7 +49,6 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
   jobject _javaCaptureObj; // Java Camera object.
   VideoCaptureCapability _frameInfo;
   bool _captureStarted;
-
   static JavaVM* g_jvm;
   static jclass g_javaCmClass;
   static jclass g_javaCmDevInfoClass;
@@ -61,6 +56,120 @@ class VideoCaptureAndroid : public VideoCaptureImpl {
   static jobject g_javaCmDevInfoObject;
 };
 
+// Reworked version of what is available in AndroidBridge,
+// can attach/detach in addition to push/pop frames.
+class AutoLocalJNIFrame {
+public:
+  AutoLocalJNIFrame(int nEntries = 128)
+    : mEntries(nEntries), mHasFrameBeenPushed(false), mAttached(false)
+  {
+    mJNIEnv = InitJNIEnv();
+    Push();
+  }
+
+  JNIEnv* GetEnv() {
+    return mJNIEnv;
+  }
+
+  jclass GetCmDevInfoClass() {
+    assert(VideoCaptureAndroid::g_javaCmDevInfoClass != nullptr);
+    return VideoCaptureAndroid::g_javaCmDevInfoClass;
+  }
+
+  jobject GetCmDevInfoObject() {
+    assert(VideoCaptureAndroid::g_javaCmDevInfoObject != nullptr);
+    return VideoCaptureAndroid::g_javaCmDevInfoObject;
+  }
+
+  bool CheckForException() {
+    if (mJNIEnv->ExceptionCheck()) {
+      mJNIEnv->ExceptionDescribe();
+      mJNIEnv->ExceptionClear();
+      return true;
+    }
+
+    return false;
+  }
+
+  ~AutoLocalJNIFrame() {
+    if (!mJNIEnv)
+      return;
+
+    CheckForException();
+
+    if (mHasFrameBeenPushed)
+      mJNIEnv->PopLocalFrame(NULL);
+
+    if (mAttached) {
+      int res = VideoCaptureAndroid::g_jvm->DetachCurrentThread();
+      if (res < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: JVM Detach failed.", __FUNCTION__);
+      }
+    }
+  }
+
+private:
+  void Push() {
+    if (!mJNIEnv)
+      return;
+
+    // Make sure there is enough space to store a local ref to the
+    // exception. I am not completely sure this is needed, but does
+    // not hurt.
+    jint ret = mJNIEnv->PushLocalFrame(mEntries + 1);
+    assert(ret == 0);
+    if (ret < 0)
+      CheckForException();
+    else
+      mHasFrameBeenPushed = true;
+  }
+
+  JNIEnv* InitJNIEnv()
+  {
+    JNIEnv* env = nullptr;
+
+    // Get the JNI env for this thread.
+    if (!VideoCaptureAndroid::g_jvm) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: SetAndroidObjects not called with a valid JVM.",
+                   __FUNCTION__);
+      return nullptr;
+    }
+
+    jint res = VideoCaptureAndroid::g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4);
+    if (res == JNI_EDETACHED) {
+      // Try to attach this thread to the JVM and get the env.
+      res = VideoCaptureAndroid::g_jvm->AttachCurrentThread(&env, NULL);
+      if ((res < 0) || !env) {
+        // Attaching failed, error out.
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        return nullptr;
+      }
+      mAttached = true;
+      WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                   "%s: attach success", __FUNCTION__);
+    } else if (res == JNI_OK) {
+      // Already attached, GetEnv succeeded.
+      WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                   "%s: did not attach because JVM Env already present",
+                   __FUNCTION__);
+    } else {
+      // Non-recoverable error in GetEnv.
+      return nullptr;
+    }
+
+    return env;
+  }
+
+  int mEntries;
+  JNIEnv* mJNIEnv;
+  bool mHasFrameBeenPushed;
+  bool mAttached;
+};
+
 }  // namespace videocapturemodule
 }  // namespace webrtc
 #endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
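
Reviewer note, not part of the patch: the sketch below illustrates the RAII pattern this change introduces, i.e. how a call site is expected to use AutoLocalJNIFrame instead of the old AttachAndUseAndroidDeviceInfoObjects/ReleaseAndroidDeviceInfoObjects pair. It assumes the patched video_capture_android.h is on the include path; the helper name QueryNumberOfCameras and the Java method name/signature it looks up are invented for illustration only — the real call sites are DeviceInfoAndroid::NumberOfDevices() and the other functions rewritten above.

    // Illustrative sketch only; mirrors the call-site pattern used in this patch.
    #include "video_capture_android.h"  // provides AutoLocalJNIFrame after this patch

    using webrtc::videocapturemodule::AutoLocalJNIFrame;

    static int32_t QueryNumberOfCameras() {  // hypothetical helper, not in the patch
      // Constructing the frame attaches this thread to the JVM if needed and
      // pushes a JNI local-reference frame.
      AutoLocalJNIFrame jniFrame;
      JNIEnv* env = jniFrame.GetEnv();
      if (!env)
        return -1;  // SetAndroidObjects() not called, or the attach failed

      jclass devInfoClass = jniFrame.GetCmDevInfoClass();
      jobject devInfoObject = jniFrame.GetCmDevInfoObject();

      // The Java method name and signature here are assumptions for the example.
      jmethodID mid = env->GetMethodID(devInfoClass, "NumberOfDevices", "()I");
      if (mid == NULL || jniFrame.CheckForException())
        return -1;

      jint count = env->CallIntMethod(devInfoObject, mid);
      if (jniFrame.CheckForException())
        return -1;

      return count;
      // ~AutoLocalJNIFrame() runs here: it clears any pending Java exception,
      // pops the local-reference frame if one was pushed, and detaches the
      // thread only if the constructor attached it.
    }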