Bug 709230 - Part 4: annotate members referenced by WebRTC's JNI code. r=gcp,rnewman

This commit is contained in:
Chris Kitching 2013-11-18 17:27:00 -08:00
parent ac10c2668c
commit cf8fdcc2a3
11 changed files with 69 additions and 0 deletions

View File

@ -21,6 +21,9 @@ import android.util.Log;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
@WebRTCJNITarget
class AudioManagerAndroid {
// Most of Google lead devices use 44.1K as the default sampling rate, 44.1K
// is also widely used on other android devices.

View File

@ -20,10 +20,13 @@ import android.media.AudioRecord;
import android.media.AudioTrack;
import android.util.Log;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
class WebRTCAudioDevice {
private AudioTrack _audioTrack = null;
private AudioRecord _audioRecord = null;
@WebRTCJNITarget
private Context _context;
private AudioManager _audioManager;
@ -44,6 +47,7 @@ class WebRTCAudioDevice {
private int _bufferedPlaySamples = 0;
private int _playPosition = 0;
@WebRTCJNITarget
WebRTCAudioDevice() {
try {
_playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48
@ -59,6 +63,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int InitRecording(int audioSource, int sampleRate) {
// get the minimum buffer size that can be used
int minRecBufSize = AudioRecord.getMinBufferSize(
@ -104,6 +109,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int StartRecording() {
if (_isPlaying == false) {
SetAudioMode(true);
@ -123,6 +129,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int InitPlayback(int sampleRate) {
// get the minimum buffer size that can be used
int minPlayBufSize = AudioTrack.getMinBufferSize(
@ -180,6 +187,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int StartPlayback() {
if (_isRecording == false) {
SetAudioMode(true);
@ -199,6 +207,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int StopRecording() {
_recLock.lock();
try {
@ -234,6 +243,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int StopPlayback() {
_playLock.lock();
try {
@ -271,6 +281,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int PlayAudio(int lengthInBytes) {
int bufferedSamples = 0;
@ -331,6 +342,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int RecordAudio(int lengthInBytes) {
_recLock.lock();
@ -376,6 +388,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int SetPlayoutSpeaker(boolean loudspeakerOn) {
// create audio manager if needed
if (_audioManager == null && _context != null) {
@ -425,6 +438,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int SetPlayoutVolume(int level) {
// create audio manager if needed
@ -445,6 +459,7 @@ class WebRTCAudioDevice {
}
@SuppressWarnings("unused")
@WebRTCJNITarget
private int GetPlayoutVolume() {
// create audio manager if needed

View File

@ -10,6 +10,9 @@
package org.webrtc.videoengine;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
@WebRTCJNITarget
public class CaptureCapabilityAndroid {
public int width = 0;
public int height = 0;

View File

@ -37,6 +37,7 @@ import org.mozilla.gecko.GeckoApp;
import org.mozilla.gecko.GeckoAppShell;
import org.mozilla.gecko.GeckoAppShell.AppStateListener;
import org.mozilla.gecko.util.ThreadUtils;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
public class VideoCaptureAndroid implements PreviewCallback, Callback {
@ -107,6 +108,8 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
// Invoked every time there's a new Camera preview frame
}
}
@WebRTCJNITarget
public static
void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
Log.d(TAG, "DeleteVideoCaptureAndroid");
@ -313,6 +316,7 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
return 0;
}
@WebRTCJNITarget
public int StartCapture(int width, int height, int frameRate) {
Log.d(TAG, "StartCapture width " + width +
" height " + height +" frame rate " + frameRate);

View File

@ -24,6 +24,8 @@ import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
public class VideoCaptureDeviceInfoAndroid {
//Context
@ -34,6 +36,7 @@ public class VideoCaptureDeviceInfoAndroid {
private final static String TAG = "WEBRTC";
// Private class with info about all available cameras and the capabilities
@WebRTCJNITarget
public class AndroidVideoCaptureDevice {
AndroidVideoCaptureDevice() {
frontCameraType = FrontFacingCameraType.None;
@ -62,6 +65,7 @@ public class VideoCaptureDeviceInfoAndroid {
int id;
List<AndroidVideoCaptureDevice> deviceList;
@WebRTCJNITarget
public static VideoCaptureDeviceInfoAndroid
CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
Log.d(TAG,
@ -213,10 +217,12 @@ public class VideoCaptureDeviceInfoAndroid {
}
// Returns the number of Capture devices that is supported
@WebRTCJNITarget
public int NumberOfDevices() {
return deviceList.size();
}
@WebRTCJNITarget
public String GetDeviceUniqueName(int deviceNumber) {
if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
return null;
@ -224,6 +230,7 @@ public class VideoCaptureDeviceInfoAndroid {
return deviceList.get(deviceNumber).deviceUniqueName;
}
@WebRTCJNITarget
public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
{
for (AndroidVideoCaptureDevice device: deviceList) {
@ -236,6 +243,7 @@ public class VideoCaptureDeviceInfoAndroid {
// Returns the camera orientation as described by
// android.hardware.Camera.CameraInfo.orientation
@WebRTCJNITarget
public int GetOrientation(String deviceUniqueId) {
for (AndroidVideoCaptureDevice device: deviceList) {
if(device.deviceUniqueName.equals(deviceUniqueId)) {
@ -246,6 +254,7 @@ public class VideoCaptureDeviceInfoAndroid {
}
// Returns an instance of VideoCaptureAndroid.
@WebRTCJNITarget
public VideoCaptureAndroid AllocateCamera(int id, long context,
String deviceUniqueId) {
try {
@ -296,6 +305,7 @@ public class VideoCaptureDeviceInfoAndroid {
}
// Searches for a front facing camera device. This is device specific code.
@WebRTCJNITarget
private Camera.Parameters
SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
throws SecurityException, IllegalArgumentException,

View File

@ -25,6 +25,8 @@ import android.graphics.PixelFormat;
import android.opengl.GLSurfaceView;
import android.util.Log;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
public class ViEAndroidGLES20 extends GLSurfaceView
implements GLSurfaceView.Renderer {
private static String TAG = "WEBRTC-JR";
@ -40,6 +42,7 @@ public class ViEAndroidGLES20 extends GLSurfaceView
private int viewWidth = 0;
private int viewHeight = 0;
@WebRTCJNITarget
public static boolean UseOpenGL2(Object renderWindow) {
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
@ -49,6 +52,7 @@ public class ViEAndroidGLES20 extends GLSurfaceView
init(false, 0, 0);
}
@WebRTCJNITarget
public ViEAndroidGLES20(Context context, boolean translucent,
int depth, int stencil) {
super(context);
@ -341,6 +345,7 @@ public class ViEAndroidGLES20 extends GLSurfaceView
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
@WebRTCJNITarget
public void RegisterNativeObject(long nativeObject) {
nativeFunctionLock.lock();
this.nativeObject = nativeObject;
@ -348,6 +353,7 @@ public class ViEAndroidGLES20 extends GLSurfaceView
nativeFunctionLock.unlock();
}
@WebRTCJNITarget
public void DeRegisterNativeObject() {
nativeFunctionLock.lock();
nativeFunctionsRegisted = false;
@ -356,6 +362,7 @@ public class ViEAndroidGLES20 extends GLSurfaceView
nativeFunctionLock.unlock();
}
@WebRTCJNITarget
public void ReDraw() {
if(surfaceCreated) {
// Request the renderer to redraw using the render thread context.

View File

@ -26,6 +26,8 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
public class ViESurfaceRenderer implements Callback {
private final static String TAG = "WEBRTC";
@ -43,6 +45,7 @@ public class ViESurfaceRenderer implements Callback {
private float dstLeftScale = 0;
private float dstRightScale = 1;
@WebRTCJNITarget
public ViESurfaceRenderer(SurfaceView view) {
surfaceHolder = view.getHolder();
if(surfaceHolder == null)
@ -122,6 +125,7 @@ public class ViESurfaceRenderer implements Callback {
return bitmap;
}
@WebRTCJNITarget
public ByteBuffer CreateByteBuffer(int width, int height) {
Log.d(TAG, "CreateByteBuffer " + width + ":" + height);
if (bitmap == null) {
@ -131,6 +135,7 @@ public class ViESurfaceRenderer implements Callback {
return byteBuffer;
}
@WebRTCJNITarget
public void SetCoordinates(float left, float top,
float right, float bottom) {
Log.d(TAG, "SetCoordinates " + left + "," + top + ":" +
@ -142,6 +147,7 @@ public class ViESurfaceRenderer implements Callback {
}
// It saves bitmap data to a JPEG picture, this function is for debug only.
@WebRTCJNITarget
private void saveBitmapToJPEG(int width, int height) {
ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream);
@ -159,6 +165,7 @@ public class ViESurfaceRenderer implements Callback {
}
}
@WebRTCJNITarget
public void DrawByteBuffer() {
if(byteBuffer == null)
return;

View File

@ -23,6 +23,8 @@ import android.view.SurfaceView;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
class CodecState {
private static final String TAG = "CodecState";
@ -41,6 +43,7 @@ class CodecState {
private long mLastMediaTimeUs;
@WebRTCJNITarget
public CodecState(
ViEMediaCodecDecoder view,
MediaFormat format,
@ -245,6 +248,7 @@ class ViEMediaCodecDecoder {
private Thread mLooperThread;
@WebRTCJNITarget
public void configure(SurfaceView surfaceView, int width, int height) {
mSurfaceView = surfaceView;
Log.d(TAG, "configure " + "width" + width + "height" + height + mSurfaceView.toString());
@ -267,6 +271,7 @@ class ViEMediaCodecDecoder {
initMediaCodecView();
}
@WebRTCJNITarget
public void setEncodedImage(ByteBuffer buffer, long renderTimeMs) {
// TODO(dwkang): figure out why exceptions just make this thread finish.
try {
@ -354,6 +359,7 @@ class ViEMediaCodecDecoder {
Log.d(TAG, "initMediaCodecView end");
}
@WebRTCJNITarget
public void start() {
Log.d(TAG, "start");

View File

@ -59,6 +59,7 @@ import android.widget.TabHost.TabSpec;
import android.widget.TextView;
import org.webrtc.videoengine.ViERenderer;
import org.mozilla.gecko.mozglue.WebRTCJNITarget;
import java.io.File;
import java.io.IOException;
@ -1046,6 +1047,7 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
Log.d(TAG, "No setting selected");
}
@WebRTCJNITarget
public int updateStats(int inFrameRateI, int inBitRateI,
int inPacketLoss, int inFrameRateO, int inBitRateO) {
frameRateI = inFrameRateI;

View File

@ -22,6 +22,7 @@ mgjar.sources += [
'mozglue/NativeReference.java',
'mozglue/NativeZip.java',
'mozglue/RobocopTarget.java',
'mozglue/WebRTCJNITarget.java',
]
mgjar.generated_sources += [
'org/mozilla/gecko/mozglue/GeckoLoader.java',

View File

@ -0,0 +1,11 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.mozglue;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Marker annotation for classes, constructors, methods, and fields that are
 * referenced from WebRTC's JNI (native) code and therefore look unused from
 * the Java side. (Purpose per this commit: "annotate members referenced by
 * WebRTC's JNI code".)
 *
 * Retention is CLASS: the annotation is recorded in the .class file but is
 * not available via reflection at runtime — class-file-level tools can see
 * it, with zero runtime cost.
 *
 * NOTE(review): presumably consumed by a build-time shrinker/obfuscator
 * (e.g. ProGuard keep rules) to prevent renaming or stripping of these
 * members — confirm against the build configuration.
 */
@Retention(RetentionPolicy.CLASS)
public @interface WebRTCJNITarget {}