【发布时间】:2014-01-17 18:24:17
【问题描述】:
我想开发一个可以使用加速计拍照的应用程序。 我可以使用按钮拍照,但是当我从 'onSensorChanged' 方法调用相同的方法,即 camera.takePicture 时,应用程序会崩溃。
这是我的代码
package com.example.recognizer;
import java.io.IOException;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
public class Camera_Capture extends Activity implements SurfaceHolder.Callback
{
private SurfaceView sv;
private SurfaceHolder sHolder;
private Camera mCamera;
private Parameters parameters;
private Button button;
private boolean mInitialized;
private SensorManager sensorManager =null;
private Sensor mAccelerometer;
private final float NOISE = (float) 2.0;
private float mLastx, mLasty;
/** Called when the activity is first created. */
@SuppressWarnings("deprecation")
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera_capture);
sv = (SurfaceView) findViewById(R.id.surfaceview);
Log.d("Logging", " inside onCreate()");
mInitialized =false;
sensorManager =(SensorManager)getSystemService(Context.SENSOR_SERVICE);
mAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
sensorManager.registerListener(sl, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
button = (Button)findViewById(R.id.capture_image);
button.setOnClickListener(new View.OnClickListener()
{
@Override
public void onClick(View v)
{
Log.d("Logging", "inside onclick()");
capture();
}
});
sHolder = sv.getHolder();
sHolder.addCallback(this);
Log.d("Logging", "callback added");
//tells Android that this surface will have its data constantly replaced
sHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera = Camera.open(0);
try {
mCamera.setPreviewDisplay(sHolder);
Log.d("Logging", "before startpreview()");
mCamera.startPreview();
} catch (IOException exception) {
exception.printStackTrace();
mCamera.release();
mCamera = null;
}
}
@Override
protected void onResume()
{
super.onResume();
sensorManager.registerListener(sl, mAccelerometer,SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
protected void onPause()
{
sensorManager.unregisterListener(sl);
super.onPause();
}
@Override
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3)
{
Log.d("Logging", "inside surfaceChanged()");
try
{
mCamera.setPreviewDisplay(arg0);
} catch (IOException e)
{
e.printStackTrace();
}
parameters = mCamera.getParameters();
parameters.setPictureSize(640,480);
mCamera.setParameters(parameters);
Log.d("Logging", "before startpreview() of surfaceChanged()");
mCamera.startPreview();
}
private void capture()
{
Log.d("Cam_capture","Inside Capture" );
mCamera.takePicture(null, null, new Camera.PictureCallback()
{
@Override
public void onPictureTaken(byte[] data, Camera camera)
{
Log.d("Capture_cam", "Inside onPictureTaken()");
Intent intent = new Intent();
intent.putExtra("image_arr", data);
mCamera.stopPreview();
setResult(RESULT_OK, intent);
finish();
}
});
}
@Override
public void surfaceCreated(SurfaceHolder holder)
{
if (Build.VERSION.SDK_INT >= 8) mCamera.setDisplayOrientation(90);
// The Surface has been created, acquire the camera and tell it where
// to draw the preview.
}
@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
SensorEventListener sl =new SensorEventListener() {
@Override
public void onSensorChanged(SensorEvent event) {
float x = event.values[0];
float y = event.values[1];
if(!mInitialized){
mLastx = x;
mLasty = y;
mInitialized = true;
}
else{
float deltaX = Math.abs(mLastx - x);
float deltaY = Math.abs(mLasty - y);
Log.d("Cam_cap","DeltaX="+deltaX+"deltay="+deltaY);
mLastx = x;
mLasty = y;
if(deltaX > NOISE || deltaY > NOISE)
capture();
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// TODO Auto-generated method stub
}
};
}
这是我的 logcat 的输出*(已编辑)*
01-20 16:42:39.835 20331 20331 D AndroidRuntime:
01-20 16:42:39.835 20331 20331 D AndroidRuntime: >>>>>> AndroidRuntime START com.android.internal.os.RuntimeInit (tool) <<<<<<
01-20 16:42:39.835 20331 20331 D AndroidRuntime: CheckJNI is OFF
01-20 16:42:39.835 20331 20331 D AndroidRuntime: language=-Duser.language=en region=-Duser.region=IN
01-20 16:42:39.971 20331 20331 D AndroidRuntime: Calling main entry com.android.commands.am.Am
01-20 16:42:39.981 20331 20331 D AndroidRuntime: Shutting down VM
01-20 16:43:34.148 20346 20346 D AndroidRuntime: Shutting down VM
01-20 16:43:34.171 20346 20346 E AndroidRuntime: FATAL EXCEPTION: main
01-20 16:43:34.171 20346 20346 E AndroidRuntime: java.lang.RuntimeException: takePicture failed
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.hardware.Camera.native_takePicture(Native Method)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.hardware.Camera.takePicture(Camera.java:1278)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at com.example.currencyrecognizer.Camera_Capture$1.onSensorChanged(Camera_Capture.java:216)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.hardware.SystemSensorManager$ListenerDelegate$1.handleMessage(SystemSensorManager.java:204)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.os.Handler.dispatchMessage(Handler.java:99)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.os.Looper.loop(Looper.java:153)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at android.app.ActivityThread.main(ActivityThread.java:5297)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at java.lang.reflect.Method.invokeNative(Native Method)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at java.lang.reflect.Method.invoke(Method.java:511)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:833)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:600)
01-20 16:43:34.171 20346 20346 E AndroidRuntime: at dalvik.system.NativeStart.main(Native Method)
01-20 16:43:40.926 20572 20572 D AndroidRuntime:
01-20 16:43:40.926 20572 20572 D AndroidRuntime: >>>>>> AndroidRuntime START com.android.internal.os.RuntimeInit (tool) <<<<<<
01-20 16:43:40.927 20572 20572 D AndroidRuntime: CheckJNI is OFF
01-20 16:43:40.927 20572 20572 D AndroidRuntime: language=-Duser.language=en region=-Duser.region=IN
01-20 16:43:41.074 20572 20572 D AndroidRuntime: Calling main entry com.android.commands.am.Am
01-20 16:43:41.094 20572 20572 D AndroidRuntime: Shutting down VM
【问题讨论】:
-
您的 logcat 输出被剪裁太多。我强烈建议使用
adb logcat -v threadtime来解决线程问题。有tag:Cam_cap和tag:Cam_capture行吗? mCamera 是否打开并开始预览? -
正在打印 Capture_capture 标签,但未打印 Capture_cam 标签。我可以使用调用 capture() 方法的按钮来捕获图像,但是当我从 onSensorChanged() 调用相同的方法时,我收到了这个错误。 'adb logcat -v threadtime' 正在输出大量数据,因为我正在真实设备上对其进行测试。
-
您可以过滤日志,例如
adb logcat -v threadtime | grep 5513;但来自MediaServer的一些系统日志实际上可能对您的情况有所帮助。 -
在加速器触发对
capture()的调用之前,您是否验证预览已完成其初始化过程? -
好的。我正在发布我的整个代码。
标签: android android-camera android-sensors