My video recording app doesn't get destroyed properly. When I press the Back button, the camera activity goes through onPause(). On starting a new instance of the app, video recording fails. If I manually kill the previous instance and re-run the app, it works perfectly. As far as I can tell, all the lifecycle callbacks needed to stop and release the camera are implemented, but the way they are wired together is probably causing the problem. I need help sorting this out, please.
CameraPreview Class
public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback {
    private SurfaceHolder mHolder;
    private Camera mCamera;
    private MyDrawing md;

    public CameraPreview(Context context, Camera camera) {
        super(context);
        mCamera = camera;
        mHolder = getHolder();
        mHolder.addCallback(this);
        // deprecated setting, but required on Android versions prior to 3.0
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        try {
            // create the surface and start camera preview
            if (mCamera == null) {
                mCamera.setPreviewDisplay(holder);
                mCamera.startPreview();
            }
        } catch (IOException e) {
            Log.d(VIEW_LOG_TAG, "Error setting camera preview: " + e.getMessage());
        }
    }

    public void refreshCamera(Camera camera) {
        if (mHolder.getSurface() == null) {
            // preview surface does not exist
            return;
        }
        // stop preview before making changes
        try {
            mCamera.stopPreview();
        } catch (Exception e) {
            // ignore: tried to stop a non-existent preview
        }
        // set preview size and make any resize, rotate or
        // reformatting changes here
        // start preview with new settings
        setCamera(camera);
        try {
            mCamera.setPreviewDisplay(mHolder);
            mCamera.startPreview();
            //startFaceDetection();
        } catch (Exception e) {
            Log.d(VIEW_LOG_TAG, "Error starting camera preview: " + e.getMessage());
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        // If your preview can change or rotate, take care of those events here.
        // Make sure to stop the preview before resizing or reformatting it.
        refreshCamera(mCamera);
    }

    public void setCamera(Camera camera) {
        // method to set a camera instance
        mCamera = camera;
        mCamera.setFaceDetectionListener(faceDetectionListener);
        startFaceDetection();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mCamera.release();
    }

    private Camera.FaceDetectionListener faceDetectionListener = new Camera.FaceDetectionListener() {
        @Override
        public void onFaceDetection(Camera.Face[] faces, Camera c) {
            if (faces.length > 0) {
                Log.d("FaceDetection", "face detected X and Y are as: " + faces.length +
                        " Face 1 Location X: " + faces[0].rect.centerX() +
                        "Y: " + faces[0].rect.centerY() + " LIES IN " + (MyDrawing.w - MyDrawing.radius) + "--" + (MyDrawing.w + MyDrawing.radius));
                if (faces[0].rect.centerX() >= 0 && faces[0].rect.centerX() < 115) {
                    Log.d("ALERT = ", "Detection Started");
                    AndroidVideoCaptureExample.capture.setText("Recording/ stopNsave ");
                    AndroidVideoCaptureExample.faceDetect();
                }
            } else {
                Log.d("FaceDetection", "circle cordinates are as: " + (MyDrawing.w - MyDrawing.radius) + "cX" + MyDrawing.radius + "cY");
            }
        }
    };

    public void startFaceDetection() {
        // Try starting Face Detection
        Camera.Parameters params = mCamera.getParameters();
        // start face detection only *after* preview has started
        if (params.getMaxNumDetectedFaces() > 0) {
            // camera supports face detection, so can start it:
            mCamera.startFaceDetection();
        }
    }
}
Main
public class AndroidVideoCaptureExample extends Activity {
    private static Camera mCamera;
    private static int vWidth, vHeight;
    private CameraPreview mPreview;
    public static MediaRecorder mediaRecorder;
    public static Button capture, switchCamera;
    private Context myContext;
    private FrameLayout cameraPreview;
    private boolean cameraFront = false;
    private static int desiredwidth = 640, desiredheight = 360;
    private MyDrawing md;
    public static boolean vRecording = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        myContext = this;
        initialize();
        Log.d("FaceDetection", "face detected BASEER");
    }

    private int findFrontFacingCamera() {
        int cameraId = -1;
        // Search for the front facing camera
        int numberOfCameras = Camera.getNumberOfCameras();
        for (int i = 0; i < numberOfCameras; i++) {
            CameraInfo info = new CameraInfo();
            Camera.getCameraInfo(i, info);
            if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
                cameraId = i;
                cameraFront = true;
                break;
            }
        }
        return cameraId;
    }

    public void onResume() {
        super.onResume();
        if (!hasCamera(myContext)) {
            Toast toast = Toast.makeText(myContext, "Sorry, your phone does not have a camera!", Toast.LENGTH_LONG);
            toast.show();
            finish();
        }
        if (mCamera == null) {
            // if the front facing camera does not exist
            if (findFrontFacingCamera() < 0) {
                Toast.makeText(this, "No front facing camera found.", Toast.LENGTH_LONG).show();
                switchCamera.setVisibility(View.GONE);
            }
            mCamera = Camera.open(findFrontFacingCamera());
            mCamera.setDisplayOrientation(90);
            mPreview.refreshCamera(mCamera);
        }
    }

    public void initialize() {
        cameraPreview = (FrameLayout) findViewById(R.id.camera_preview);
        mPreview = new CameraPreview(myContext, mCamera);
        cameraPreview.addView(mPreview);
        capture = (Button) findViewById(R.id.button_capture);
        capture.setOnClickListener(captrureListener);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // when paused, release the camera so it can be used by other applications
        releaseCamera();
    }

    private boolean hasCamera(Context context) {
        // check if the device has a camera
        if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
            return true;
        } else {
            return false;
        }
    }

    static boolean recording = false;

    OnClickListener captrureListener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            if (recording) {
                // stop recording and release camera
                mediaRecorder.stop(); // stop the recording
                releaseMediaRecorder(); // release the MediaRecorder object
                Toast.makeText(AndroidVideoCaptureExample.this, "Video captured!", Toast.LENGTH_LONG).show();
                Toast.makeText(AndroidVideoCaptureExample.this, vWidth + "BY" + vHeight, Toast.LENGTH_LONG).show();
                recording = false;
            }
        }
    };

    public static void faceDetect() {
        prepareMediaRecorder();
        recording = true;
        mediaRecorder.start();
    }

    private static void releaseMediaRecorder() {
        if (mediaRecorder != null) {
            mediaRecorder.reset(); // clear recorder configuration
            mediaRecorder.release(); // release the recorder object
            mediaRecorder = null;
            mCamera.lock(); // lock camera for later use
        }
    }

    private static boolean prepareMediaRecorder() {
        List<Camera.Size> videosizes = mCamera.getParameters().getSupportedVideoSizes();
        Camera.Size videosize = videosizes.get(1);
        Camera.Size optimalVideoSize = getOptimalPreviewSize(videosize, desiredwidth, desiredheight);
        vWidth = optimalVideoSize.width;   //mCamera.getParameters().getPreviewSize().width;
        vHeight = optimalVideoSize.height; //mCamera.getParameters().getPreviewSize().height;
        mediaRecorder = new MediaRecorder();
        mCamera.unlock();
        mediaRecorder.setCamera(mCamera);
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
        mediaRecorder.setVideoEncodingBitRate(512 * 1000);
        mediaRecorder.setVideoFrameRate(15);
        mediaRecorder.setVideoSize(optimalVideoSize.width, optimalVideoSize.height);
        mediaRecorder.setOutputFile("/sdcard/myvideo.mp4");
        mediaRecorder.setMaxDuration(600000); // Set max duration to 600000 ms (10 min)
        mediaRecorder.setMaxFileSize(50000000); // Set max file size 50M
        try {
            mediaRecorder.prepare();
        } catch (IllegalStateException e) {
            releaseMediaRecorder();
            return false;
        } catch (IOException e) {
            releaseMediaRecorder();
            return false;
        }
        return true;
    }

    private void releaseCamera() {
        // stop and release camera
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }

    private static Camera.Size getOptimalPreviewSize(Camera.Size sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.2;
        double targetRatio = (double) w / h;
        if (sizes == null)
            return null;
        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;
        int targetHeight = h;
        // Try to find a size matching the target aspect ratio and height
        Camera.Size size = sizes;
        Log.d("Camera", "Checking size " + size.width + "w " + size.height + "h");
        double ratio = (double) size.width / size.height;
        if (Math.abs(ratio - targetRatio) <= ASPECT_TOLERANCE)
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        // Cannot find one matching the aspect ratio, so ignore that requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
        return optimalSize;
    }
}
ErrorLog
Error/Crash: If the app goes to the background and is reopened, then as soon as a face is detected (video recording starts on that detection) the app crashes with the following error.
E/MediaRecorder: start failed: -38
D/AndroidRuntime: Shutting down VM
W/dalvikvm: threadid=1: thread exiting with uncaught exception (group=0x42230c08)
E/AndroidRuntime: FATAL EXCEPTION: main
E/AndroidRuntime: Process: com.javacodegeeks.androidvideocaptureexample, PID: 8350
E/AndroidRuntime: java.lang.IllegalStateException
E/AndroidRuntime: at android.media.MediaRecorder.start(Native Method)
E/AndroidRuntime: at com.javacodegeeks.androidvideocaptureexample.AndroidVideoCaptureExample.faceDetect(AndroidVideoCaptureExample.java:141)
E/AndroidRuntime: at com.javacodegeeks.androidvideocaptureexample.CameraPreview$1.onFaceDetection(CameraPreview.java:105)
E/AndroidRuntime: at android.hardware.Camera$EventHandler.handleMessage(Camera.java:1015)
E/AndroidRuntime: at android.os.Handler.dispatchMessage(Handler.java:102)
E/AndroidRuntime: at android.os.Looper.loop(Looper.java:146)
E/AndroidRuntime: at android.app.ActivityThread.main(ActivityThread.java:5635)
E/AndroidRuntime: at java.lang.reflect.Method.invokeNative(Native Method)
E/AndroidRuntime: at java.lang.reflect.Method.invoke(Method.java:515)
E/AndroidRuntime: at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1291)
E/AndroidRuntime: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1107)
E/AndroidRuntime: at dalvik.system.NativeStart.main(Native Method)
CameraPreview.surfaceDestroyed() releases the camera, but does not set mCamera = null. When the app is brought back from the background, AndroidVideoCaptureExample.onCreate() may be skipped, so the mPreview object is reused with its old mCamera reference. Now if surfaceChanged() is executed before AndroidVideoCaptureExample.onResume() calls mPreview.refreshCamera(mCamera), you are screwed.
The easy fix would be to add mCamera = null; to CameraPreview.surfaceDestroyed(), and to check if (camera == null) { return; } at the beginning of CameraPreview.refreshCamera(Camera camera).
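Roughly, the changed parts of CameraPreview would look like this (just a minimal sketch of the fix described above; the rest of the class stays as it is):

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    if (mCamera != null) {
        mCamera.release();
        mCamera = null; // forget the released camera so a stale reference can't be reused later
    }
}

public void refreshCamera(Camera camera) {
    if (camera == null) {
        // no usable camera yet, e.g. surfaceChanged() fired before onResume() reopened it
        return;
    }
    if (mHolder.getSurface() == null) {
        // preview surface does not exist
        return;
    }
    // ... rest of the method unchanged: stopPreview(), setCamera(camera),
    // setPreviewDisplay(mHolder), startPreview()
}

With that guard in place, surfaceChanged() can safely call refreshCamera(mCamera) even while mCamera is still null.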
BTW, CameraPreview.surfaceCreated() has some broken code:
if (mCamera == null) {
    mCamera.setPreviewDisplay(holder);
    …
You can simply delete this whole block; these operations will be performed in refreshCamera(), which is called from surfaceChanged(). You can also remove the second parameter from the CameraPreview constructor.
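As a sketch of what that cleanup could look like (assuming the camera is then handed over only through setCamera()/refreshCamera()):

public CameraPreview(Context context) {
    super(context);
    mHolder = getHolder();
    mHolder.addCallback(this);
    // deprecated setting, but required on Android versions prior to 3.0
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}

public void surfaceCreated(SurfaceHolder holder) {
    // nothing to do here: the preview is set up by refreshCamera(),
    // which surfaceChanged() calls once the surface actually exists
}

The call in initialize() would then shrink to new CameraPreview(myContext).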