• After 15+ years, we've made a big change: Android Forums is now Early Bird Club. Learn more here.

Use Samsung Galaxy Note10+ for capturing depth image

Bruce Kao

Lurker
When I use Samsung Galaxy Note10+ for capturing depth image by camera2 API, I find that in preview mode, I can capture depth image correctly. However, in capture mode, the captured depth image is always empty. Does anyone know why? And how to fix it?
 
The following is my code for depth camera:

public class AndroidDepthCamera
{
public TextureView depthPreviewTextureView;
public TextureView depthPreviewTextureView2;

private MainActivity mMainActivity;
private CameraManager camMgr;
private WindowManager windowMgr;
private String packageName;

private CameraDevice depthCameraDevice = null;
private int depthImgWidth = 640, depthImgHeight = 480;
private CaptureRequest.Builder depthPreviewBuilder = null;
private CameraCaptureSession depthCamPreviewCaptureSession = null;
private ImageReader depthPreviewImgReader;

private CameraCaptureSession depthCamTakePicCaptureSession = null;
private File coloredDepthImgFile;
private CaptureRequest.Builder depthCapBuilder;
private Handler depthTakePicBGHandler;
private ImageReader depthTakePicImgReader;
private Bitmap grayScaleDepthBitmapG;


public AndroidDepthCamera(MainActivity mMainActivity, CameraManager camMgr, WindowManager windowMgr, String packageName)
{
this.mMainActivity = mMainActivity;
this.camMgr = camMgr;
this.windowMgr = windowMgr;
this.packageName = packageName;

depthPreviewImgReader = ImageReader.newInstance(depthImgWidth, depthImgHeight, ImageFormat.DEPTH16, 2);
depthPreviewImgReader.setOnImageAvailableListener(depthImgReaderPreviewOnImageAvailable, null);
}


public void openCamera()
{
try
{
String depthCamId = camMgr.getCameraIdList()[4];
CameraCharacteristics depthCamChar = camMgr.getCameraCharacteristics(depthCamId);

StreamConfigurationMap colorStrConfigMap = depthCamChar.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

if(ContextCompat.checkSelfPermission(mMainActivity, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED)
camMgr.openCamera(depthCamId, depthCameraStateCallback, null);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}
}


private CameraDevice.StateCallback depthCameraStateCallback = new CameraDevice.StateCallback()
{
@override
public void onOpened(@NonNull CameraDevice cameraDevice)
{
depthCameraDevice = cameraDevice;
startDepthCamPreview();
}

@override
public void onDisconnected(@NonNull CameraDevice cameraDevice)
{
Toast.makeText(mMainActivity, "Cannot use color camera!", Toast.LENGTH_LONG).show();
}

@override
public void onError(@NonNull CameraDevice cameraDevice, int i)
{
Toast.makeText(mMainActivity, "Color camera opening error!", Toast.LENGTH_LONG).show();
}
};


private void startDepthCamPreview()
{
try
{
depthPreviewBuilder = depthCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}


depthPreviewBuilder.set(CaptureRequest.JPEG_ORIENTATION, 0);
Range<Integer> fpsRange = new Range<>(15, 30);
depthPreviewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
depthPreviewBuilder.addTarget(depthPreviewImgReader.getSurface());

try
{
depthCameraDevice.createCaptureSession(Arrays.asList(depthPreviewImgReader.getSurface()), depthCamPreviewCaptureSessionStateCallback, null);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}
}


private CameraCaptureSession.StateCallback depthCamPreviewCaptureSessionStateCallback = new CameraCaptureSession.StateCallback()
{
@override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession)
{
closeAllCameraCaptureSession();

depthCamPreviewCaptureSession = cameraCaptureSession;
depthPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
HandlerThread backgroundThread = new HandlerThread("DepthCameraPreview");
backgroundThread.start();
Handler backgroundHandler = new Handler(backgroundThread.getLooper());

try
{
depthCamPreviewCaptureSession.setRepeatingRequest(depthPreviewBuilder.build(), null, backgroundHandler);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}
}

@override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession)
{
Toast.makeText(mMainActivity, "Depth camera preview error!", Toast.LENGTH_LONG).show();
}
};


private ImageReader.OnImageAvailableListener depthImgReaderPreviewOnImageAvailable = new ImageReader.OnImageAvailableListener()
{
@override
public void onImageAvailable(ImageReader imageReader)
{
try
{
Image image = imageReader.acquireNextImage();
if (image != null && image.getFormat() == ImageFormat.DEPTH16)
{
int [] grayScaleDepthImg = getGrayScaleDepthImg(image);
Bitmap grayScaleDepthBitmap = convertToARGBBitmap(grayScaleDepthImg);
grayScaleDepthBitmapG = grayScaleDepthBitmap;
renderBitmapToTextureView(grayScaleDepthBitmap, depthPreviewTextureView);
}
image.close();
}
catch (Exception e)
{
Toast.makeText(mMainActivity, "Failed to acquireNextImage: " + e.getMessage(), Toast.LENGTH_LONG).show();
}
}
};


private int [] getGrayScaleDepthImg(Image image)
{
int [] grayScaleDepthImg = new int[depthImgWidth*depthImgHeight];
ShortBuffer shortDepthBuffer = image.getPlanes()[0].getBuffer().asShortBuffer();

for (int i = 0; i < depthImgWidth*depthImgHeight; i++)
{
int depth = shortDepthBuffer.get(i) & 0x1FFF;
//depth = Math.max(200, depth);
//depth = Math.min(1600, depth);
grayScaleDepthImg = depth;
}

Vector<Float> histograms = calculateHistogram(grayScaleDepthImg, Math.round((float)0xFFFF));

for (int i = 0; i < depthImgWidth*depthImgHeight; i++)
grayScaleDepthImg = histograms.get(grayScaleDepthImg).intValue();

return grayScaleDepthImg;
}


Vector<Float> calculateHistogram(int[] depthImg, int histogramSize)
{
float numPts = 0;
Vector<Float> histograms = new Vector<Float>();
for (int i = 0; i < histogramSize; i++)
histograms.add(0.0f);

for (int i = 0; i < depthImgWidth*depthImgHeight; i++)
{
if (depthImg != 0)
{
histograms.set(depthImg, histograms.get(depthImg) + 1);
numPts++;
}
}

for (int idx = 1; idx < histogramSize; idx++)
histograms.set(idx, histograms.get(idx) + histograms.get(idx - 1));

if (numPts > 0)
for (int idx = 1; idx < histogramSize; idx++)
histograms.set(idx, (255.0f*(1.0f - (histograms.get(idx) / numPts))));

return histograms;
}


private Bitmap convertToARGBBitmap(int[] grayScaleDepthImg)
{
Bitmap bitmap = Bitmap.createBitmap(depthImgWidth, depthImgHeight, Bitmap.Config.ARGB_4444);
for (int y = 0; y < depthImgHeight; y++)
{
for (int x = 0; x < depthImgWidth; x++)
{
int index = y * depthImgWidth + x;
bitmap.setPixel(x, y, Color.argb(255, grayScaleDepthImg[index], grayScaleDepthImg[index], 0));
}
}
return bitmap;
}


private void renderBitmapToTextureView(Bitmap bitmap, TextureView textureView)
{
Bitmap bitmapTemp;
int displayRotation = windowMgr.getDefaultDisplay().getRotation();
if (displayRotation == Surface.ROTATION_0 || displayRotation == Surface.ROTATION_180)
{
Mat matTemp = new Mat();
Utils.bitmapToMat(bitmap, matTemp);
Core.transpose(matTemp, matTemp);
Core.flip(matTemp, matTemp, 1);
bitmapTemp = Bitmap.createBitmap(matTemp.width(), matTemp.height(), Bitmap.Config.ARGB_4444);
Utils.matToBitmap(matTemp, bitmapTemp);
matTemp.release();
}
else if (displayRotation == Surface.ROTATION_270)
{
Mat matTemp = new Mat();
Utils.bitmapToMat(bitmap, matTemp);
Core.flip(matTemp, matTemp, 0);
Core.flip(matTemp, matTemp, 1);
bitmapTemp = Bitmap.createBitmap(matTemp.width(), matTemp.height(), Bitmap.Config.ARGB_4444);
Utils.matToBitmap(matTemp, bitmapTemp);
matTemp.release();
}
else
bitmapTemp = bitmap;


Canvas canvas = textureView.lockCanvas();
if(canvas != null)
{
canvas.drawBitmap(bitmapTemp, getBitmap2TextureViewTransform(textureView, bitmapTemp), null);
}
textureView.unlockCanvasAndPost(canvas);
bitmap.recycle();
bitmapTemp.recycle();
}


private Matrix getBitmap2TextureViewTransform(TextureView textureView, Bitmap bitmap)
{
Matrix matrix = new Matrix();
RectF textureViewRect = new RectF(0, 0, textureView.getWidth(), textureView.getHeight());
RectF bitmapRect = new RectF(0, 0, bitmap.getWidth(), bitmap.getHeight());
matrix.setRectToRect(bitmapRect, textureViewRect, Matrix.ScaleToFit.CENTER);
return matrix;
}








public void takePicture()
{
if(depthCameraDevice == null)
{
Toast.makeText(mMainActivity, "Depth Camera error!", Toast.LENGTH_LONG).show();
return;
}


coloredDepthImgFile = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getPath(), "coloredDepthImg.jpg");


try
{
depthTakePicImgReader = ImageReader.newInstance(depthImgWidth, depthImgHeight, ImageFormat.DEPTH16, 2);

HandlerThread thread = new HandlerThread(" DepthCameraTakePicture");
thread.start();
depthTakePicBGHandler = new Handler(thread.getLooper());

depthTakePicImgReader.setOnImageAvailableListener(depthImgReaderTakePicOnImageAvailable, depthTakePicBGHandler);

List<Surface> outputSurfaces = new ArrayList<Surface>(1);
outputSurfaces.add(depthTakePicImgReader.getSurface());

depthCapBuilder = depthCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
depthCapBuilder.addTarget(depthTakePicImgReader.getSurface());
depthCapBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);

depthCameraDevice.createCaptureSession(outputSurfaces, depthCamTakePicCaptureSessionStateCallback, depthTakePicBGHandler);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}
}


ImageReader.OnImageAvailableListener depthImgReaderTakePicOnImageAvailable = new ImageReader.OnImageAvailableListener() // imagereader 有影像時,儲存影像
{
@override
public void onImageAvailable(ImageReader imageReader)
{
Image image = null;

try
{
image = imageReader.acquireNextImage();

if (image != null && image.getFormat() == ImageFormat.DEPTH16)
{
int [] grayScaleDepthImg = getGrayScaleDepthImg_v2(image);
Bitmap grayScaleDepthBitmap = convertToARGBBitmap(grayScaleDepthImg);
renderBitmapToTextureView(grayScaleDepthBitmap, depthPreviewTextureView2);

//Bitmap depthBitmap = convertToARGBBitmap(image);
//renderBitmapToTextureView(depthBitmap, depthPreviewTextureView2);

// OutputStream output = null;
// try
// {
// output = new FileOutputStream(coloredDepthImgFile);
// output.write(stream.toByteArray());
// }
// finally
// {
// if(null != output)
// output.close();
// }
}
}
// catch (FileNotFoundException e)
// {
// e.printStackTrace();
// }
// catch (IOException e)
// {
// e.printStackTrace();
// }
finally
{
if(image != null)
image.close();
}
}
};


private int [] getGrayScaleDepthImg_v2(Image image)
{
int [] grayScaleDepthImg = new int[depthImgWidth*depthImgHeight];
ShortBuffer shortDepthBuffer = image.getPlanes()[0].getBuffer().asShortBuffer();

for (int i = 0; i < depthImgWidth*depthImgHeight; i++)
{
int depth = shortDepthBuffer.get(i) & 0x1FFF;
grayScaleDepthImg = depth;
}

Vector<Float> histograms = calculateHistogram(grayScaleDepthImg, Math.round((float)0xFFFF));

for (int i = 0; i < depthImgWidth*depthImgHeight; i++)
grayScaleDepthImg = histograms.get(grayScaleDepthImg).intValue();

return grayScaleDepthImg;
}


private Bitmap convertToARGBBitmap(Image image)
{
ByteBuffer colorBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[colorBuffer.capacity()];
colorBuffer.get(bytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
return bitmap;
}


private CameraCaptureSession.StateCallback depthCamTakePicCaptureSessionStateCallback = new CameraCaptureSession.StateCallback()
{
@override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession)
{
try
{
closeAllCameraCaptureSession();

depthCamTakePicCaptureSession = cameraCaptureSession;

depthCamTakePicCaptureSession.capture(depthCapBuilder.build(), depthCamTakePicCaptureSessionCapCallback, depthTakePicBGHandler);
}
catch (CameraAccessException e)
{
e.printStackTrace();
}
}

@override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession)
{
Toast.makeText(mMainActivity, "Picture taking initialization error!", Toast.LENGTH_LONG).show();
}
};


CameraCaptureSession.CaptureCallback depthCamTakePicCaptureSessionCapCallback = new CameraCaptureSession.CaptureCallback()
{
@override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result)
{
super.onCaptureCompleted(session, request, result);

Uri uri = Uri.parse("android.resource://" + packageName + "/" + R.raw.sound_camera_shutter);
MediaPlayer mp = MediaPlayer.create(mMainActivity, uri);
mp.start();

Toast.makeText(mMainActivity, "Finishing taking picture\nPicture: " + coloredDepthImgFile, Toast.LENGTH_SHORT).show();
startDepthCamPreview();
}

@override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {}
};


private void closeAllCameraCaptureSession()
{
if(depthCamPreviewCaptureSession != null)
{
depthCamPreviewCaptureSession.close();
depthCamPreviewCaptureSession = null;
}


if(depthCamTakePicCaptureSession != null)
{
depthCamTakePicCaptureSession.close();
depthCamTakePicCaptureSession = null;
}
}


public void releaseCameraDevice()
{
if(depthCameraDevice != null)
{
depthCameraDevice.close();
depthCameraDevice = null;
}
}
}
 
Back
Top Bottom