package lu.circl.mispbump.cam;

import android.Manifest;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;
import android.util.Size;
import android.util.SparseArray;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;

import com.google.android.gms.vision.Frame;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import lu.circl.mispbump.R;

public class CameraFragment extends Fragment implements ActivityCompat.OnRequestPermissionsResultCallback {

    private class ImageProcessingThread extends Thread {

        // volatile: the flag is cleared from the UI thread in onPause()/disablePreview()
        private volatile boolean isRunning = true;
        private int lastAccessedIndex = 0;
        private Bitmap[] processQueue = new Bitmap[10];

        ImageProcessingThread() {
            Log.i(TAG, "Image worker thread created");
        }

        void addToQueue(Bitmap bitmap) {
            processQueue[lastAccessedIndex] = bitmap;
            // circular array access
            lastAccessedIndex = (lastAccessedIndex + 1) % processQueue.length;
        }

        @Override
        public void run() {
            while (isRunning) {
                // no need to process further images; sleep so this loop does not busy-wait
                if (!readQrEnabled) {
                    try {
                        sleep(250);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    continue;
                }

                for (int i = 0; i < processQueue.length; i++) {
                    // queue position already processed or not in use
                    if (processQueue[i] == null) {
                        continue;
                    }

                    // analyze image for qr codes
                    SparseArray<Barcode> barcodes = barcodeDetector.detect(
                            new Frame.Builder().setBitmap(processQueue[i]).build()
                    );

                    // does the frame contain any qr code?
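                    // Note: only the first detected code is forwarded; rawValue holds the
                    // decoded string payload of the QR code.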
                    if (barcodes.size() > 0) {
                        if (readQrEnabled) {
                            qrResultCallback.qrScanResult(barcodes.valueAt(0).rawValue);
                        }
                    }

                    // set buffer entry as processed
                    processQueue[i] = null;
                }

                // sleep between analysis of buffer (-25% cpu usage)
                try {
                    sleep(250);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    private static final String TAG = "CAMERA";

    private View hideCamView;
    private QrScanCallback qrResultCallback;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
    }

    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    private static final int REQUEST_CAMERA_PERMISSION = 1;
    private static final String FRAGMENT_DIALOG = "dialog";

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    /**
     * Max preview width that is guaranteed by Camera2 API
     */
    private static final int MAX_PREVIEW_WIDTH = 1920;

    /**
     * Max preview height that is guaranteed by Camera2 API
     */
    private static final int MAX_PREVIEW_HEIGHT = 1080;

    /**
     * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
     * {@link TextureView}.
     */
    private final TextureView.SurfaceTextureListener mSurfaceTextureListener = new TextureView.SurfaceTextureListener() {

        boolean processing = false;

        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
            Log.i(TAG, "Width: " + width + "; height: " + height);
            openCamera(width, height);
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
            configureTransform(width, height);
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
            return true;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture texture) {
        }
    };

    private ImageProcessingThread imageProcessingThread;

    /**
     * ID of the current {@link CameraDevice}.
     */
    private String mCameraId;

    /**
     * An {@link AutoFitTextureView} for camera preview.
     */
    private AutoFitTextureView autoFitTextureView;

    /**
     * A {@link CameraCaptureSession} for camera preview.
     */
    private CameraCaptureSession previewCaptureSession;

    /**
     * A reference to the opened {@link CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    /**
     * The {@link android.util.Size} of camera preview.
     */
    private Size mPreviewSize;

    /**
     * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
     */
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            // This method is called when the camera is opened. We start camera preview here.
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;

            Activity activity = getActivity();
            if (null != activity) {
                activity.finish();
            }
        }
    };

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

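    // Note: the camera state callback and the ImageReader listener below are both dispatched on
    // mBackgroundHandler, so the YUV-to-Bitmap conversion runs off the UI thread; QR detection
    // itself runs on the separate ImageProcessingThread.
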
    /**
     * An {@link ImageReader} that receives the (downscaled) preview frames used for QR detection.
     */
    private ImageReader stillImageReader;

    /**
     * This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called
     * when a new frame is ready to be processed.
     */
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {

        @Override
        public void onImageAvailable(ImageReader reader) {
            Image image = reader.acquireNextImage();
            Bitmap bitmap = YUV2Bitmap(image);
            imageProcessingThread.addToQueue(bitmap);
            image.close();
        }
    };

    /**
     * {@link CaptureRequest.Builder} for the camera preview
     */
    private CaptureRequest.Builder mPreviewRequestBuilder;

    /**
     * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder}
     */
    private CaptureRequest mPreviewRequest;

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    /**
     * Shows a {@link Toast} on the UI thread.
     *
     * @param text The message to show
     */
    private void showToast(final String text) {
        final Activity activity = getActivity();

        if (activity != null) {
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
                }
            });
        }
    }

    /**
     * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
     * is at least as large as the respective texture view size, and that is at most as large as
     * the respective max size, and whose aspect ratio matches with the specified value. If such
     * size doesn't exist, choose the largest one that is at most as large as the respective max
     * size, and whose aspect ratio matches with the specified value.
     *
     * @param choices           The list of sizes that the camera supports for the intended output
     *                          class
     * @param textureViewWidth  The width of the texture view relative to sensor coordinate
     * @param textureViewHeight The height of the texture view relative to sensor coordinate
     * @param maxWidth          The maximum width that can be chosen
     * @param maxHeight         The maximum height that can be chosen
     * @param aspectRatio       The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight,
                                          int maxWidth, int maxHeight, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();

        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();

        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }

        // Pick the smallest of those big enough. If there is no one big enough, pick the
        // largest of those not big enough.
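        // (Illustrative: with e.g. a 1920x1080 aspect ratio, a 1280x720 candidate passes the
        // ratio check above because 1280 * 1080 / 1920 == 720.)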
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_camera, container, false);

        hideCamView = v.findViewById(R.id.hideCam);
        hideCamView.setVisibility(View.GONE);

        initRenderScript();
        setUpBarcodeDetector();

        return v;
    }

    @Override
    public void onViewCreated(final View view, Bundle savedInstanceState) {
        autoFitTextureView = view.findViewById(R.id.texture);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
    }

    @Override
    public void onResume() {
        super.onResume();
        enablePreview();

        // startBackgroundThread();
        //
        // imageProcessingThread = new ImageProcessingThread();
        // imageProcessingThread.start();
        //
        // // When the screen is turned off and turned back on, the SurfaceTexture is already
        // // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
        // // a camera and start preview from here (otherwise, we wait until the surface is ready in
        // // the SurfaceTextureListener).
        // if (autoFitTextureView.isAvailable()) {
        //     openCamera(autoFitTextureView.getWidth(), autoFitTextureView.getHeight());
        // } else {
        //     autoFitTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        // }
    }

    @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();

        if (imageProcessingThread.isAlive()) {
            imageProcessingThread.isRunning = false;
        }

        super.onPause();
    }

    private void requestCameraPermission() {
        if (shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)) {
            new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
        } else {
            requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults.length != 1 || grantResults[0] != PackageManager.PERMISSION_GRANTED) {
                ErrorDialog.newInstance("REQUEST PERMISSION").show(getChildFragmentManager(), FRAGMENT_DIALOG);
            }
        } else {
            super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        }
    }

    /**
     * Sets up member variables related to camera.
     *
     * @param width  The width of available size for camera preview
     * @param height The height of available size for camera preview
     */
    @SuppressWarnings("SuspiciousNameCombination")
    private void setUpCameraOutputs(int width, int height) {
        Activity activity = getActivity();
        assert activity != null;

        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);

        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }

                StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }

                // For still bitmap captures, we use the largest available size.
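                // (Note: the ImageReader below is created at 1/8 of that size; a downscaled frame
                // appears to be enough for QR detection and keeps the YUV-to-Bitmap conversion cheap.)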
                Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)), new CompareSizesByArea());

                stillImageReader = ImageReader.newInstance(largest.getWidth() / 8, largest.getHeight() / 8, ImageFormat.YUV_420_888, 2);
                stillImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

                // Find out if we need to swap dimension to get the preview size relative to sensor coordinate.
                int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
                //noinspection ConstantConditions
                int mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);

                boolean swappedDimensions = false;

                switch (displayRotation) {
                    case Surface.ROTATION_0:
                    case Surface.ROTATION_180:
                        if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                            swappedDimensions = true;
                        }
                        break;
                    case Surface.ROTATION_90:
                    case Surface.ROTATION_270:
                        if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                            swappedDimensions = true;
                        }
                        break;
                    default:
                        Log.e(TAG, "Display rotation is invalid: " + displayRotation);
                }

                Point displaySize = new Point();
                activity.getWindowManager().getDefaultDisplay().getSize(displaySize);

                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;

                if (swappedDimensions) {
                    rotatedPreviewWidth = height;
                    rotatedPreviewHeight = width;
                    maxPreviewWidth = displaySize.y;
                    maxPreviewHeight = displaySize.x;
                }

                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }

                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }

                Size[] sizes = map.getOutputSizes(SurfaceTexture.class);
                for (Size size : sizes) {
                    Log.i(TAG, size.toString());
                }

                // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
                // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
                // garbage capture data.
                mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                        rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
                        maxPreviewHeight, largest);

                // We fit the aspect ratio of TextureView to the size of preview we picked.
                int orientation = getResources().getConfiguration().orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    autoFitTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
                } else {
                    autoFitTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
                }

                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2API is used but not supported on the
            // device this code runs.
            ErrorDialog.newInstance("CAMERA ERROR").show(getChildFragmentManager(), FRAGMENT_DIALOG);
        }
    }

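    // openCamera() is triggered either from onSurfaceTextureAvailable() or directly from
    // enablePreview() when the SurfaceTexture is already available.
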
    /**
     * Opens the camera specified by {@link CameraFragment#mCameraId}.
     */
    private void openCamera(int width, int height) {
        Activity activity = getActivity();

        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
            requestCameraPermission();
            return;
        }

        setUpCameraOutputs(width, height);
        configureTransform(width, height);

        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);

        try {
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
        }
    }

    /**
     * Closes the current {@link CameraDevice}.
     */
    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();

            if (null != previewCaptureSession) {
                previewCaptureSession.close();
                previewCaptureSession = null;
            }

            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }

            if (null != stillImageReader) {
                stillImageReader.close();
                stillImageReader = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        if (mBackgroundThread == null) {
            return;
        }

        try {
            mBackgroundThread.quitSafely();
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            // from AutoFitTextureView
            SurfaceTexture texture = autoFitTextureView.getSurfaceTexture();

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);
            Surface mImageSurface = stillImageReader.getSurface();

            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); // TEMPLATE_ZERO_SHUTTER_LAG
            mPreviewRequestBuilder.addTarget(surface);
            mPreviewRequestBuilder.addTarget(mImageSurface);

            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, stillImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }

                            // When the session is ready, we start displaying the preview.
                            previewCaptureSession = cameraCaptureSession;

                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);

                                // Finally, we start displaying the camera preview.
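                                // A null CaptureCallback is fine here: per-frame capture metadata is
                                // not needed, only the frames delivered to the TextureView surface
                                // and to stillImageReader.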
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                previewCaptureSession.setRepeatingRequest(mPreviewRequest, null, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                            showToast("Failed");
                        }
                    }, null
            );
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Configures the necessary {@link android.graphics.Matrix} transformation to `autoFitTextureView`.
     * This method should be called after the camera preview size is determined in
     * setUpCameraOutputs and also the size of `autoFitTextureView` is fixed.
     *
     * @param viewWidth  The width of `autoFitTextureView`
     * @param viewHeight The height of `autoFitTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        Activity activity = getActivity();

        if (null == autoFitTextureView || null == mPreviewSize || null == activity) {
            return;
        }

        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();

        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }

        autoFitTextureView.setTransform(matrix);
    }

    /**
     * Compares two {@code Size}s based on their areas.
     */
    static class CompareSizesByArea implements Comparator<Size> {

        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                    - (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    /**
     * Shows an error message dialog.
     */
    public static class ErrorDialog extends DialogFragment {

        private static final String ARG_MESSAGE = "message";

        public static ErrorDialog newInstance(String message) {
            ErrorDialog dialog = new ErrorDialog();
            Bundle args = new Bundle();
            args.putString(ARG_MESSAGE, message);
            dialog.setArguments(args);
            return dialog;
        }

        @NonNull
        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            final Activity activity = getActivity();
            return new AlertDialog.Builder(activity)
                    .setMessage(getArguments().getString(ARG_MESSAGE))
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialogInterface, int i) {
                            activity.finish();
                        }
                    })
                    .create();
        }
    }

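    // Like ErrorDialog above, the confirmation dialog below falls back to finishing the hosting
    // activity when camera access is refused, since the fragment cannot work without it.
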
    /**
     * Shows OK/Cancel confirmation dialog about camera permission.
     */
    public static class ConfirmationDialog extends DialogFragment {

        @NonNull
        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            final Fragment parent = getParentFragment();
            return new AlertDialog.Builder(getActivity())
                    .setMessage("REQUEST PERMISSION")
                    .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            parent.requestPermissions(new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERMISSION);
                        }
                    })
                    .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            Activity activity = parent.getActivity();
                            if (activity != null) {
                                activity.finish();
                            }
                        }
                    })
                    .create();
        }
    }

    public interface QrScanCallback {
        void qrScanResult(String qrData);
    }

    public interface CameraReadyCallback {
        void ready();
    }

    private CameraReadyCallback cameraReadyCallback;

    private boolean readQrEnabled = true;
    private BarcodeDetector barcodeDetector;

    private RenderScript renderScript;

    private void initRenderScript() {
        renderScript = RenderScript.create(getActivity());
    }

    private Bitmap YUV2Bitmap(Image image) {
        if (image == null) {
            return null;
        }

        ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(renderScript, Element.U8_4(renderScript));

        int W = image.getWidth();
        int H = image.getHeight();

        Image.Plane Y = image.getPlanes()[0];
        Image.Plane U = image.getPlanes()[1];
        Image.Plane V = image.getPlanes()[2];

        int Yb = Y.getBuffer().remaining();
        int Ub = U.getBuffer().remaining();
        int Vb = V.getBuffer().remaining();

        byte[] data = new byte[Yb + Ub + Vb];

        Y.getBuffer().get(data, 0, Yb);
        V.getBuffer().get(data, Yb, Vb);
        U.getBuffer().get(data, Yb + Vb, Ub);

        Type.Builder yuvType = new Type.Builder(renderScript, Element.U8(renderScript)).setX(data.length);
        Allocation in = Allocation.createTyped(renderScript, yuvType.create(), Allocation.USAGE_SCRIPT);

        Type.Builder rgbaType = new Type.Builder(renderScript, Element.RGBA_8888(renderScript)).setX(W).setY(H);
        Allocation out = Allocation.createTyped(renderScript, rgbaType.create(), Allocation.USAGE_SCRIPT);

        final Bitmap bmpout = Bitmap.createBitmap(W, H, Bitmap.Config.ARGB_8888);

        in.copyFromUnchecked(data);

        yuvToRgbIntrinsic.setInput(in);
        yuvToRgbIntrinsic.forEach(out);
        out.copyTo(bmpout);

        // The Image is closed by the caller (the ImageReader listener), so it is not closed
        // again here.
        return bmpout;
    }

    public void setReadQrEnabled(boolean enabled) {
        readQrEnabled = enabled;
    }

    public void disablePreview() {
        hideCamView.setAlpha(0f);
        hideCamView.setVisibility(View.VISIBLE);

        hideCamView.animate()
                .alpha(1f)
                .setDuration(250)
                .setListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        closeCamera();
                        stopBackgroundThread();

                        if (imageProcessingThread.isAlive()) {
                            imageProcessingThread.isRunning = false;
                        }
                    }
                });
    }

    public void enablePreview() {
        startBackgroundThread();

        imageProcessingThread = new ImageProcessingThread();
        imageProcessingThread.start();

        if (autoFitTextureView.isAvailable()) {
            openCamera(autoFitTextureView.getWidth(), autoFitTextureView.getHeight());
        } else {
            autoFitTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        }

        hideCamView.setAlpha(1f);
        hideCamView.setVisibility(View.VISIBLE);

        hideCamView.animate()
                .alpha(0f)
                .setStartDelay(100)
                .setDuration(1000)
                .setListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        hideCamView.setVisibility(View.GONE);

                        if (cameraReadyCallback != null) {
                            cameraReadyCallback.ready();
                        }
                    }
                });
    }

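    // enablePreview()/disablePreview() pair camera start/stop with a short fade of hideCamView;
    // onResume() calls enablePreview(), while onPause() performs the same teardown without the
    // fade. Both methods are public, so a host can also call them directly (e.g. to freeze the
    // preview after a successful scan).
    //
    // Illustrative host-side wiring (the activity, handler and id names here are assumptions,
    // not part of this class):
    //
    //   CameraFragment cameraFragment = (CameraFragment) getSupportFragmentManager()
    //           .findFragmentById(R.id.camera_fragment);
    //   cameraFragment.setOnQrAvailableListener(qrData -> handleQrData(qrData));
    //   cameraFragment.setCameraReadyCallback(() -> onCameraReady());
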
    private void setUpBarcodeDetector() {
        barcodeDetector = new BarcodeDetector.Builder(getActivity())
                .setBarcodeFormats(Barcode.QR_CODE)
                .build();

        if (!barcodeDetector.isOperational()) {
            Toast.makeText(getActivity(), "Could not set up QR-Code scanner!", Toast.LENGTH_SHORT).show();
        }
    }

    public void setOnQrAvailableListener(QrScanCallback callback) {
        qrResultCallback = callback;
    }

    public void setCameraReadyCallback(CameraReadyCallback callback) {
        this.cameraReadyCallback = callback;
    }
}