fix: initial commit

This commit is contained in:
2024-12-09 11:25:23 +08:00
parent d0c01071e9
commit 2c2109a5f3
4741 changed files with 290641 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
# Default reviewers for this and subdirectories.
bonianchen@google.com
changbetty@google.com
wengsu@google.com
zoeychen@google.com
# Emergency approvers in case the above are not available

View File

@@ -0,0 +1,490 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settingslib.qrcode;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Message;
import android.util.ArrayMap;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
import android.view.WindowManager;
import androidx.annotation.VisibleForTesting;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.LuminanceSource;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
/**
 * Drives the device camera to continuously capture preview frames and decode them as QR codes
 * on a background task, reporting results through {@link ScannerCallback}.
 *
 * <p>Extends {@link Handler} so periodic {@link #MSG_AUTO_FOCUS} messages can be scheduled on
 * the thread that created this instance (expected to be the main thread).
 */
public class QrCamera extends Handler {
    private static final String TAG = "QrCamera";
    private static final int MSG_AUTO_FOCUS = 1;
    /**
     * The max allowed difference between picture size ratio and preview size ratio.
     * Uses to filter the picture sizes of similar preview size ratio, for example, if a preview
     * size is 1920x1440, MAX_RATIO_DIFF 0.1 could allow picture size of 720x480 or 352x288 or
     * 176x144 but not 1920x1080.
     */
    private static final double MAX_RATIO_DIFF = 0.1;
    private static final long AUTOFOCUS_INTERVAL_MS = 1500L;
    // Decode hints restricting ZXing to the QR code format only. Both collections are populated
    // once in the static initializer and never mutated afterwards, hence declared final.
    private static final Map<DecodeHintType, List<BarcodeFormat>> HINTS = new ArrayMap<>();
    private static final List<BarcodeFormat> FORMATS = new ArrayList<>();
    static {
        FORMATS.add(BarcodeFormat.QR_CODE);
        HINTS.put(DecodeHintType.POSSIBLE_FORMATS, FORMATS);
    }
    @VisibleForTesting
    Camera mCamera;
    // Preview size chosen in setCameraParameter(); read by the decode and transform paths.
    private Size mPreviewSize;
    // Weak reference avoids leaking the (likely Activity) context while the camera runs.
    private WeakReference<Context> mContext;
    private ScannerCallback mScannerCallback;
    private MultiFormatReader mReader;
    private DecodingTask mDecodeTask;
    // Clockwise rotation of the camera sensor relative to the device's natural orientation.
    private int mCameraOrientation;
    @VisibleForTesting
    Camera.Parameters mParameters;
    public QrCamera(Context context, ScannerCallback callback) {
        mContext = new WeakReference<Context>(context);
        mScannerCallback = callback;
        mReader = new MultiFormatReader();
        mReader.setHints(HINTS);
    }
    /**
     * The function start camera preview and capture pictures to decode QR code continuously in a
     * background task.
     *
     * @param surface The surface to be used for live preview.
     */
    public void start(SurfaceTexture surface) {
        if (mDecodeTask == null) {
            mDecodeTask = new DecodingTask(surface);
            // Execute in the separate thread pool to prevent block other AsyncTask.
            mDecodeTask.executeOnExecutor(Executors.newSingleThreadExecutor());
        }
    }
    /**
     * The function stop camera preview and background decode task. Caller call this function when
     * the surface is being destroyed.
     */
    public void stop() {
        // Stop the periodic auto-focus loop first so no message fires after release.
        removeMessages(MSG_AUTO_FOCUS);
        if (mDecodeTask != null) {
            mDecodeTask.cancel(true);
            mDecodeTask = null;
        }
        if (mCamera != null) {
            try {
                mCamera.stopPreview();
                releaseCamera();
            } catch (RuntimeException e) {
                Log.e(TAG, "Stop previewing camera failed:" + e);
                mCamera = null;
            }
        }
    }
    /** The scanner which includes this QrCodeCamera class should implement this */
    public interface ScannerCallback {
        /**
         * The function used to handle the decoding result of the QR code.
         *
         * @param result the result QR code after decoding.
         */
        void handleSuccessfulResult(String result);
        /** Request the QR code scanner to handle the failure happened. */
        void handleCameraFailure();
        /**
         * The function used to get the background View size.
         *
         * @return Includes the background view size.
         */
        Size getViewSize();
        /**
         * The function used to get the frame position inside the view
         *
         * @param previewSize Is the preview size set by camera
         * @param cameraOrientation Is the orientation of current Camera
         * @return The rectangle would like to crop from the camera preview shot.
         */
        Rect getFramePosition(Size previewSize, int cameraOrientation);
        /**
         * Sets the transform to associate with preview area.
         *
         * @param transform The transform to apply to the content of preview
         */
        void setTransform(Matrix transform);
        /**
         * Verify QR code is valid or not. The camera will stop scanning if this callback returns
         * true.
         *
         * @param qrCode The result QR code after decoding.
         * @return Returns true if qrCode hold valid information.
         */
        boolean isValid(String qrCode);
    }
    /**
     * Configures preview/picture sizes, flash, and focus mode on the opened camera.
     * Must be called after {@link Camera#open} and before starting the preview.
     */
    @VisibleForTesting
    void setCameraParameter() {
        mParameters = mCamera.getParameters();
        mPreviewSize = getBestPreviewSize(mParameters);
        mParameters.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        Size pictureSize = getBestPictureSize(mParameters);
        mParameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
        // Flash is never needed for scanning; turn it off when the mode is supported.
        final List<String> supportedFlashModes = mParameters.getSupportedFlashModes();
        if (supportedFlashModes != null &&
                supportedFlashModes.contains(Camera.Parameters.FLASH_MODE_OFF)) {
            mParameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        }
        // Prefer continuous focus; fall back to one-shot auto focus driven by MSG_AUTO_FOCUS.
        final List<String> supportedFocusModes = mParameters.getSupportedFocusModes();
        if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
            mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        } else if (supportedFocusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }
        mCamera.setParameters(mParameters);
    }
    /**
     * Rotates the preview to match the current display rotation and starts it.
     *
     * @return false when the context has been garbage collected (caller treats it as failure).
     */
    private boolean startPreview() {
        if (mContext.get() == null) {
            return false;
        }
        final WindowManager winManager =
                (WindowManager) mContext.get().getSystemService(Context.WINDOW_SERVICE);
        final int rotation = winManager.getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }
        // Standard back-camera display-orientation formula from the Camera API docs.
        final int rotateDegrees = (mCameraOrientation - degrees + 360) % 360;
        mCamera.setDisplayOrientation(rotateDegrees);
        mCamera.startPreview();
        if (Camera.Parameters.FOCUS_MODE_AUTO.equals(mParameters.getFocusMode())) {
            mCamera.autoFocus(/* Camera.AutoFocusCallback */ null);
            sendMessageDelayed(obtainMessage(MSG_AUTO_FOCUS), AUTOFOCUS_INTERVAL_MS);
        }
        return true;
    }
    /**
     * Background task that opens the camera and loops on one-shot preview frames until a QR code
     * that {@link ScannerCallback#isValid} accepts is decoded, or the task is cancelled.
     */
    private class DecodingTask extends AsyncTask<Void, Void, String> {
        private QrYuvLuminanceSource mImage;
        private SurfaceTexture mSurface;
        private DecodingTask(SurfaceTexture surface) {
            mSurface = surface;
        }
        @Override
        protected String doInBackground(Void... tmp) {
            if (!initCamera(mSurface)) {
                return null;
            }
            final Semaphore imageGot = new Semaphore(0);
            while (true) {
                // This loop will try to capture preview image continuously until a valid QR Code
                // decoded. The caller can also call {@link #stop()} to interrupts scanning loop.
                mCamera.setOneShotPreviewCallback(
                        (imageData, camera) -> {
                            mImage = getFrameImage(imageData);
                            imageGot.release();
                        });
                try {
                    // Semaphore.acquire() blocking until permit is available, or the thread is
                    // interrupted.
                    imageGot.acquire();
                    Result qrCode = decodeQrCode(mImage);
                    if (qrCode == null) {
                        // Check color inversion QR code
                        qrCode = decodeQrCode(mImage.invert());
                    }
                    if (qrCode != null) {
                        if (mScannerCallback.isValid(qrCode.getText())) {
                            return qrCode.getText();
                        }
                    }
                } catch (InterruptedException e) {
                    // Cancellation path: restore the interrupt flag and bail out.
                    Thread.currentThread().interrupt();
                    return null;
                }
            }
        }
        /**
         * Attempts a single decode of the captured frame.
         *
         * @return the decoded result, or null when the frame contains no readable QR code.
         */
        private Result decodeQrCode(LuminanceSource source) {
            try {
                return mReader.decodeWithState(new BinaryBitmap(new HybridBinarizer(source)));
            } catch (ReaderException e) {
                // No logging since every time the reader cannot decode the
                // image, this ReaderException will be thrown.
            } finally {
                mReader.reset();
            }
            return null;
        }
        @Override
        protected void onPostExecute(String qrCode) {
            if (qrCode != null) {
                mScannerCallback.handleSuccessfulResult(qrCode);
            }
        }
        /**
         * Opens the back camera (or the first available one as a fallback), binds the preview
         * surface and starts previewing. Reports failures through the callback.
         *
         * @return true when the camera is previewing and ready for frame capture.
         */
        private boolean initCamera(SurfaceTexture surface) {
            final int numberOfCameras = Camera.getNumberOfCameras();
            Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
            try {
                for (int i = 0; i < numberOfCameras; ++i) {
                    Camera.getCameraInfo(i, cameraInfo);
                    if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
                        releaseCamera();
                        mCamera = Camera.open(i);
                        mCameraOrientation = cameraInfo.orientation;
                        break;
                    }
                }
                if (mCamera == null && numberOfCameras > 0) {
                    Log.i(TAG, "Can't find back camera. Opening a different camera");
                    Camera.getCameraInfo(0, cameraInfo);
                    releaseCamera();
                    mCamera = Camera.open(0);
                    mCameraOrientation = cameraInfo.orientation;
                }
            } catch (RuntimeException e) {
                Log.e(TAG, "Fail to open camera: " + e);
                mCamera = null;
                mScannerCallback.handleCameraFailure();
                return false;
            }
            try {
                if (mCamera == null) {
                    throw new IOException("Cannot find available camera");
                }
                mCamera.setPreviewTexture(surface);
                setCameraParameter();
                setTransformationMatrix();
                if (!startPreview()) {
                    throw new IOException("Lost context");
                }
            } catch (IOException ioe) {
                Log.e(TAG, "Fail to startPreview camera: " + ioe);
                mCamera = null;
                mScannerCallback.handleCameraFailure();
                return false;
            }
            return true;
        }
    }
    /** Releases the camera hardware and clears the reference; safe to call repeatedly. */
    private void releaseCamera() {
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }
    /** Set transform matrix to crop and center the preview picture */
    private void setTransformationMatrix() {
        // NOTE(review): relies on mContext still being alive; callers invoke this right after
        // setCameraParameter() while the owning component exists.
        final boolean isPortrait = mContext.get().getResources().getConfiguration().orientation
                == Configuration.ORIENTATION_PORTRAIT;
        final int previewWidth = isPortrait ? mPreviewSize.getWidth() : mPreviewSize.getHeight();
        final int previewHeight = isPortrait ? mPreviewSize.getHeight() : mPreviewSize.getWidth();
        final float ratioPreview = (float) getRatio(previewWidth, previewHeight);
        // Calculate transformation matrix.
        float scaleX = 1.0f;
        float scaleY = 1.0f;
        if (previewWidth > previewHeight) {
            scaleY = scaleX / ratioPreview;
        } else {
            scaleX = scaleY / ratioPreview;
        }
        // Set the transform matrix.
        final Matrix matrix = new Matrix();
        matrix.setScale(scaleX, scaleY);
        mScannerCallback.setTransform(matrix);
    }
    /**
     * Wraps a raw preview buffer in a luminance source cropped to the scanning frame supplied
     * by the callback.
     */
    private QrYuvLuminanceSource getFrameImage(byte[] imageData) {
        final Rect frame = mScannerCallback.getFramePosition(mPreviewSize, mCameraOrientation);
        final QrYuvLuminanceSource image = new QrYuvLuminanceSource(imageData,
                mPreviewSize.getWidth(), mPreviewSize.getHeight());
        return (QrYuvLuminanceSource)
                image.crop(frame.left, frame.top, frame.width(), frame.height());
    }
    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case MSG_AUTO_FOCUS:
                // Calling autoFocus(null) will only trigger the camera to focus once. In order
                // to make the camera continuously auto focus during scanning, need to periodically
                // trigger it. Guard against a message that was already queued when the camera
                // was released (stop() or a camera failure) to avoid an NPE.
                if (mCamera != null) {
                    mCamera.autoFocus(/* Camera.AutoFocusCallback */ null);
                    sendMessageDelayed(obtainMessage(MSG_AUTO_FOCUS), AUTOFOCUS_INTERVAL_MS);
                }
                break;
            default:
                Log.d(TAG, "Unexpected Message: " + msg.what);
        }
    }
    /**
     * Get best preview size from the list of camera supported preview sizes. Compares the
     * preview size and aspect ratio to choose the best one.
     */
    private Size getBestPreviewSize(Camera.Parameters parameters) {
        final double minRatioDiffPercent = 0.1;
        final Size windowSize = mScannerCallback.getViewSize();
        final double winRatio = getRatio(windowSize.getWidth(), windowSize.getHeight());
        double bestChoiceRatio = 0;
        Size bestChoice = new Size(0, 0);
        // Prefer the largest area; a candidate whose ratio is within minRatioDiffPercent of the
        // window ratio may replace one that is not.
        for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
            double ratio = getRatio(size.width, size.height);
            if (size.height * size.width > bestChoice.getWidth() * bestChoice.getHeight()
                    && (Math.abs(bestChoiceRatio - winRatio) / winRatio > minRatioDiffPercent
                    || Math.abs(ratio - winRatio) / winRatio <= minRatioDiffPercent)) {
                bestChoice = new Size(size.width, size.height);
                bestChoiceRatio = getRatio(size.width, size.height);
            }
        }
        return bestChoice;
    }
    /**
     * Get best picture size from the list of camera supported picture sizes. Compares the
     * picture size and aspect ratio to choose the best one.
     */
    private Size getBestPictureSize(Camera.Parameters parameters) {
        final Camera.Size previewSize = parameters.getPreviewSize();
        final double previewRatio = getRatio(previewSize.width, previewSize.height);
        List<Size> bestChoices = new ArrayList<>();
        final List<Size> similarChoices = new ArrayList<>();
        // Filter by ratio
        for (Camera.Size size : parameters.getSupportedPictureSizes()) {
            double ratio = getRatio(size.width, size.height);
            if (ratio == previewRatio) {
                bestChoices.add(new Size(size.width, size.height));
            } else if (Math.abs(ratio - previewRatio) < MAX_RATIO_DIFF) {
                similarChoices.add(new Size(size.width, size.height));
            }
        }
        if (bestChoices.size() == 0 && similarChoices.size() == 0) {
            Log.d(TAG, "No proper picture size, return default picture size");
            Camera.Size defaultPictureSize = parameters.getPictureSize();
            return new Size(defaultPictureSize.width, defaultPictureSize.height);
        }
        if (bestChoices.size() == 0) {
            bestChoices = similarChoices;
        }
        // Get the best by area: minimize the difference to the preview area.
        int bestAreaDifference = Integer.MAX_VALUE;
        Size bestChoice = null;
        final int previewArea = previewSize.width * previewSize.height;
        for (Size size : bestChoices) {
            int areaDifference = Math.abs(size.getWidth() * size.getHeight() - previewArea);
            if (areaDifference < bestAreaDifference) {
                bestAreaDifference = areaDifference;
                bestChoice = size;
            }
        }
        return bestChoice;
    }
    /** Returns the aspect ratio normalized to (0, 1]: smaller dimension over larger one. */
    private double getRatio(double x, double y) {
        return (x < y) ? x / y : y / x;
    }
    /** Decodes a single prepared bitmap; reports a successful result through the callback. */
    @VisibleForTesting
    protected void decodeImage(BinaryBitmap image) {
        Result qrCode = null;
        try {
            qrCode = mReader.decodeWithState(image);
        } catch (ReaderException e) {
            // Expected whenever the image holds no readable QR code; intentionally ignored.
        } finally {
            mReader.reset();
        }
        if (qrCode != null) {
            mScannerCallback.handleSuccessfulResult(qrCode.getText());
        }
    }
    /**
     * After {@link #start(SurfaceTexture)}, DecodingTask runs continuously to capture images and
     * decode QR code. DecodingTask become null After {@link #stop()}.
     *
     * Uses this method in test case to prevent power consumption problem.
     */
    public boolean isDecodeTaskAlive() {
        return mDecodeTask != null;
    }
}

View File

@@ -0,0 +1,86 @@
/*
* Copyright (C) 2023 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settingslib.qrcode
import android.annotation.ColorInt
import android.graphics.Bitmap
import android.graphics.Color
import com.google.zxing.BarcodeFormat
import com.google.zxing.EncodeHintType
import com.google.zxing.MultiFormatWriter
import com.google.zxing.WriterException
import java.nio.charset.StandardCharsets
import java.util.EnumMap
/** Utility for rendering string contents as a QR code [Bitmap] via ZXing. */
object QrCodeGenerator {
    // Sentinel meaning "do not pass a margin hint to the encoder".
    private const val DEFAULT_MARGIN = -1

    /**
     * Generates a barcode image with [contents].
     *
     * @param contents The contents to encode in the barcode
     * @param size The preferred image size in pixels
     * @param invert Whether to invert the black/white pixels (e.g. for dark mode)
     * @return Barcode bitmap
     */
    @JvmStatic
    @Throws(WriterException::class, java.lang.IllegalArgumentException::class)
    fun encodeQrCode(contents: String, size: Int, invert: Boolean): Bitmap =
        encodeQrCode(contents, size, DEFAULT_MARGIN, invert)

    /**
     * Generates a barcode image with [contents].
     *
     * @param contents The contents to encode in the barcode
     * @param size The preferred image size in pixels
     * @param margin The margin around the actual barcode
     * @param invert Whether to invert the black/white pixels (e.g. for dark mode)
     * @return Barcode bitmap
     */
    @JvmOverloads
    @JvmStatic
    @Throws(WriterException::class, IllegalArgumentException::class)
    fun encodeQrCode(
        contents: String,
        size: Int,
        margin: Int = DEFAULT_MARGIN,
        invert: Boolean = false,
    ): Bitmap {
        val encodeHints = EnumMap<EncodeHintType, Any>(EncodeHintType::class.java).apply {
            // ZXing defaults to ISO-8859-1; only request UTF-8 when the contents need it.
            if (!isIso88591(contents)) {
                put(EncodeHintType.CHARACTER_SET, StandardCharsets.UTF_8.name())
            }
            if (margin != DEFAULT_MARGIN) {
                put(EncodeHintType.MARGIN, margin)
            }
        }
        val bitMatrix =
            MultiFormatWriter().encode(contents, BarcodeFormat.QR_CODE, size, size, encodeHints)
        @ColorInt val onColor = if (invert) Color.WHITE else Color.BLACK
        @ColorInt val offColor = if (invert) Color.BLACK else Color.WHITE
        // Preserve the original mapping: buffer index (x * size + y) mirrors matrix bit (x, y).
        val pixelBuffer = IntArray(size * size) { index ->
            val x = index / size
            val y = index % size
            if (bitMatrix[x, y]) onColor else offColor
        }
        val bitmap = Bitmap.createBitmap(size, size, Bitmap.Config.RGB_565)
        bitmap.setPixels(pixelBuffer, 0, size, 0, 0, size, size)
        return bitmap
    }

    /** True when [contents] fits entirely in ISO-8859-1, the encoder's default charset. */
    private fun isIso88591(contents: String): Boolean =
        StandardCharsets.ISO_8859_1.newEncoder().canEncode(contents)
}

View File

@@ -0,0 +1,142 @@
/**
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settingslib.qrcode;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import com.android.settingslib.R;
/**
 * A decoration overlay for the QR scanner: dims the whole view with a translucent background
 * and punches out a rounded-rectangle scanning frame in the center, drawn with a colored
 * border that changes color when a QR code is focused.
 */
public class QrDecorateView extends View {
    private static final float CORNER_STROKE_WIDTH = 4f;    // 4dp
    private static final float CORNER_LINE_LENGTH = 264f;   // 264dp
    private static final float CORNER_RADIUS = 16f;         // 16dp

    private final int mCornerColor;
    private final int mFocusedCornerColor;
    private final int mBackgroundColor;
    private final Paint mStrokePaint;
    // CLEAR-mode paint that erases the frame interior from the mask, leaving it transparent.
    private final Paint mTransparentPaint;
    private final Paint mBackgroundPaint;
    private final float mRadius;
    private final float mInnerRadius;
    // Off-screen mask (background + frame) composited onto the view in onDraw().
    private Bitmap mMaskBitmap;
    private Canvas mMaskCanvas;
    private RectF mOuterFrame;
    private RectF mInnerFrame;
    private boolean mFocused;

    public QrDecorateView(Context context) {
        this(context, null);
    }

    public QrDecorateView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public QrDecorateView(Context context, AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    public QrDecorateView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        mFocused = false;
        mRadius = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, CORNER_RADIUS,
                getResources().getDisplayMetrics());
        // Inner radius needs to minus stroke width for keeping the width of border consistent.
        mInnerRadius = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                CORNER_RADIUS - CORNER_STROKE_WIDTH, getResources().getDisplayMetrics());
        mCornerColor = context.getResources().getColor(R.color.qr_corner_line_color);
        mFocusedCornerColor = context.getResources().getColor(R.color.qr_focused_corner_line_color);
        mBackgroundColor = context.getResources().getColor(R.color.qr_background_color);
        mStrokePaint = new Paint();
        mStrokePaint.setAntiAlias(true);
        mTransparentPaint = new Paint();
        mTransparentPaint.setAntiAlias(true);
        mTransparentPaint.setColor(getResources().getColor(android.R.color.transparent));
        mTransparentPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
        mBackgroundPaint = new Paint();
        mBackgroundPaint.setColor(mBackgroundColor);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // (Re)create the mask on first layout AND whenever the view dimensions change;
        // reusing a stale-sized bitmap would draw the frame at the wrong position/scale.
        if (mMaskBitmap == null
                || mMaskBitmap.getWidth() != getWidth()
                || mMaskBitmap.getHeight() != getHeight()) {
            if (mMaskBitmap != null) {
                mMaskBitmap.recycle();
            }
            mMaskBitmap = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
            mMaskCanvas = new Canvas(mMaskBitmap);
        }
        calculateFramePos();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        if (mMaskCanvas != null && mMaskBitmap != null) {
            // Set frame line color.
            mStrokePaint.setColor(mFocused ? mFocusedCornerColor : mCornerColor);
            // Draw background color.
            mMaskCanvas.drawColor(mBackgroundColor);
            // Draw outer corner.
            mMaskCanvas.drawRoundRect(mOuterFrame, mRadius, mRadius, mStrokePaint);
            // Draw inner transparent corner.
            mMaskCanvas.drawRoundRect(mInnerFrame, mInnerRadius, mInnerRadius, mTransparentPaint);
            canvas.drawBitmap(mMaskBitmap, 0, 0, mBackgroundPaint);
        }
        super.onDraw(canvas);
    }

    /** Computes the outer/inner frame rectangles, centered on the current view size. */
    private void calculateFramePos() {
        final int centralX = getWidth() / 2;
        final int centralY = getHeight() / 2;
        final float cornerLineLength = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                CORNER_LINE_LENGTH, getResources().getDisplayMetrics()) / 2;
        final float strokeWidth = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                CORNER_STROKE_WIDTH, getResources().getDisplayMetrics());
        mOuterFrame = new RectF(centralX - cornerLineLength, centralY - cornerLineLength,
                centralX + cornerLineLength, centralY + cornerLineLength);
        // Inset by the stroke width so the visible border thickness stays constant.
        mInnerFrame = new RectF(mOuterFrame.left + strokeWidth, mOuterFrame.top + strokeWidth,
                mOuterFrame.right - strokeWidth, mOuterFrame.bottom - strokeWidth);
    }

    // Draws green lines if focused. Otherwise, draws white lines.
    public void setFocused(boolean focused) {
        mFocused = focused;
        invalidate();
    }
}

View File

@@ -0,0 +1,72 @@
/**
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settingslib.qrcode;
import com.google.zxing.LuminanceSource;
/**
 * A ZXing {@link LuminanceSource} backed by the luminance (Y) plane of a camera preview frame
 * in a YUV format, stored row-major as one byte per pixel.
 */
public class QrYuvLuminanceSource extends LuminanceSource {
    // Raw luminance plane, row-major, mWidth bytes per row. Never reassigned after construction.
    private final byte[] mYuvData;
    private final int mWidth;
    private final int mHeight;

    /**
     * @param yuvData the luminance plane, at least {@code width * height} bytes
     * @param width   frame width in pixels
     * @param height  frame height in pixels
     */
    public QrYuvLuminanceSource(byte[] yuvData, int width, int height) {
        super(width, height);
        mWidth = width;
        mHeight = height;
        mYuvData = yuvData;
    }

    @Override
    public boolean isCropSupported() {
        return true;
    }

    /**
     * Returns a new source containing only the requested rectangle.
     *
     * @throws IllegalArgumentException if the rectangle lies (partly) outside the frame
     */
    @Override
    public LuminanceSource crop(int left, int top, int crop_width, int crop_height) {
        // Validate before allocating: an out-of-range request (including negative offsets)
        // must fail with IllegalArgumentException rather than a raw array bounds error.
        if (left < 0 || top < 0
                || left + crop_width > mWidth || top + crop_height > mHeight) {
            throw new IllegalArgumentException("cropped rectangle does not fit within image data.");
        }
        final byte[] newImage = new byte[crop_width * crop_height];
        int inputOffset = top * mWidth + left;
        // Copy one cropped row at a time; advance by the full source stride each iteration.
        for (int y = 0; y < crop_height; y++) {
            System.arraycopy(mYuvData, inputOffset, newImage, y * crop_width, crop_width);
            inputOffset += mWidth;
        }
        return new QrYuvLuminanceSource(newImage, crop_width, crop_height);
    }

    /**
     * Copies row {@code y} into {@code row}, reallocating it when null or too small.
     *
     * @throws IllegalArgumentException if {@code y} is outside the image
     */
    @Override
    public byte[] getRow(int y, byte[] row) {
        if (y < 0 || y >= mHeight) {
            throw new IllegalArgumentException("Requested row is outside the image: " + y);
        }
        if (row == null || row.length < mWidth) {
            row = new byte[mWidth];
        }
        System.arraycopy(mYuvData, y * mWidth, row, 0, mWidth);
        return row;
    }

    /** Returns the backing luminance array directly (not a copy) for full-frame access. */
    @Override
    public byte[] getMatrix() {
        return mYuvData;
    }
}