X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=android%2Fsrc%2Fcom%2Fgoogle%2Fzxing%2Fclient%2Fandroid%2FYUVMonochromeBitmapSource.java;h=8aad467387ca3542b27eebae87058dbaea119afa;hb=7eec24ee881d16e10dac4228adb5aa199eec0b29;hp=ff03fcf2f575ee20fc3beba6a8544d8e846e6bf6;hpb=3952301b023d6d26f1ed3cc3d466dd5ef3ec9086;p=zxing.git diff --git a/android/src/com/google/zxing/client/android/YUVMonochromeBitmapSource.java b/android/src/com/google/zxing/client/android/YUVMonochromeBitmapSource.java index ff03fcf2..8aad4673 100755 --- a/android/src/com/google/zxing/client/android/YUVMonochromeBitmapSource.java +++ b/android/src/com/google/zxing/client/android/YUVMonochromeBitmapSource.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2008 Google Inc. + * Copyright (C) 2008 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,110 +17,140 @@ package com.google.zxing.client.android; import android.graphics.Bitmap; -import com.google.zxing.BlackPointEstimationMethod; -import com.google.zxing.MonochromeBitmapSource; -import com.google.zxing.ReaderException; -import com.google.zxing.common.BitArray; -import com.google.zxing.common.BlackPointEstimator; +import android.graphics.Rect; +import com.google.zxing.common.BaseMonochromeBitmapSource; /** - * This object implements MonochromeBitmapSource around an Android Bitmap. Rather than capturing an - * RGB image and calculating the grey value at each pixel, we ask the camera driver for YUV data and - * strip out the luminance channel directly. This should be faster but provides fewer bits, i.e. - * fewer grey levels. + * This object implements MonochromeBitmapSource around an array of YUV data, giving you the option + * to crop to a rectangle within the full data. This can be used to exclude superfluous pixels + * around the perimeter and speed up decoding. 
* 
- * @author dswitkin@google.com (Daniel Switkin) 
- * @author srowen@google.com (Sean Owen) 
- * @deprecated 
+ * @author Sean Owen 
+ * @author Daniel Switkin 
 */ 
-@Deprecated 
-final class YUVMonochromeBitmapSource implements MonochromeBitmapSource { 
+public final class YUVMonochromeBitmapSource extends BaseMonochromeBitmapSource { 
 
- private final Bitmap image; 
- private int blackPoint; 
- private BlackPointEstimationMethod lastMethod; 
- private int lastArgument; 
+ private final byte[] mYUVData; 
+ private final int mDataWidth; 
+ private final int mCropTop; 
+ private final int mCropLeft; 
 
- private static final int LUMINANCE_BITS = 5; 
- private static final int LUMINANCE_SHIFT = 8 - LUMINANCE_BITS; 
- private static final int LUMINANCE_BUCKETS = 1 << LUMINANCE_BITS; 
- 
- YUVMonochromeBitmapSource(Bitmap image) { 
- this.image = image; 
- blackPoint = 0x7F; 
- lastMethod = null; 
- lastArgument = 0; 
+ /** 
+ * Builds an object around a YUV buffer from the camera. The image is not cropped. 
+ * 
+ * @param yuvData A byte array of planar Y data, followed by interleaved U and V 
+ * @param dataWidth The width of the Y data 
+ * @param dataHeight The height of the Y data 
+ */ 
+ public YUVMonochromeBitmapSource(byte[] yuvData, int dataWidth, int dataHeight) { 
+ this(yuvData, dataWidth, dataHeight, 0, 0, dataHeight, dataWidth); 
 } 
 
- public boolean isBlack(int x, int y) { 
- return ((image.getPixel(x, y) >> 16) & 0xFF) < blackPoint; 
+ /** 
+ * Builds an object around a YUV buffer from the camera. The image is cropped and only 
+ * that part of the image is evaluated. 
+ * 
+ * @param yuvData A byte array of planar Y data, followed by interleaved U and V 
+ * @param dataWidth The width of the Y data 
+ * @param dataHeight The height of the Y data 
+ * @param crop The rectangle within the yuvData to expose to MonochromeBitmapSource users 
+ */ 
+ public YUVMonochromeBitmapSource(byte[] yuvData, int dataWidth, int dataHeight, Rect crop) { 
+ this(yuvData, dataWidth, dataHeight, crop.top, crop.left, crop.bottom, crop.right); 
 } 
 
- public BitArray getBlackRow(int y, BitArray row, int startX, int getWidth) { 
- if (row == null) { 
- row = new BitArray(getWidth); 
- } else { 
- row.clear(); 
- } 
- int[] pixelRow = new int[getWidth]; 
- image.getPixels(pixelRow, 0, getWidth, startX, y, getWidth, 1); 
- for (int i = 0; i < getWidth; i++) { 
- if (((pixelRow[i] >> 16) & 0xFF) < blackPoint) { 
- row.set(i); 
- } 
+ /** 
+ * Builds an object around a YUV buffer from the camera. The image is cropped and only 
+ * that part of the image is evaluated. 
+ * 
+ * @param yuvData A byte array of planar Y data, followed by interleaved U and V 
+ * @param dataWidth The width of the Y data 
+ * @param dataHeight The height of the Y data 
+ * @param cropTop Top coordinate of rectangle to crop 
+ * @param cropLeft Left coordinate of rectangle to crop 
+ * @param cropBottom Bottom coordinate of rectangle to crop 
+ * @param cropRight Right coordinate of rectangle to crop 
+ */ 
+ public YUVMonochromeBitmapSource(byte[] yuvData, 
+ int dataWidth, 
+ int dataHeight, 
+ int cropTop, 
+ int cropLeft, 
+ int cropBottom, 
+ int cropRight) { 
+ super(cropRight - cropLeft, cropBottom - cropTop); 
+ if (cropRight - cropLeft > dataWidth || cropBottom - cropTop > dataHeight) { 
+ throw new IllegalArgumentException(); 
 } 
- return row; 
+ mYUVData = yuvData; 
+ mDataWidth = dataWidth; 
+ this.mCropTop = cropTop; 
+ this.mCropLeft = cropLeft; 
 } 
 
- public int getHeight() { 
- return image.height(); 
+ /** 
+ * The Y channel is stored as planar data at the head of the array, so we just ignore the 
+ * interleaved U and 
V which follow it. + * + * @param x The x coordinate to fetch within crop + * @param y The y coordinate to fetch within crop + * @return The luminance as an int, from 0-255 + */ + @Override + protected int getLuminance(int x, int y) { + return mYUVData[(y + mCropTop) * mDataWidth + x + mCropLeft] & 0xff; } - public int getWidth() { - return image.width(); - } - - public void estimateBlackPoint(BlackPointEstimationMethod method, int argument) throws ReaderException { - if (!method.equals(lastMethod) || argument != lastArgument) { - int width = image.width(); - int height = image.height(); - int[] histogram = new int[LUMINANCE_BUCKETS]; - if (method.equals(BlackPointEstimationMethod.TWO_D_SAMPLING)) { - int minDimension = width < height ? width : height; - int startI = height == minDimension ? 0 : (height - width) >> 1; - int startJ = width == minDimension ? 0 : (width - height) >> 1; - for (int n = 0; n < minDimension; n++) { - int pixel = image.getPixel(startJ + n, startI + n); - histogram[((pixel >> 16) & 0xFF) >> LUMINANCE_SHIFT]++; - } - } else if (method.equals(BlackPointEstimationMethod.ROW_SAMPLING)) { - if (argument < 0 || argument >= height) { - throw new IllegalArgumentException("Row is not within the image: " + argument); - } - int[] pixelRow = new int[width]; - image.getPixels(pixelRow, 0, width, 0, argument, width, 1); - for (int x = 0; x < width; x++) { - histogram[((pixelRow[x] >> 16) & 0xFF) >> LUMINANCE_SHIFT]++; - } - } else { - throw new IllegalArgumentException("Unknown method: " + method); - } - blackPoint = BlackPointEstimator.estimate(histogram) << LUMINANCE_SHIFT; - lastMethod = method; - lastArgument = argument; + @Override + protected int[] getLuminanceRow(int y, int[] row) { + int width = getWidth(); + if (row == null || row.length < width) { + row = new int[width]; } + int offset = (y + mCropTop) * mDataWidth + mCropLeft; + byte[] yuvData = mYUVData; + for (int x = 0; x < width; x++) { + row[x] = yuvData[offset + x] & 0xff; + } + return 
row; } - public BlackPointEstimationMethod getLastEstimationMethod() { - return lastMethod; + @Override + protected int[] getLuminanceColumn(int x, int[] column) { + int height = getHeight(); + if (column == null || column.length < height) { + column = new int[height]; + } + int dataWidth = mDataWidth; + int offset = mCropTop * dataWidth + mCropLeft + x; + byte[] yuvData = mYUVData; + for (int y = 0; y < height; y++) { + column[y] = yuvData[offset] & 0xff; + offset += dataWidth; + } + return column; } - public MonochromeBitmapSource rotateCounterClockwise() { - throw new IllegalStateException("Rotate not supported"); - } + /** + * Create a greyscale Android Bitmap from the YUV data based on the crop rectangle. + * + * @return An 8888 bitmap. + */ + public Bitmap renderToBitmap() { + int width = getWidth(); + int height = getHeight(); + int[] pixels = new int[width * height]; + byte[] yuvData = mYUVData; + for (int y = 0, base = mCropTop * mDataWidth + mCropLeft; y < height; y++, base += mDataWidth) { + for (int x = 0; x < width; x++) { + int grey = yuvData[base + x] & 0xff; + pixels[y * width + x] = (0xff << 24) | (grey << 16) | (grey << 8) | grey; + } + } - public boolean isRotateSupported() { - return false; + Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); + bitmap.setPixels(pixels, 0, width, 0, 0, width, height); + return bitmap; } -} \ No newline at end of file +}