在之前寫的一篇文章中,我先提取 bitmap 的像素數組,再通過位移運算取得兩個數組的 alpha 值,按比例混合成一張新的圖片;這種做法對大圖片來說會很卡。所以我在查閱資料後:
通過RenderScript使用GPU加速來獲得非常好的轉換效果,只要使用安卓自身提供的腳本工具類ScriptIntrinsicBlend,配置好參數和輸入輸出,還有混合模式,即可快速完成該任務。
兩張 4K×2K 的 Bitmap 在我的手機上只需約 30ms 即可混合完成。下面貼出我寫的工具類:
package com.meizu.argbinsertyuv;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicBlend;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;
import java.io.OutputStream;
/**@author 陳杰柱**/
/**
 * GPU-accelerated image helpers built on Android RenderScript intrinsics.
 *
 * <p>Provides NV21 → ARGB_8888 conversion ({@link ScriptIntrinsicYuvToRGB}) and
 * alpha blending of two bitmaps ({@link ScriptIntrinsicBlend}). Allocations and
 * output bitmaps are cached between calls and transparently rebuilt whenever the
 * input dimensions change. Not thread-safe: confine each instance to one thread.
 *
 * <p>Call {@link #destroy()} when finished to release native RenderScript memory.
 *
 * @author 陳杰柱
 */
public class YuvUtil {

    private Context mContext; // kept for parity with the original API; not otherwise read
    private RenderScript rs;
    private ScriptIntrinsicYuvToRGB mYuvToRgbIntrinsic;
    private ScriptIntrinsicBlend mScriptIntrinsicBlend;

    // Cache for nv21ToBitmap(); rebuilt when buffer length or frame size changes.
    private Allocation mYuvin, mYuvOut;
    private int mYuvLength = -1, mYuvWidth = -1, mYuvHeight = -1;
    private Bitmap mYuvBmpOut;

    // Cache for bitmapBlendBitmap(); rebuilt when either bitmap's size changes.
    // Kept separate from the YUV cache so the blend's inputs never alias its output.
    private Allocation mARGBPic1, mARGBPic2;
    private int mTopW = -1, mTopH = -1, mBottomW = -1, mBottomH = -1;
    private Bitmap mBlendBmpOut;

    public YuvUtil(Context context) {
        this.mContext = context;
        rs = RenderScript.create(context);
        mYuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
        mScriptIntrinsicBlend = ScriptIntrinsicBlend.create(rs, Element.RGBA_8888(rs));
    }

    /**
     * Converts an NV21 frame to an ARGB_8888 Bitmap on the GPU.
     *
     * <p>The returned Bitmap is owned and reused by this instance — copy it if you
     * need to keep it past the next call.
     *
     * @param nv21   raw NV21 bytes (length must be width * height * 3 / 2)
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @return cached Bitmap containing the converted frame
     */
    public Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        long start = System.currentTimeMillis();
        // Rebuild the allocations if this is the first call OR the input size changed
        // (the original only checked for null, so a size change corrupted the copy).
        if (mYuvin == null || mYuvLength != nv21.length || mYuvWidth != width || mYuvHeight != height) {
            if (mYuvin != null) {
                mYuvin.destroy();
            }
            if (mYuvOut != null) {
                mYuvOut.destroy();
            }
            Type yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length).create();
            mYuvin = Allocation.createTyped(rs, yuvType, Allocation.USAGE_SCRIPT);
            Type rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height).create();
            mYuvOut = Allocation.createTyped(rs, rgbaType, Allocation.USAGE_SCRIPT);
            mYuvLength = nv21.length;
            mYuvWidth = width;
            mYuvHeight = height;
        }
        mYuvin.copyFrom(nv21);
        mYuvToRgbIntrinsic.setInput(mYuvin);
        mYuvToRgbIntrinsic.forEach(mYuvOut);
        if (mYuvBmpOut == null || mYuvBmpOut.getWidth() != width || mYuvBmpOut.getHeight() != height) {
            mYuvBmpOut = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        }
        mYuvOut.copyTo(mYuvBmpOut);
        Log.i("cjztest", "nv21轉bmp" + (System.currentTimeMillis() - start) + "ms");
        return mYuvBmpOut;
    }

    /**
     * Alpha-composites {@code top} over {@code bottom} on the GPU.
     *
     * <p>Uses the DstOver intrinsic with {@code top} as the destination allocation,
     * which yields "top over bottom" compositing. The output Bitmap has the size of
     * {@code top}, is owned by this instance, and is reused across calls.
     *
     * @param bottom background bitmap
     * @param top    foreground bitmap (its alpha controls the blend)
     * @return cached Bitmap containing the blended result
     */
    public Bitmap bitmapBlendBitmap(Bitmap bottom, Bitmap top) {
        long start = System.currentTimeMillis();
        int topW = top.getWidth();
        int topH = top.getHeight();
        int bottomW = bottom.getWidth();
        int bottomH = bottom.getHeight();
        // Rebuild the allocations if this is the first call OR either size changed.
        if (mARGBPic1 == null || mTopW != topW || mTopH != topH
                || mBottomW != bottomW || mBottomH != bottomH) {
            if (mARGBPic1 != null) {
                mARGBPic1.destroy();
            }
            if (mARGBPic2 != null) {
                mARGBPic2.destroy();
            }
            Type topType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(topW).setY(topH).create();
            Type bottomType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(bottomW).setY(bottomH).create();
            mARGBPic1 = Allocation.createTyped(rs, topType, Allocation.USAGE_SCRIPT);
            mARGBPic2 = Allocation.createTyped(rs, bottomType, Allocation.USAGE_SCRIPT);
            mTopW = topW;
            mTopH = topH;
            mBottomW = bottomW;
            mBottomH = bottomH;
        }
        mARGBPic1.copyFrom(top);
        mARGBPic2.copyFrom(bottom);
        // DstOver(src=bottom, dst=top): dst = dst + src * (1 - dst.a) → top over bottom.
        mScriptIntrinsicBlend.forEachDstOver(mARGBPic2, mARGBPic1);
        if (mBlendBmpOut == null || mBlendBmpOut.getWidth() != topW || mBlendBmpOut.getHeight() != topH) {
            mBlendBmpOut = Bitmap.createBitmap(topW, topH, Bitmap.Config.ARGB_8888);
        }
        mARGBPic1.copyTo(mBlendBmpOut);
        Log.i("cjztest", "兩個bmp按照透明度混合使用的時間" + (System.currentTimeMillis() - start) + "ms");
        return mBlendBmpOut;
    }

    /**
     * CPU fallback: compresses an NV21 frame straight to JPEG on {@code os}.
     *
     * @param nv21   raw NV21 bytes
     * @param width  frame width in pixels
     * @param height frame height in pixels
     * @param os     destination stream (caller closes it)
     */
    public void nv21ToBitmapDirectSave(byte[] nv21, int width, int height, OutputStream os) {
        // NV21 has exactly two planes (Y, interleaved VU), so only two row strides
        // are valid — the original passed three.
        YuvImage im = new YuvImage(nv21, ImageFormat.NV21, width, height, new int[]{width, width});
        im.compressToJpeg(new Rect(0, 0, width, height), 80, os);
    }

    /**
     * Releases all native RenderScript resources held by this instance.
     * The instance must not be used after this call.
     */
    public void destroy() {
        if (mYuvin != null) {
            mYuvin.destroy();
            mYuvin = null;
        }
        if (mYuvOut != null) {
            mYuvOut.destroy();
            mYuvOut = null;
        }
        if (mARGBPic1 != null) {
            mARGBPic1.destroy();
            mARGBPic1 = null;
        }
        if (mARGBPic2 != null) {
            mARGBPic2.destroy();
            mARGBPic2 = null;
        }
        mYuvToRgbIntrinsic.destroy();
        mScriptIntrinsicBlend.destroy();
        rs.destroy();
    }
}
另外還可以組合使用,例如如果你希望在 NV21 上混合一張同樣寬高的 bmp,可以這麼用,也就是 NV21 先轉成 BMP 再混合:
Bitmap resultBmp = mYuvUtil.bitmapBlendBitmap(mYuvUtil.nv21ToBitmap(photoBuffer, width, height), waterMarkBmp);