Android Eclipse RenderScript: A First Look (Fast Image Processing)

See: "Android preview YUV conversion with RenderScript optimization"

https://blog.youkuaiyun.com/zhjali123/article/details/80086987

And: "Compute: RenderScript Runtime API Reference: Introduction"

https://blog.youkuaiyun.com/Explorer_day/article/details/71641333

The hands-on walkthrough below is based on the two articles above.

1. Create an Android 5.0.1 project named "hellorenderscript".

2. Create the layout

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:id="@+id/LinearLayout1"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    tools:context="com.example.hellorenderscript.MainActivity" >

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content" >

        <Button
            android:id="@+id/Camera1"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_weight="1"
            android:text="Camera" />

        <Button
            android:id="@+id/PauseButton"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_weight="1"
            android:text="Pause/Start" />

    </LinearLayout>

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_weight="1"
        android:orientation="vertical" >

        <ImageView
            android:id="@+id/imageView1"
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:layout_weight="6.82"
            android:src="@drawable/ic_launcher" />
    </LinearLayout>

</LinearLayout>

3. Add the following line to the project.properties file: renderscript.target=18
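For reference, project.properties would then contain something like the following (the target line is whatever ADT generated for your project; only the renderscript.target line is new):

target=android-21
renderscript.target=18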

4. Create "hello.rs"

Its contents are shown below. If Eclipse flags red warnings, fix the code and remember to clean the project afterwards (Eclipse menu Project ==> Clean).

#pragma version(1)
#pragma rs java_package_name(com.hc.renderscript)

rs_allocation inputAllocation;
uchar4 out;
int wIn, hIn;
int numTotalPixels;
static float scaleInv;
static int inputWidth, inputHeight, outputWidth, outputHeight;

uchar4 __attribute__((kernel)) invert(uchar4 in)
{
    uchar4 out = in;
    out.r = 255 - in.r;
    out.g = 255 - in.g;
    out.b = 255 - in.b;
    return out;
}

const static float3 gMonoMult1 = {0.299f, 0.587f, 0.114f};

void RGBA2Gray(const uchar4 *v_in, uchar4 *v_out) {
    float4 f4 = rsUnpackColor8888(*v_in); // unpack the input color into r, g, b, a floats
    float3 mono = dot(f4.rgb, gMonoMult1); // the dot product gives the luminance, splatted into all three channels
    *v_out = rsPackColorTo8888(mono);

    //return 20;  // the return value (in ms) would control the refresh rate, i.e. refresh every 20 ms
}



// Function to invoke before applying conversion
void setInputImageSize(int _w, int _h)
{
    wIn = _w;
    hIn = _h;
    numTotalPixels = wIn * hIn;
}


void setInformation(int _inputWidth, int _inputHeight,
    int _outputWidth, int _outputHeight){

    inputWidth = _inputWidth;
    inputHeight = _inputHeight;
    outputWidth = _outputWidth;
    outputHeight = _outputHeight;

    // Calculates inverse scale factor, by which
    // to round coordinates.
    //
    // Ex:
    // Input size is 100
    // Output desired size is 25
    //
    // Scale factor is 25 / 100 = 0.25
    // Inverse scale factor is 1 / 0.25 = 4
    //
    // When iterating directly on the output
    // allocation, to get input element it is needed
    // to use the inverse scale factor.
    //
    // Current output element index is 20
    // Respective input element index is 20 * 4 = 80
    //
    scaleInv = (float)inputWidth/(float)outputWidth;
}


static uchar Myclamp(short x, uchar Mymin, uchar Mymax){

	if (x > Mymax)
		return Mymax;
	if (x < Mymin)
		return Mymin;
	return (uchar)x;
}

// Kernel that converts one YUV (NV21) element to an RGBA one
uchar4 __attribute__((kernel)) convert(uint32_t x, uint32_t y)
{
    // NV21: an 8-bit Y plane followed by an interleaved V/U plane with
    // 8-bit 2x2 subsampled chroma samples.
    // Every 2x2 block of Y samples (two rows, two columns) shares one V/U pair.
    int baseIdx = x + y * wIn;

    int baseVYIndex = numTotalPixels + (y >> 1) * wIn + (x & 0xfffffe);
    int baseUYIndex = baseVYIndex + 1;

    uchar _y = rsGetElementAt_uchar(inputAllocation, baseIdx);
    uchar _u = rsGetElementAt_uchar(inputAllocation, baseUYIndex);
    uchar _v = rsGetElementAt_uchar(inputAllocation, baseVYIndex);
    _y = _y < 16 ? 16 : _y;

    short Y = ((short)_y) - 16;
    short U = ((short)_u) - 128;
    short V = ((short)_v) - 128;

    uchar4 out;

    out.r =  (uchar)Myclamp(((Y * 298 + V * 408 + 128) >> 8), 0, 255);
    out.g =  (uchar)Myclamp(((Y * 298 - U * 100 - V * 208 + 128) >> 8), 0, 255);
    out.b =  (uchar)Myclamp(((Y * 298 + U * 516 + 128) >> 8), 0, 255);

	out.a = 255;

	// For a grayscale image, use the Y value for all three channels instead:
	// out.r = Y;
	// out.g = Y;
	// out.b = Y;
    return out;
}

uchar4 __attribute__((kernel)) rotate_90_clockwise(uint32_t x, uint32_t y) {
    // The output allocation is hIn wide and wIn tall;
    // output pixel (x, y) is read from input pixel (wIn - 1 - y, x).
    uint32_t inX = wIn - 1 - y;
    uint32_t inY = x;
    return rsGetElementAt_uchar4(inputAllocation, inX, inY);
}

uchar4 __attribute__((kernel)) rotate_270_clockwise(uint32_t x, uint32_t y) {
    // The output allocation is hIn wide and wIn tall;
    // output pixel (x, y) is read from input pixel (y, hIn - 1 - x).
    uint32_t inX = y;
    uint32_t inY = hIn - 1 - x;
    return rsGetElementAt_uchar4(inputAllocation, inX, inY);
}


// (Currently unused helper.)
static float Limit(float x, float Mymin, float Mymax){

	if (x > Mymax)
		return Mymax;
	if (x < Mymin)
		return Mymin;
	return x;
}




uchar4 __attribute__((kernel)) resizeNearest(uint32_t x, uint32_t y) {

    // Map the output coordinate back to the input using the inverse scale factor,
    // clamping to the last valid input pixel.
    float fX = clamp(x * scaleInv, 0.0f, (float)(inputWidth - 1));
    float fY = clamp(y * scaleInv, 0.0f, (float)(inputHeight - 1));

    return rsGetElementAt_uchar4(inputAllocation, (uint32_t)fX, (uint32_t)fY);
}
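
The invert kernel above is not actually used by the RSImgProcess class in step 6; as a quick sanity check you could run it on a Bitmap from Java roughly like this (a minimal sketch; context, srcBitmap and dstBitmap are illustrative names, and dstBitmap must be an ARGB_8888 Bitmap of the same size as srcBitmap):

RenderScript rs = RenderScript.create(context);
ScriptC_hello script = new ScriptC_hello(rs);

// Wrap the source Bitmap in an Allocation and create a matching output Allocation.
Allocation inAlloc = Allocation.createFromBitmap(rs, srcBitmap);
Allocation outAlloc = Allocation.createTyped(rs, inAlloc.getType());

// Run the kernel once per pixel, then copy the result back into a Bitmap.
script.forEach_invert(inAlloc, outAlloc);
outAlloc.copyTo(dstBitmap);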



5. Save hello.rs. If there are no errors, Eclipse automatically generates a "ScriptC_"-prefixed class, ScriptC_hello, in the package named by the java_package_name pragma (com.hc.renderscript).

6. Create a class "RSImgProcess"

package com.example.hellorenderscript;


import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicBlur;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import android.util.Log;

import com.hc.renderscript.ScriptC_hello;

public class RSImgProcess {
    private static RenderScript mRS;
    final float per = 1.0f;
    private int outwidth, outheight;
    private static ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
    private static Type.Builder yuvType;
    private static Type.Builder rgbaType;
    private static Allocation in;
    private static Allocation out;

    private ScriptC_hello customYUVToRGBAScript;
    private ScriptC_hello resizeScript;
    private ScriptIntrinsicBlur blurScript;

    private boolean isInit = false;
    ThreadPoolExecutor GlobalThreadPool;

    public RSImgProcess(final Context context) {
    	
    	GlobalThreadPool = new ThreadPoolExecutor(4, 4, 2, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(200));
    	
        GlobalThreadPool.execute(new Runnable() {
            @Override
            public void run() {
               
                    init(context);
                    isInit = true;
                    
                    Log.i("RSImgProcess","Init");
                

            }
        });

    }


    private void init(Context context) {
        // Creates a RS context.
        mRS = RenderScript.create(context);
        customYUVToRGBAScript = new ScriptC_hello(mRS);
        // A second instance of the same script, used for the resize/rotate kernels.
        resizeScript = new ScriptC_hello(mRS);
    }

  
    private Allocation resize(Allocation inputAllocation, int width, int height) {
       
        
        // Creates an output Allocation in which to store the resized result
        Type.Builder tbOutput = new Type.Builder(mRS, Element.RGBA_8888(mRS));
        tbOutput.setX(outwidth);
        tbOutput.setY(outheight);
        Allocation outputAllocation = Allocation.createTyped(mRS, tbOutput.create());

        resizeScript.set_inputAllocation(inputAllocation);
        resizeScript.invoke_setInformation(width, height, outwidth, outheight);
        resizeScript.forEach_resizeNearest(outputAllocation);
        return outputAllocation;
    }

    
    private Allocation Rotate90(Allocation inputAllocation, int width, int height) {
       
        
        // Creates an output Allocation in which to store the rotated result
        Type.Builder tbOutput = new Type.Builder(mRS, Element.RGBA_8888(mRS));
        tbOutput.setX(height);
        tbOutput.setY(width);
        Allocation outputAllocation = Allocation.createTyped(mRS, tbOutput.create());

        resizeScript.set_inputAllocation(inputAllocation);
        resizeScript.invoke_setInputImageSize(width, height);
        resizeScript.forEach_rotate_90_clockwise(outputAllocation);
        return outputAllocation;
    }   
    
   private Allocation Rotate270(Allocation inputAllocation, int width, int height) {
       
        
        // Creates an output Allocation in which to store the rotated result
        Type.Builder tbOutput = new Type.Builder(mRS, Element.RGBA_8888(mRS));
        tbOutput.setX(height);
        tbOutput.setY(width);
        Allocation outputAllocation = Allocation.createTyped(mRS, tbOutput.create());

        resizeScript.set_inputAllocation(inputAllocation);
        resizeScript.invoke_setInputImageSize(width, height);
        resizeScript.forEach_rotate_270_clockwise(outputAllocation);
        return outputAllocation;
    } 
    
    
    private Allocation YUVToRGBA(byte[] dataIn, int width, int height) {
        // Calculates expected YUV bytes count as YUV is not a human friendly way of storing data:
        // https://en.wikipedia.org/wiki/YUV#Y.27UV420p_.28and_Y.27V12_or_YV12.29_to_RGB888_conversion
        int expectedBytes = (int) (width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8f);

        Type.Builder yuvTypeBuilder = new Type.Builder(mRS, Element.U8(mRS)).setX(expectedBytes);
        Type yuvType = yuvTypeBuilder.create();
        Allocation inputAllocation = Allocation.createTyped(mRS, yuvType, Allocation.USAGE_SCRIPT);
        inputAllocation.copyFrom(dataIn);

        customYUVToRGBAScript.invoke_setInputImageSize(width, height);
        customYUVToRGBAScript.set_inputAllocation(inputAllocation);

        // Creates temporary allocation that will match camera preview size
        Type.Builder rgbaType = new Type.Builder(mRS, Element.RGBA_8888(mRS)).setX(width).setY(height);
        Allocation midAllocation = Allocation.createTyped(mRS, rgbaType.create(), Allocation.USAGE_SCRIPT);

        customYUVToRGBAScript.forEach_convert(midAllocation);
      
        
        return midAllocation;
    }

    private static Allocation YUVToBMPByRS(byte[] YUVData, int OriginWidth, int OriginHeight) {

        Log.i("YUVToBMPByRS", "RenderScript create again");

        yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(mRS, Element.RGBA_8888(mRS));

        rgbaType = new Type.Builder(mRS, Element.RGBA_8888(mRS)).setX(OriginWidth).setY(OriginHeight);
        out = Allocation.createTyped(mRS, rgbaType.create(), Allocation.USAGE_SCRIPT);

        yuvType = new Type.Builder(mRS, Element.U8(mRS)).setX(YUVData.length);
        in = Allocation.createTyped(mRS, yuvType.create(), Allocation.USAGE_SCRIPT);

        in.copyFrom(YUVData);

        yuvToRgbIntrinsic.setInput(in);
        yuvToRgbIntrinsic.forEach(out);

        // Alternatively, copy straight into a Bitmap here:
        // Bitmap BMPOut = Bitmap.createBitmap(OriginWidth, OriginHeight, Bitmap.Config.ARGB_8888);
        // out.copyTo(BMPOut);
        // return BMPOut;
        return out;
    }
    

    public Bitmap processImg(byte[] dataIn, int width, int height,int outWidth,int outHeight,int screen_orientation) {
    	Allocation	outputallocation;

        long startTime = System.currentTimeMillis();
        
        
        //Allocation allocation = YUVToBMPByRS(dataIn, width, height);
        Allocation allocation = YUVToRGBA(dataIn, width, height);
        
       // Allocation resizeAllocation = resize(allocation, width, height);
       
        if(screen_orientation==1  || screen_orientation==3){
       // 	Log.i("processImg","screen_orientation"+screen_orientation);
        	
        	outputallocation=Rotate270(allocation, width, height);
        	outwidth=height;
            outheight=width;
        	
        }else{
        	outwidth=width;
            outheight=height;
            outputallocation=allocation;
        }
        
        Bitmap outBitmap;

        // Creates output Bitmap, matching input one size.
       // Bitmap outBitmap = Bitmap.createBitmap(outWidth, outHeight, Bitmap.Config.ARGB_8888);
        // Copy calculation result to the output Bitmap.
       // resizeAllocation.copyTo(outBitmap);
        
        
    //    Log.i("processImg","outwidth"+outwidth);
        
        outBitmap = Bitmap.createBitmap(outwidth, outheight, Bitmap.Config.ARGB_8888);

        outputallocation.copyTo(outBitmap);
        
       
        return outBitmap;
    }

}
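
One thing worth noting for performance: YUVToRGBA() above creates fresh input and output Allocations on every preview frame. Since the preview size does not change while the camera is running, those Allocations could be created once and reused, which avoids per-frame allocation and GC pressure. A rough sketch of that idea (not part of the original class; the cached* field names are made up):

private Allocation cachedYuvIn, cachedRgbaOut;

private void ensureAllocations(int width, int height, int yuvLength) {
    if (cachedYuvIn == null) {
        // One flat U8 allocation for the raw YUV bytes...
        Type yuvT = new Type.Builder(mRS, Element.U8(mRS)).setX(yuvLength).create();
        cachedYuvIn = Allocation.createTyped(mRS, yuvT, Allocation.USAGE_SCRIPT);

        // ...and one RGBA_8888 allocation matching the preview size.
        Type rgbaT = new Type.Builder(mRS, Element.RGBA_8888(mRS)).setX(width).setY(height).create();
        cachedRgbaOut = Allocation.createTyped(mRS, rgbaT, Allocation.USAGE_SCRIPT);
    }
}

// YUVToRGBA() would then call ensureAllocations(width, height, dataIn.length),
// copy dataIn into cachedYuvIn, and run forEach_convert(cachedRgbaOut) as before.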

7. Calling these classes from Java.

7.1 Fields in MainActivity

	Allocation aIn;
	Allocation aOut;
	
	static Bitmap mOutBitmap,bitmap;
	boolean isScreenShot=false,isPreview=false;
	static boolean oldPreviewStatus=false;
	Camera mCameraDevice=null;  
	Camera.Parameters mParameters;
	ImageView iv_capture_view;
	Handler mHandler=new  Handler();
	RSImgProcess imgProcess;
	ThreadPoolExecutor GlobalThreadPool;
	OnClickListener CameraClickListener=null;
	Button CameraButton,PauseButton;
	ImageView mSrcImageView,mDstImageView;
	int Camera_w,Camera_h;
	byte [] PreviewBuff=null;
	byte [] OneFrameBuff=null;
	int FrameCount=0;
	long Time1,Time2;
	int screen_orientation;
	MainActivity mMainActivity;
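
The original post does not show onCreate(); here is a minimal sketch of how the fields above could be wired to the layout from step 2 (the listener bodies are assumptions, not taken from the original code):

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    mMainActivity = this;
    iv_capture_view = (ImageView) findViewById(R.id.imageView1);
    CameraButton = (Button) findViewById(R.id.Camera1);
    PauseButton = (Button) findViewById(R.id.PauseButton);

    // Open the camera and start the preview on demand.
    CameraButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mCameraDevice == null) {
                isPreview = true;
                prepareCamera();
            }
        }
    });

    // Toggle processing of preview frames.
    PauseButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            isPreview = !isPreview;
            isScreenShot = true;
        }
    });
}

Since imgProcess and GlobalThreadPool are created in onResume() (section 7.3), they are already available by the time either button can be clicked.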
	

7.2 The prepareCamera method in MainActivity (note that the manifest must declare the android.permission.CAMERA permission)

private void prepareCamera() {
	
	mCameraDevice=Camera.open();
	mParameters=mCameraDevice.getParameters();
	
	
	Camera_w=640;Camera_h=480;


	 mParameters.setPreviewSize(Camera_w,Camera_h);//w,h
	 Camera_w=mParameters.getPreviewSize().width ;
	 Camera_h=mParameters.getPreviewSize().height; 
	 
	 mParameters.setPictureSize(Camera_w,Camera_h);
	 
    float perPixel = ImageFormat.getBitsPerPixel(mParameters.getPreviewFormat()) / 8f;
    final int OneFrameSize=(int)(Camera_w*Camera_h* perPixel);
    
    final int previewBufferSize = (int) (Camera_w * Camera_h *2* perPixel);
 
    Log.i("prepareCamera","w*h:"+Camera_w+"*"+Camera_h);

    Log.i("prepareCamera","perPixel:"+perPixel);
    Log.i("prepareCamera","previewBufferSize:"+previewBufferSize);

    OneFrameBuff=new byte[(int)(Camera_w*Camera_h* perPixel)];
    PreviewBuff=new byte[previewBufferSize];
	mCameraDevice.addCallbackBuffer(PreviewBuff);
	
	mParameters.setPictureFormat(ImageFormat.YV12);
	mCameraDevice.setParameters(mParameters);
	
    isScreenShot=true;
    isPreview=true;
    
    mCameraDevice.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
          
			if (isScreenShot && isPreview) {
                isScreenShot = false;
            //    System.arraycopy(data,0,OneFrameBuff,0,OneFrameSize);
             //   Log.i("onPreviewFrame","data.length"+data.length);
                
              //  Log.i("onPreviewFrame","screen_orientation"+screen_orientation); 
            	screen_orientation = iv_capture_view.getResources().getConfiguration().orientation;
             //   Log.i("onPreviewFrame","screen_orientation"+screen_orientation); 
            	screenShot(data,screen_orientation);
            	
             //   screenShot(OneFrameBuff,screen_orientation);
            }
           // camera.addCallbackBuffer(data);
			
        //    Log.i("onPreviewFrame","FrameCount"+FrameCount+isScreenShot+isPreview); 
			
			camera.addCallbackBuffer(PreviewBuff);
			
        }
    });
    
    mCameraDevice.startPreview(); 
    mCameraDevice.autoFocus(null);


    Time1= System.currentTimeMillis();
    FrameCount=0;
    
    Log.i("prepareCamera","mCameraDevice"+mCameraDevice);
    
    
}
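
A note on the camera setup above: the convert kernel in hello.rs indexes the chroma plane as interleaved V/U pairs, i.e. it expects the NV21 layout, which is the default Android camera preview format; the setPictureFormat(ImageFormat.YV12) call only affects still captures, not the preview stream. If you want to make the preview-format assumption explicit (a suggestion, not in the original code), set it before calling setParameters():

mParameters.setPreviewFormat(ImageFormat.NV21); // match the VU-interleaved layout expected by convert()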
	

7.3 The onResume/onPause methods in MainActivity

protected void onResume() {
	// TODO Auto-generated method stub
		
	super.onResume();	
	
	screen_orientation = iv_capture_view.getResources().getConfiguration().orientation;
	iv_capture_view.setImageBitmap(mOutBitmap);
	isScreenShot=true;
	isPreview=oldPreviewStatus;
	
	Log.i("onResume","screen_orientation"+screen_orientation+" "+isScreenShot+isPreview);

	
	imgProcess=new RSImgProcess(mMainActivity);
	
	GlobalThreadPool = new ThreadPoolExecutor(8, 8, 80, TimeUnit.MILLISECONDS,
            new ArrayBlockingQueue<Runnable>(200));
	
	if(isPreview && mCameraDevice==null){	// after onPause the camera has been released and set to null
	 prepareCamera();
	Log.i("onResume","Resume Camera");	
	}
	
	
}





	@Override
	protected void onPause() {
		// TODO Auto-generated method stub
		
		
		oldPreviewStatus=isPreview;
		
		mOutBitmap=bitmap;
		
		isPreview=false;
		
		if(mCameraDevice!=null){
		mCameraDevice.stopPreview(); 
		mCameraDevice.release();
		mCameraDevice=null;
		}
		
		GlobalThreadPool.shutdown();
		
		try {
			Thread.sleep(30);
		} catch (InterruptedException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		
//		GlobalThreadPool.shutdownNow();		
		
		super.onPause();
		
		Log.i("onPause","mCameraDevice"+mCameraDevice);
		
	}
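
Instead of the fixed Thread.sleep(30) after shutdown(), an optional alternative is to wait for already-queued frames with awaitTermination (a sketch, not in the original code):

GlobalThreadPool.shutdown();
try {
    // Wait up to 100 ms for tasks that are already queued or running to finish.
    GlobalThreadPool.awaitTermination(100, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
    e.printStackTrace();
}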

7.4 Image processing in MainActivity

private void screenShot(final byte[] pixel,final int screenorientation) {

	
	GlobalThreadPool.execute(new Runnable() {
        @Override
        public void run() {
           
		bitmap = imgProcess.processImg(pixel, Camera_w, Camera_h,iv_capture_view.getWidth(),iv_capture_view.getHeight(),screenorientation);
            
			mHandler.post(new Runnable() {
                @Override
                public void run() {
                    
					iv_capture_view.setImageBitmap(bitmap);
		//			Log.i("post","setImageBitmap width"+bitmap.getWidth());
					
		//			Log.i("post","setImageBitmap");
     //               iv_capture_view.setVisibility(View.VISIBLE);
    //                AlphaAnimation inAnimation = new AlphaAnimation(0f, 1.0f);
    //                inAnimation.setDuration(250);
    //                iv_capture_view.startAnimation(inAnimation);
					
					FrameCount++;
					if(FrameCount>=100){
					Time2= System.currentTimeMillis();
					Log.i("screenShot","Time:"+(Time2-Time1));
					
					FrameCount=0;
					}
					
					if(FrameCount==0){
						Time1= System.currentTimeMillis();
					}
					
					
                }
            });
            mHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                  //  iv_capture_view.setVisibility(View.INVISIBLE);
                	
                	isScreenShot=true;
                }
            }, 20);
        }
    });

}
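
The FrameCount/Time1/Time2 bookkeeping above measures how long it takes to display 100 processed frames; if you want the effective frame rate as a number, it can be derived from the same interval, for example:

double fps = 100 * 1000.0 / (Time2 - Time1); // frames per second over the last 100 displayed frames
Log.i("screenShot", "fps: " + fps);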

8. Result screenshot (the screenshot image is not reproduced here).

9. You can modify the code yourself (for example, by enabling the commented-out grayscale lines in the convert kernel) to produce a black-and-white image.
