JNI | c++ 調用 Java 自定義類、方法————以Camera爲例

小白出品,可搬可參,yuv2rgb效率一般(640*480 以下應該還可以接受),請自行更換算法。

github:https://github.com/Coder-Wjt/JNI_Samples

測試工具:Eclipse+Unity

Java:

PreviewMode.java:

package wjt.camera.plugin;

/**
 * Immutable preview configuration (size and frame rate) for one camera mode.
 * Instances are created both from Java and from native code, and native code
 * reads the width/height/fps fields directly via JNI GetFieldID — keep the
 * field names and types unchanged.
 */
public class PreviewMode
{
  private final int width;
  private final int height;
  private final int fps;

  /**
   * @param width  preview width in pixels
   * @param height preview height in pixels
   * @param fps    preview frame rate
   */
  public PreviewMode(int width, int height, int fps)
  {
    this.width = width;
    this.height = height;
    this.fps = fps;
  }

  /** @return preview width in pixels */
  public int getWidth() {
    return this.width;
  }

  /** @return preview height in pixels */
  public int getHeight() {
    return this.height;
  }

  /** @return preview frame rate */
  public int getFps() {
    return this.fps;
  }

  @Override
  public String toString()
  {
    // Fixed: the original text said "ImageStreamMode", a copy-paste leftover
    // from another class; report this class's actual name.
    return "PreviewMode{width=" + this.width + ", height=" + this.height + ", fps=" + this.fps + '}';
  }
}

AndroidCamera.java:

package wjt.camera.plugin;

import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import java.io.IOException;
import java.util.List;

/**
 * Singleton wrapper around the legacy {@link android.hardware.Camera} API.
 * Opens the first camera that can be acquired, streams NV21 preview frames
 * through a reusable byte buffer, and forwards each frame plus lifecycle
 * events to native code via the private native callbacks below (registered
 * from C++ with RegisterNatives — keep names and signatures unchanged).
 */
public class AndroidCamera implements Camera.PreviewCallback {
	private static final String TAG = AndroidCamera.class.getSimpleName();
	private Camera camera;
	private Camera.Parameters cameraParams;
	private boolean isStreaming = false;
	// Reusable preview buffer sized for one NV21 frame (w * h * 3 / 2 bytes).
	private byte[] dataBuffer;
	private static AndroidCamera androidCameraInstance;
	// Arbitrary texture name for the dummy SurfaceTexture used as preview
	// target (preview frames are consumed via the callback, not rendered).
	private static final int MAGIC_TEXTURE_ID = 6;
	private SurfaceTexture gSurfaceTexture;

	private AndroidCamera() {
		Log.d(TAG, "Constructor");
	}

	/** Returns the process-wide camera wrapper, creating it on first use. */
	public static synchronized AndroidCamera getInstance() {
		if (androidCameraInstance == null) {
			androidCameraInstance = new AndroidCamera();
		}
		return androidCameraInstance;
	}

	/**
	 * Opens the first camera that can be acquired and fires the native
	 * cameraOpened() callback on success.
	 *
	 * @return true if a camera is open after the call
	 */
	public boolean openCamera() {
		Log.d(TAG, "openCamera()");

		if (this.camera == null) {
			int numOfCameras = Camera.getNumberOfCameras();
			Log.v(TAG, "openCamera(): Number of Cameras " + numOfCameras);

			// Stop at the first camera that opens successfully, so earlier
			// Camera instances are never overwritten and leaked.
			for (int i = 0; i < numOfCameras && this.camera == null; i++) {
				try {
					this.camera = Camera.open(i);
				} catch (Exception e) {
					Log.d(TAG, "openCamera(): Exception while opening camera #" + i + ": " + e.toString());
				}
			}
		}

		if (this.camera == null) {
			return false;
		}

		cameraOpened();

		return true;
	}

	/**
	 * Stops streaming, releases the camera and fires the native
	 * cameraClosed() callback. Safe to call when no camera is open.
	 *
	 * @return always true
	 */
	public boolean closeCamera() {
		Log.d(TAG, "closeCamera()");

		if (this.camera != null) {
			try {
				this.camera.setPreviewTexture(null);
			} catch (IOException e) {
				e.printStackTrace();
			}
			this.camera.setPreviewCallbackWithBuffer(null);
			this.camera.stopPreview();
			this.isStreaming = false;
			this.camera.release();
			this.camera = null;
			this.dataBuffer = null;
		}

		cameraClosed();

		return true;
	}

	/**
	 * Configures NV21 preview, allocates the frame buffer, starts the
	 * preview and fires the native cameraStarted() callback.
	 *
	 * @return true if preview was started, false if no camera is open or
	 *         NV21 is unsupported
	 */
	public boolean startCamera() {
		Log.d(TAG, "startCamera()");

		if (this.camera != null) {
			this.cameraParams = this.camera.getParameters();

			boolean foundNV21 = false;
			List<Integer> formats = this.cameraParams.getSupportedPreviewFormats();
			Log.d(TAG, "Preview format supported count: " + formats.size());
			for (int i = 0; i < formats.size(); i++) {
				int format = ((Integer) formats.get(i)).intValue();
				Log.d(TAG, "startCamera: Preview format supported: " + format + " bits per pixel: "
						+ ImageFormat.getBitsPerPixel(format));
				if (format == ImageFormat.NV21) {
					/* ImageFormat.NV21 == 17; keep iterating so every
					 * supported format still gets logged. */
					this.cameraParams.setPreviewFormat(ImageFormat.NV21);
					foundNV21 = true;
				}
			}

			if (!foundNV21) {
				Log.d(TAG, "startCamera: Camera doesn't support ImageFormat.NV21. Can't use it.");
				return false;
			}

			List<int[]> fpsRange = this.cameraParams.getSupportedPreviewFpsRange();
			for (int i = 0; i < fpsRange.size(); i++) {
				int[] fps = (int[]) fpsRange.get(i);
				String fpsString = "startCamera: Supported preview FPS";
				for (int j = 0; j < fps.length; j++) {
					fpsString = fpsString + " " + fps[j];
				}
				Log.d(TAG, fpsString);
			}

			this.camera.setParameters(this.cameraParams);

			Camera.Size previewSize = this.cameraParams.getPreviewSize();

			// NV21 is 12 bits per pixel: width * height * 3 / 2 bytes.
			int bytesPerPixel = 3;

			this.dataBuffer = new byte[previewSize.width * previewSize.height * bytesPerPixel / 2];
			this.camera.addCallbackBuffer(this.dataBuffer);

			if (this.gSurfaceTexture == null) {
				this.gSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
			}
			try {
				this.camera.setPreviewTexture(this.gSurfaceTexture);
			} catch (IOException e) {
				e.printStackTrace();
			}

			// Use the with-buffer variant so addCallbackBuffer() actually
			// feeds our reusable buffer; plain setPreviewCallback ignores
			// queued buffers and allocates a new array every frame.
			this.camera.setPreviewCallbackWithBuffer(this);
			this.camera.startPreview();

			this.isStreaming = true;

			// Re-read parameters: the driver may have adjusted them.
			this.cameraParams = this.camera.getParameters();

			int fps = cameraParams.getPreviewFrameRate();
			Log.i(TAG, "startCamera: PreviewSize Width = " + previewSize.width
					+ " Height = " + previewSize.height + " fps:" + fps);

			cameraStarted(previewSize.width, previewSize.height, fps);
			return true;
		}

		return false;
	}

	/**
	 * Stops the preview (camera stays open) and fires the native
	 * cameraStopped() callback.
	 *
	 * @return always true
	 */
	public boolean stopCamera() {
		Log.d(TAG, "stopCamera()");

		if (this.camera != null) {
			this.camera.setPreviewCallbackWithBuffer(null);
			this.camera.stopPreview();
			this.isStreaming = false;
		}
		cameraStopped();
		return true;
	}

	/**
	 * Lists the preview modes supported by the open camera. Called from
	 * native code (getAvailableCameraModes in astra_android_bridge.cpp).
	 *
	 * @return one PreviewMode per supported size (current fps), the single
	 *         current mode when the driver reports no size list, or an
	 *         empty array when no camera is open
	 */
	public PreviewMode[] getAvailableCameraModes() {
		Log.d(TAG, "getAvailableCameraModes()");

		// Guard: the original dereferenced this.camera unconditionally and
		// threw NPE when called before openCamera().
		if (this.camera == null) {
			Log.d(TAG, "getAvailableCameraModes(): camera is not open");
			return new PreviewMode[0];
		}

		this.cameraParams = this.camera.getParameters();

		List<Integer> formats = this.cameraParams.getSupportedPreviewFormats();
		Log.d(TAG, "Preview format supported count: " + formats.size());
		for (int i = 0; i < formats.size(); i++) {
			int format = ((Integer) formats.get(i)).intValue();
			Log.d(TAG,
					"Preview format supported: " + format + " bits per pixel: " + ImageFormat.getBitsPerPixel(format));
		}

		List<int[]> fpsRange = this.cameraParams.getSupportedPreviewFpsRange();
		for (int i = 0; i < fpsRange.size(); i++) {
			int[] fps = (int[]) fpsRange.get(i);
			String fpsString = "Supported preview FPS";
			for (int j = 0; j < fps.length; j++) {
				fpsString = fpsString + " " + fps[j];
			}
			Log.d(TAG, fpsString);
		}

		int fps = this.cameraParams.getPreviewFrameRate();
		List<Size> supportedPreviewSizes = this.cameraParams.getSupportedPreviewSizes();
		if ((supportedPreviewSizes == null) || (supportedPreviewSizes.size() <= 0)) {
			// No size list from the driver: report only the current size.
			PreviewMode[] modes = new PreviewMode[1];
			Camera.Size previewSize = this.cameraParams.getPreviewSize();
			String sizeString = "Supported preview size ";
			sizeString = sizeString + "width:" + previewSize.width + " height:" + previewSize.height;
			Log.d(TAG, sizeString);
			modes[0] = new PreviewMode(previewSize.width, previewSize.height, fps);
			return modes;
		}

		PreviewMode[] modes = new PreviewMode[supportedPreviewSizes.size()];

		for (int i = 0; i < supportedPreviewSizes.size(); i++) {
			Camera.Size size = (Camera.Size) supportedPreviewSizes.get(i);
			String sizeString = "Supported preview size ";
			sizeString = sizeString + "width:" + size.width + " height:" + size.height;
			Log.d(TAG, sizeString);
			modes[i] = new PreviewMode(size.width, size.height, fps);
		}

		return modes;
	}

	/**
	 * @return the camera's current preview size and frame rate; called from
	 *         native code
	 */
	public PreviewMode getCameraMode() {
		Log.d(TAG, "getCameraMode()");
		Camera.Parameters cameraParams = this.camera.getParameters();
		Camera.Size previewSize = cameraParams.getPreviewSize();
		int fps = cameraParams.getPreviewFrameRate();
		return new PreviewMode(previewSize.width, previewSize.height, fps);
	}

	/**
	 * Applies the requested preview size, restarting the preview when it is
	 * currently streaming; called from native code.
	 */
	public void setCameraMode(PreviewMode mode) {
		Log.d(TAG, String.format("setCameraMode(%d,%d,%d)", mode.getWidth(), mode.getHeight(), mode.getFps()));
		this.cameraParams.setPreviewSize(mode.getWidth(), mode.getHeight());
		this.camera.setParameters(this.cameraParams);
		if (this.isStreaming) {
			stopCamera();
			startCamera();
		}
	}

	/**
	 * Preview callback: forwards each NV21 frame to native code, then
	 * re-queues the shared buffer so the next frame can be delivered.
	 */
	@Override
	public void onPreviewFrame(byte[] data, Camera myCamera) {
		if (this.camera == null) {
			return;
		}
		Camera.Parameters parameters = this.camera.getParameters();
		if ((data != null) && (parameters.getPreviewFormat() == ImageFormat.NV21)) {
			int width = parameters.getPreviewSize().width;
			int height = parameters.getPreviewSize().height;

			processCameraFrame(width, height, data);
		}

		this.camera.addCallbackBuffer(this.dataBuffer);
	}

	// Native callbacks implemented in astra_android_bridge.cpp and bound via
	// RegisterNatives; names and signatures must match jniMethods[] there.
	private native void cameraStarted(int width, int height, int fps);

	private native void cameraStopped();

	private native void cameraOpened();

	private native void cameraClosed();

	private native void processCameraFrame(int width, int height, byte[] imgdata);
}

Android.mk:

# ndk-build module definition: builds libastra_android_bridge.so from the
# single JNI bridge source file.
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := astra_android_bridge
LOCAL_SRC_FILES := astra_android_bridge.cpp

# Project-local headers.
LOCAL_C_INCLUDES	:= $(LOCAL_PATH)/include
# Enable RTTI and C++ exceptions.
LOCAL_CPPFLAGS 	:= -frtti -fexceptions
# Android log library for __android_log_print.
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

Application.mk:

APP_ABI := armeabi-v7a
APP_STL := gnustl_static

C++:

astra_android_bridge.cpp

/*
 * astra_android_bridge.cpp
 *
 *  Created on: 2019年11月27日
 *      Author: wjintao
 */

#include <jni.h>

#include <stdio.h>
#include <string.h>     /* memcpy (used by processCameraFrame / GetImageData) */
#include <sys/time.h>   /* gettimeofday (used by yuv420sp_to_rgb) */
#include <time.h>
#include <unistd.h>
#include <android/log.h>

#define  JNICAMERA_CLASS "wjt/camera/plugin/AndroidCamera"
#define  JNIMODE_CLASS "wjt/camera/plugin/PreviewMode"
#define  LOG_TAG    "WJT_Plugin"
#define  LOGI(...)   __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define  LOGE(...)   __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#define  LOGD(...)   __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)

// Native mirror of wjt.camera.plugin.PreviewMode; layout (three 32-bit ints)
// must match the C# PreviewMode struct marshaled by the host.
typedef struct PreviewMode {
public:
	int width;
	int height;
	int fps;
} PreviewMode;

// Fixed capacity of ImageSupportedModeList below; must match the C# side.
#define IMAGEMODECOUNT 255

// Fixed-capacity list of preview modes returned to the host; layout must
// match the C# ImageSupportedModeList struct.
typedef struct ImageSupportedModeList {
public:
	int ImageModeCount;
	PreviewMode ImageModeList[IMAGEMODECOUNT];
} ImageSupportedModeList;

/// <summary>
/// Maximum supported resolution buffer size (1920 * 1080, 4 bytes headroom
/// per pixel even though the converter writes RGB24).
/// </summary>
#define RGBDATALENGTH  (1920 * 1080 * 4)

/// <summary>
/// RGB frame container; layout must match the C# RGBImage struct.
/// </summary>
typedef struct CRGBImage {
public:
	/// <summary>
	/// Current frame width in pixels.
	/// </summary>
	int width;
	/// <summary>
	/// Current frame height in pixels.
	/// </summary>
	int height;
	/// <summary>
	/// Frame timestamp (microseconds, taken from gettimeofday() after
	/// conversion — see yuv420sp_to_rgb).
	/// </summary>
	unsigned long long Timestamp;
	/// <summary>
	/// Packed RGB24 pixel data.
	/// </summary>
	unsigned char ImageData[RGBDATALENGTH];
} CRGBImage;

// Cached JNI state, resolved once in JNI_OnLoad()/setup().
// NOTE(review): a JNIEnv* is only valid on the thread that obtained it, yet
// the exported C functions below reuse this one from whatever thread the
// host calls on — confirm all calls happen on the loading thread, or attach
// per call via the JavaVM.
JNIEnv* env;
jclass jcCamera;    // cached AndroidCamera class (see setup())
jobject joCamera;   // global ref to the AndroidCamera singleton
jmethodID methodID_getInstance;
jmethodID methodID_openCamera;
jmethodID methodID_closeCamera;
jmethodID methodID_startCamera;
jmethodID methodID_stopCamera;
jmethodID methodID_getAvailableCameraModes;
jmethodID methodID_getCameraMode;
jmethodID methodID_setCameraMode;

PreviewMode nowMode;   // mode reported by the last cameraStarted() callback
int Timestamp;         // microsecond timestamp of the last converted frame
int imgBytesLen;       // NOTE(review): written nowhere in this file — appears unused

unsigned char ImageData[RGBDATALENGTH];   // latest converted RGB24 frame
unsigned char YUVSrcData[RGBDATALENGTH];  // latest raw NV21 frame from Java
bool IsStartCamera = false;               // true between cameraStarted/cameraStopped

// YUV->RGB lookup tables, filled by yuv420_2_rgb24_table_init().
// NOTE(review): RGBY is declared but never initialized or read here.
const int SIZE = 256;
int RGBY[SIZE], RV[SIZE], GU[SIZE], GV[SIZE], BU[SIZE];

// Precompute the chroma contribution tables used by yuv420sp_to_rgb().
// Each entry folds in the -128 chroma bias, so the converter can index the
// tables with the raw 0..255 U/V bytes directly.
void yuv420_2_rgb24_table_init() {
	for (int c = 0; c < SIZE; c++) {
		const int d = c - 128;  // signed chroma offset, -128..127
		RV[c] = d + ((d * 103) >> 8);
		GV[c] = (d * 183) >> 8;
		GU[c] = (d * 88) >> 8;
		BU[c] = d + ((d * 198) >> 8);
	}
}

// Convert one NV21 (YUV420 semi-planar) frame to packed RGB24.
//   yuv420sp   - source: Y plane followed by an interleaved chroma plane
//                (read here as V first, then U — NV21 order)
//   width/height - frame dimensions in pixels
//   IsMirrored - when true, each output row is written right-to-left
//   rgb        - destination, must hold at least width*height*3 bytes
//   Timestamp  - out-param: gettimeofday() after conversion, in microseconds.
//                NOTE(review): tv_sec*1000000 overflows a 32-bit int, so this
//                value wraps — confirm callers only use it for comparisons.
// Uses the lookup tables built by yuv420_2_rgb24_table_init().
void yuv420sp_to_rgb(unsigned char* yuv420sp, int width, int height,
		bool IsMirrored, unsigned char* rgb, int& Timestamp) {
	struct timeval tvStart1, tvEnd1;
//	gettimeofday(&tvStart1,NULL);
//	LOGI("yuv420sp_to_rgb start time:%ld\n",tvStart1.tv_usec);
	// Size of the Y plane (one byte per pixel); chroma starts right after it.
	int frameSize = width * height;
	// Decoder state.
	int i = 0, y = 0;
	int uvp = 0, u = 0, v = 0;
	// Current pixel's color components.
	int r = 0, g = 0, b = 0;

	int ii = 0;
	// Walk the frame row by row; each pair of rows shares one chroma row
	// (hence the j >> 1 when locating uvp).
	for (int j = 0, yp = 0; j < height; j++) {
		uvp = frameSize + (j >> 1) * width;
		u = 0;
		v = 0;

		for (i = 0; i < width; i++, yp++) {
			y = (0xff & ((int) yuv420sp[yp])) - 16;
			if (y < 0)
				y = 0;
			// Chroma is subsampled 2x horizontally: refresh V/U on even columns.
			if ((i & 1) == 0) {
				v = (0xff & yuv420sp[uvp++]);	// - 128;  (bias folded into the tables)
				u = (0xff & yuv420sp[uvp++]);	// - 128;
			}

			// Partial table lookup: RV/GV/GU/BU already include the -128
			// chroma bias and the fixed-point coefficients.
			r = y + RV[v];
			g = y - GV[v] - GU[u];
			b = y + BU[u];

			// Clamp r, g, b to 0..255.
			if (r < 0)
				r = 0;
			else if (r > 255)
				r = 255;
			if (g < 0)
				g = 0;
			else if (g > 255)
				g = 255;
			if (b < 0)
				b = 0;
			else if (b > 255)
				b = 255;

			// Mirrored output index: same row, columns reversed.
			ii = (width - i - 1) + j * width;

			if (!IsMirrored) {
				ii = yp;
			}

			rgb[ii * 3] = (unsigned char) (r);
			rgb[ii * 3 + 1] = (unsigned char) (g);
			rgb[ii * 3 + 2] = (unsigned char) (b);
		}
	}
	gettimeofday(&tvEnd1, NULL);

//	long useTv = (tvEnd1.tv_sec - tvStart1.tv_sec)*1000000+(tvEnd1.tv_usec - tvStart1.tv_usec);
//	LOGI("yuv420sp_to_rgb() translation the frame:	\t%ld\n",useTv);

	Timestamp = tvEnd1.tv_sec * 1000000 + tvEnd1.tv_usec;
}

// Java callback: the camera was opened; build the YUV->RGB lookup tables so
// the converter is ready before the first frame arrives.
JNIEXPORT void JNICALL
cameraOpened(JNIEnv* env, jobject obj) {
	//camera open callback
	yuv420_2_rgb24_table_init();
}

// Java callback: the camera was closed. Nothing to clean up on this side.
JNIEXPORT void JNICALL
cameraClosed(JNIEnv* env, jobject obj) {
	//camera close callback
}

// Java callback: preview started. Record the active mode and begin handing
// frames to the host. (Writing the same values when nothing changed is
// harmless, so no comparison is needed.)
JNIEXPORT void JNICALL
cameraStarted(JNIEnv* env, jobject obj, jint width, jint height, jint fps) {
	//camera start callback
	LOGI("width:%d,height:%d,fps:%d\n", width, height, fps);
	nowMode.width = width;
	nowMode.height = height;
	nowMode.fps = fps;

	IsStartCamera = true;
}

// Java callback: preview stopped; stop serving frames to the host.
JNIEXPORT void JNICALL
cameraStopped(JNIEnv* env, jobject obj) {
	//camera stop callback
	IsStartCamera = false;
}

// Java callback for every preview frame: copy the raw NV21 buffer into
// YUVSrcData and convert it to RGB24 (mirrored) into ImageData, updating
// the global Timestamp. Ignored while the camera is not streaming.
JNIEXPORT void JNICALL
processCameraFrame(JNIEnv* env, jobject obj, jint width, jint height,
		jbyteArray imgdata) {
	//new frame callback
	if (!IsStartCamera)
		return;
	jbyte * imgBody = env->GetByteArrayElements(imgdata, 0);
	if (imgBody == NULL)
		return;
	jsize imgdatalen = env->GetArrayLength(imgdata);
//	LOGI("width:%d,height:%d,data length:%d\n",width,height,imgdatalen);

	// Clamp to the static buffer size so an oversized frame cannot
	// overflow YUVSrcData.
	if (imgdatalen > (jsize) sizeof(YUVSrcData))
		imgdatalen = (jsize) sizeof(YUVSrcData);
	memcpy(YUVSrcData, imgBody, imgdatalen);

	// JNI_ABORT: the Java array was not modified, so skip the copy-back
	// that mode 0 would perform.
	env->ReleaseByteArrayElements(imgdata, imgBody, JNI_ABORT);
	env->DeleteLocalRef(imgdata);

	//YUV(NV21) to RGB
	yuv420sp_to_rgb(YUVSrcData, width, height, true, ImageData, Timestamp);
}

// Native methods registered on wjt.camera.plugin.AndroidCamera via
// RegisterNatives; names and JNI signatures must match the private native
// declarations in AndroidCamera.java.
static JNINativeMethod jniMethods[] = { { "processCameraFrame", "(II[B)V",
		(void*) processCameraFrame }, { "cameraOpened", "()V",
		(void*) cameraOpened }, { "cameraClosed", "()V", (void*) cameraClosed },
		{ "cameraStarted", "(III)V", (void*) cameraStarted }, { "cameraStopped",
				"()V", (void*) cameraStopped } };

// Resolve and cache the AndroidCamera class, its method IDs and the Java
// singleton instance. Called once from JNI_OnLoad (obj is unused there).
void setup(JNIEnv* env, jobject obj) {
	LOGI("setup");

	// FindClass returns a LOCAL reference; promote it to a global reference
	// before caching, otherwise jcCamera dangles once this frame unwinds
	// (and dispose()'s DeleteGlobalRef on it would be invalid).
	jclass localClass = env->FindClass(JNICAMERA_CLASS);
	if (localClass == NULL) {
		LOGE("setup: FindClass failed");
		return;
	}
	jcCamera = (jclass) env->NewGlobalRef(localClass);
	env->DeleteLocalRef(localClass);
	if (jcCamera == NULL) {
		LOGE("setup: NewGlobalRef failed");
		return;
	}

	// Method IDs stay valid as long as the class is pinned by jcCamera.
	methodID_openCamera = env->GetMethodID(jcCamera, "openCamera", "()Z");
	methodID_closeCamera = env->GetMethodID(jcCamera, "closeCamera", "()Z");
	methodID_startCamera = env->GetMethodID(jcCamera, "startCamera", "()Z");
	methodID_stopCamera = env->GetMethodID(jcCamera, "stopCamera", "()Z");
	methodID_getAvailableCameraModes = env->GetMethodID(jcCamera,
			"getAvailableCameraModes", "()[Lwjt/camera/plugin/PreviewMode;");
	methodID_getCameraMode = env->GetMethodID(jcCamera, "getCameraMode",
			"()Lwjt/camera/plugin/PreviewMode;");
	methodID_setCameraMode = env->GetMethodID(jcCamera, "setCameraMode",
			"(Lwjt/camera/plugin/PreviewMode;)V");

	// Resolve the static singleton accessor (instead of the default
	// constructor, so Java and native share one AndroidCamera instance).
	methodID_getInstance = env->GetStaticMethodID(jcCamera, "getInstance",
			"()Lwjt/camera/plugin/AndroidCamera;");
	if (methodID_getInstance == NULL) {
		LOGI("methodID_getInstance == NULL");
		return;
	}
	// Fetch the singleton...
	joCamera = env->CallStaticObjectMethod(jcCamera, methodID_getInstance);
	if (joCamera == NULL) {
		LOGI("joCamera == NULL");
		return;
	}
	// ...and pin it with a global reference so later calls can use it.
	joCamera = env->NewGlobalRef(joCamera);
}

// Library entry point: cache the JNIEnv, register the native callbacks on
// AndroidCamera, and resolve the Java-side singleton via setup().
// NOTE(review): the JNIEnv cached here is only valid on this thread; the
// exported C functions reuse it — confirm the host calls on the same thread
// or switch them to vm->AttachCurrentThread().
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
	if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK) {
		return -1;
	}
	jclass clz = env->FindClass(JNICAMERA_CLASS);
	if (clz == NULL) {
		// Class missing from the APK: fail the load instead of crashing.
		return -1;
	}
	if (env->RegisterNatives(clz, jniMethods,
			sizeof(jniMethods) / sizeof(JNINativeMethod)) != JNI_OK) {
		env->DeleteLocalRef(clz);
		return -1;
	}

	// Resolve Java method IDs and the camera singleton.
	setup(env, clz);

	env->DeleteLocalRef(clz);
	return JNI_VERSION_1_4;
}

#ifdef __cplusplus
extern "C" {
#endif

// Exported to the host (P/Invoke): asks the Java singleton to open a camera.
// Returns true when a camera is open afterwards.
bool openCamera() {
	jboolean isOpen = env->CallBooleanMethod(joCamera, methodID_openCamera);
	return isOpen == JNI_TRUE;
}
// Exported to the host: releases the camera on the Java side.
bool closeCamera() {
	jboolean isClose = env->CallBooleanMethod(joCamera, methodID_closeCamera);
	return isClose == JNI_TRUE;
}

// Exported to the host: starts the NV21 preview stream on the Java side.
bool startCamera() {
	jboolean isStart = env->CallBooleanMethod(joCamera, methodID_startCamera);
	return isStart == JNI_TRUE;
}
// Exported to the host: stops the preview stream (camera stays open).
bool stopCamera() {
	jboolean isStop = env->CallBooleanMethod(joCamera, methodID_stopCamera);
	return isStop == JNI_TRUE;
}

// Exported to the host: queries Java for the supported preview modes and
// copies them into the fixed-capacity output struct. ImageModeCount is set
// to the number of valid entries (0 when the query returns nothing).
void getAvailableCameraModes(ImageSupportedModeList& modes) {
	// The Java method returns PreviewMode[]; treat the result as an array.
	jobjectArray array = (jobjectArray) env->CallObjectMethod(joCamera,
			methodID_getAvailableCameraModes);
	if (array == NULL) {
		// Guard: the original dereferenced a NULL array and, for an empty
		// array, called DeleteLocalRef on an uninitialized jobject.
		modes.ImageModeCount = 0;
		return;
	}

	// Resolve the field IDs of the Java PreviewMode class.
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	jfieldID heightFieldId = env->GetFieldID(jcMode, "height", "I");
	jfieldID widthFieldId = env->GetFieldID(jcMode, "width", "I");
	jfieldID fpsFieldId = env->GetFieldID(jcMode, "fps", "I");

	int length = env->GetArrayLength(array);
	// Clamp to the fixed capacity of the output struct.
	if (length > IMAGEMODECOUNT)
		length = IMAGEMODECOUNT;

	LOGI("PreviewModes length:%d\n", length);

	// Copy each Java PreviewMode into the output struct, releasing each
	// element's local reference per iteration so the local-ref table
	// cannot fill up on long lists.
	for (int i = 0; i < length; i++) {
		jobject obj = env->GetObjectArrayElement(array, i);
		modes.ImageModeList[i].width = env->GetIntField(obj, widthFieldId);
		LOGI("modes[%d] width:%d\n", i, modes.ImageModeList[i].width);
		modes.ImageModeList[i].height = env->GetIntField(obj, heightFieldId);
		LOGI("modes[%d] height:%d\n", i, modes.ImageModeList[i].height);
		modes.ImageModeList[i].fps = env->GetIntField(obj, fpsFieldId);
		LOGI("modes[%d] fps:%d\n", i, modes.ImageModeList[i].fps);
		env->DeleteLocalRef(obj);
	}
	modes.ImageModeCount = length;

	env->DeleteLocalRef(array);
	env->DeleteLocalRef(jcMode);
}

// Exported to the host: reads the camera's current preview mode into Mode.
void getCameraMode(PreviewMode& Mode) {
	// Fetch the Java PreviewMode object from the singleton.
	jobject obj = env->CallObjectMethod(joCamera, methodID_getCameraMode);

	// Resolve the field IDs of the Java class.
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	jfieldID widthFieldId = env->GetFieldID(jcMode, "width", "I");
	jfieldID heightFieldId = env->GetFieldID(jcMode, "height", "I");
	jfieldID fpsFieldId = env->GetFieldID(jcMode, "fps", "I");

	Mode.width = env->GetIntField(obj, widthFieldId);
	Mode.height = env->GetIntField(obj, heightFieldId);
	Mode.fps = env->GetIntField(obj, fpsFieldId);

	LOGI("Mode width:%d,height:%d,fps:%d\n", Mode.width, Mode.height, Mode.fps);
	env->DeleteLocalRef(jcMode);
	env->DeleteLocalRef(obj);
}

// Exported to the host: builds a Java PreviewMode from the given struct and
// asks the camera singleton to switch to it.
void setCameraMode(PreviewMode Mode) {
	// 1. Look up the Java PreviewMode class.
	jclass jcMode = env->FindClass(JNIMODE_CLASS);
	// 2. Get its (int,int,int) constructor (constructors are named <init>).
	jmethodID methodID_ModeInstance = env->GetMethodID(jcMode, "<init>",
			"(III)V");
	// 3. Instantiate a PreviewMode with the requested values.
	jobject joMode = env->NewObject(jcMode, methodID_ModeInstance, Mode.width,
			Mode.height, Mode.fps);

	env->CallVoidMethod(joCamera, methodID_setCameraMode, joMode);
	env->DeleteLocalRef(joMode);
	env->DeleteLocalRef(jcMode);
}

// Exported to the host: copies the most recent converted RGB24 frame into
// *pImageData. No-op while the camera is not streaming or when pImageData
// is NULL.
void GetImageData(CRGBImage* pImageData) {
	if (pImageData == NULL || !IsStartCamera)
		return;
	pImageData->width = nowMode.width;
	pImageData->height = nowMode.height;
	pImageData->Timestamp = Timestamp;
	// RGB24: 3 bytes per pixel. Clamp so an oversized mode can never
	// overrun the fixed-size buffers on either side of the copy.
	int DataLength = nowMode.width * nowMode.height * 3;
	if (DataLength > RGBDATALENGTH)
		DataLength = RGBDATALENGTH;
	memcpy(pImageData->ImageData, ImageData, DataLength);
}

//unsigned char* GetImageData()
//{
//	if(!IsStartCamera) return NULL;
//	return ImageData;
//}

// Exported to the host: returns the latest raw NV21 frame, or NULL when the
// camera is not streaming. The pointer refers to a shared static buffer
// that is overwritten on every incoming frame.
unsigned char* GetYUV420Data() {
	if (!IsStartCamera)
		return NULL;
	return YUVSrcData;
}

// Exported to the host: drops the cached JNI references.
// NOTE(review): DeleteGlobalRef(jcCamera) is only valid if setup() stored
// jcCamera via NewGlobalRef — verify against setup(); a plain FindClass
// result is a local reference and must not be passed here.
void dispose() {
	LOGI("JNI_dispose");
	// Release the cached object references.
	env->DeleteGlobalRef(joCamera);
	env->DeleteGlobalRef(jcCamera);
}

#ifdef __cplusplus
}
#endif

C#:

PreviewMode.cs:

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

/// <summary>
/// Managed mirror of the native PreviewMode struct (three sequential 32-bit
/// ints); field order and types must match astra_android_bridge.cpp.
/// </summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential, CharSet = System.Runtime.InteropServices.CharSet.Ansi)]
public struct PreviewMode
{
    public int width;   // preview width in pixels
    public int height;  // preview height in pixels
    public int fps;     // preview frame rate
}

ImageSupportedModeList.cs

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

/// <summary>
/// Managed mirror of the native ImageSupportedModeList struct; the capacity
/// (255) and field layout must match astra_android_bridge.cpp.
/// </summary>
public struct ImageSupportedModeList
{
    public const int IMAGEMODECOUNT = 255;
    // Number of valid entries in ImageModeList, set by native code.
    public int ImageModeCount; 
    [System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.ByValArray, SizeConst = IMAGEMODECOUNT, ArraySubType = System.Runtime.InteropServices.UnmanagedType.Struct)]
    public PreviewMode[] ImageModeList;
}

RGBImage.cs

using UnityEngine;
using System.Collections;
using System;

/// <summary>
/// Managed mirror of the native CRGBImage struct; sequential layout and the
/// RGBDATALENGTH buffer size must match astra_android_bridge.cpp.
/// </summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential, CharSet = System.Runtime.InteropServices.CharSet.Ansi)]
public struct RGBImage
{

    /// <summary>
    /// Copy-constructs from another image with a full-size buffer.
    /// </summary>
    /// <param name="imageData">source image</param>
    public RGBImage(RGBImage imageData)
        : this(imageData, false)
    {

    }

    /// <summary>
    /// Copy-constructs from another image.
    /// </summary>
    /// <param name="imageData">source image</param>
    /// <param name="IsCompress">kept for interface compatibility only: both
    /// branches of the original allocated the same full-size buffer, so the
    /// flag never had any effect (and compressed buffers could crash the
    /// native side when passed back)</param>
    public RGBImage(RGBImage imageData, bool IsCompress)
    {
        // The original if/else allocated identically on both paths; a single
        // allocation makes the (lack of) compression explicit.
        ImageData = new byte[RGBDATALENGTH];
        Array.Copy(imageData.ImageData, ImageData, imageData.ImageData.Length);
        Timestamp = imageData.Timestamp;
        width = imageData.width;
        height = imageData.height;
    }

    /// <summary>
    /// Copies another image's header and pixel data into this instance.
    /// ImageData must already be allocated and large enough.
    /// </summary>
    /// <param name="imageData">source image</param>
    public void Copy(RGBImage imageData)
    {
        Array.Copy(imageData.ImageData, ImageData, imageData.ImageData.Length);
        Timestamp = imageData.Timestamp;
        width = imageData.width;
        height = imageData.height;
    }


    /// <summary>
    /// Current frame width in pixels.
    /// </summary>
    public int width;

    /// <summary>
    /// Current frame height in pixels.
    /// </summary>
    public int height;
    /// <summary>
    /// Frame timestamp (microseconds, set by native code).
    /// </summary>
    public ulong Timestamp;

    /// <summary>
    /// Maximum supported resolution buffer size; must equal the native
    /// RGBDATALENGTH define.
    /// </summary>
    public const int RGBDATALENGTH = 1920 * 1080 * 4;
    /// <summary>
    /// Packed RGB pixel data.
    /// </summary>
    [System.Runtime.InteropServices.MarshalAs(System.Runtime.InteropServices.UnmanagedType.ByValArray, SizeConst = RGBDATALENGTH, ArraySubType = System.Runtime.InteropServices.UnmanagedType.Struct)]
    public byte[] ImageData;

}

 NativeMethods.cs:

using UnityEngine;
using System.Collections;
using System.Runtime.InteropServices;
using System;

// P/Invoke surface for the astra_android_bridge native library; entry-point
// names match the extern "C" functions in astra_android_bridge.cpp.
// NOTE(review): the native functions return C++ bool (1 byte) while default
// C# bool marshaling assumes a 4-byte value on many runtimes — confirm on
// the target Mono/IL2CPP runtime, or annotate the bool-returning imports
// with [return: MarshalAs(UnmanagedType.I1)].
public class NativeMethods {
    [DllImport("astra_android_bridge", EntryPoint = "openCamera")]
    public static extern bool openCamera();
    [DllImport("astra_android_bridge", EntryPoint = "closeCamera")]
    public static extern bool closeCamera();
    [DllImport("astra_android_bridge", EntryPoint = "startCamera")]
    public static extern bool startCamera();
    [DllImport("astra_android_bridge", EntryPoint = "stopCamera")]
    public static extern bool stopCamera();
    [DllImport("astra_android_bridge", EntryPoint = "getAvailableCameraModes")]
    public static extern void getAvailableCameraModes(ref ImageSupportedModeList pSupportedModeList);
    [DllImport("astra_android_bridge", EntryPoint = "getCameraMode")]
    public static extern void getCameraMode(ref PreviewMode Mode);
    [DllImport("astra_android_bridge", EntryPoint = "setCameraMode")]
    public static extern void setCameraMode(PreviewMode Mode);
    // Caller passes a pinned/allocated CRGBImage-compatible buffer.
    [DllImport("astra_android_bridge", EntryPoint = "GetImageData")]
    public static extern void GetImageData(IntPtr ImageData);
    // Returns a pointer into a shared native buffer (overwritten per frame).
    [DllImport("astra_android_bridge", EntryPoint = "GetYUV420Data")]
    public static extern IntPtr GetYUV420Data();
    [DllImport("astra_android_bridge", EntryPoint = "dispose")]
    public static extern void dispose();

}

AndroidManifest.xml:

<?xml version="1.0" encoding="utf-8"?>
<!-- Unity player manifest for the camera-plugin test app. -->
<manifest
		xmlns:android="http://schemas.android.com/apk/res/android"
		package="com.unity3d.player"
	android:installLocation="preferExternal"
		android:versionCode="1"
		android:versionName="1.0">

  <uses-sdk
		android:minSdkVersion="12"
		android:targetSdkVersion="18"/>

  <supports-screens
			android:smallScreens="true"
			android:normalScreens="true"
			android:largeScreens="true"
			android:xlargeScreens="true"
			android:anyDensity="true"/>

  <uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW"/>
  <uses-permission android:name="android.permission.SYSTEM_OVERLAY_WINDOW"/>
  <uses-permission android:name="android.permission.MOUNT_UNMOUNT_FILESYSTEMS"/>
  <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
  <!-- Required by wjt.camera.plugin.AndroidCamera. -->
  <uses-permission android:name="android.permission.CAMERA" />
  <uses-feature android:name="android.hardware.camera" />

  <!-- NOTE(review): android:debuggable="true" must not ship in a release build. -->
  <application
	android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
	android:icon="@drawable/app_icon"
			android:label="@string/app_name"
			android:debuggable="true">
    <activity android:name="com.unity3d.player.UnityPlayerActivity"
							android:label="@string/app_name">
      <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
      </intent-filter>

      <meta-data android:name="unityplayer.UnityActivity" android:value="true" />
    </activity>
  </application>
</manifest>

 

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章