记录一下之前项目的实际使用过程。
将按照 Java 层 → JNI 接口 → JNI 代码中使用 FFmpeg 解码 的顺序进行介绍。
首先Java层:
/**
 * Thin Java-side wrapper around a native FFmpeg decoder.
 *
 * <p>Lifecycle: construct with the target {@link Surface}, feed each encoded
 * frame to {@link #decodeStream(byte[], int)}, then call {@link #release()}
 * exactly once when done. Subclasses receive decoded frames via
 * {@link #getOneFrame(int[], int, int)}.
 *
 * <p>Declared {@code abstract} because {@link #getOneFrame} has no body —
 * the original class was missing the modifier and would not compile.
 */
public abstract class CodecWrapper {
    // Load the JNI glue library plus the FFmpeg shared libraries.
    // Order matters: dependents must be loaded after their dependencies.
    static {
        System.loadLibrary("codec");
        System.loadLibrary("avutil-55");
        System.loadLibrary("swresample-2");
        System.loadLibrary("avcodec-57");
        System.loadLibrary("avformat-57");
        System.loadLibrary("swscale-4");
        System.loadLibrary("postproc-54");
        System.loadLibrary("avfilter-6");
        System.loadLibrary("avdevice-57");
    }

    /**
     * Initializes the FFmpeg decoder on the native side.
     *
     * @return an opaque native handle (a pointer stored in a {@code long})
     *         to be passed back to {@code decode_stream}/{@code release_codec}
     */
    public native long get_codec();

    /**
     * Decodes one encoded frame. Called once per incoming frame; the decoded
     * image is rendered into {@code surface} on the native side.
     *
     * @param frame   encoded frame bytes
     * @param length  number of valid bytes in {@code frame}
     * @param decoder native handle returned by {@link #get_codec()}
     * @param surface render target for the decoded image
     */
    public native void decode_stream(byte[] frame, int length, long decoder, Surface surface);

    /**
     * Releases the native decoder resources to avoid a memory leak.
     *
     * @param decoder native handle returned by {@link #get_codec()}
     */
    public native void release_codec(long decoder);

    // Render target; decoded frames are pushed into this Surface's buffer
    // (typically obtained from a SurfaceView).
    private final Surface mSurface;
    // Opaque native decoder handle; 0 means "not initialized or already released".
    private long mDecoderHandle;

    public CodecWrapper(Surface surface) {
        mSurface = surface;
        init();
    }

    /**
     * Acquires the native decoder handle. Declared {@code final} because it is
     * invoked from the constructor — an overridable call from a constructor
     * would run subclass code before the subclass is initialized.
     */
    public final void init() {
        mDecoderHandle = get_codec();
    }

    /** Feeds one encoded frame to the native decoder. */
    public void decodeStream(byte[] frame, int length) {
        decode_stream(frame, length, mDecoderHandle, mSurface);
    }

    /**
     * Releases native resources. Guarded so a double call (e.g. explicit
     * release followed by finalization) does not pass a stale handle to JNI.
     */
    public void release() {
        if (mDecoderHandle != 0) {
            release_codec(mDecoderHandle);
            mDecoderHandle = 0;
        }
    }

    // Safety net only — callers should still invoke release() explicitly,
    // since finalization timing is unpredictable (and finalize is deprecated).
    @Override
    protected void finalize() throws Throwable {
        try {
            release();
        } finally {
            try {
                super.finalize();
            } catch (Throwable e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Invoked from JNI when one frame has been decoded; forwards the pixel
     * data to the subclass hook.
     */
    public void onFrameDecode(int[] data, int width, int height) {
        getOneFrame(data, width, height);
    }

    /** Subclass hook: receives one decoded frame's pixel data and dimensions. */
    abstract void getOneFrame(int[] data, int width, int height);
}
**首先,**需要调用CodecWrapper的构造方法,传入Surface对象,然后调用native方法get_codec,对FFmpeg的解码器进行初始化。
**之后,**每来一帧数据,就会调用decode_stream方法进行真正的数据解码。
**解码完成之后,**调用release_codec方法释放JNI层的资源,防止出现内存泄露。
接下来看JNI接口的部分代码:
com_xxx_xxx_CodecWrapper.h
#include <jni.h>

#ifdef __cplusplus
extern "C" {
#endif
/*
 * Method:    get_codec — initialize the FFmpeg decoder.
 * Signature: ()J
 * Note: an underscore in a Java native method name must be escaped as "_1"
 * in the C symbol, and the Java side declares `long`, so the return type is
 * jlong (jint would truncate the pointer handle on 64-bit).
 */
JNIEXPORT jlong JNICALL Java_com_xxx_CodecWrapper_get_1codec
  (JNIEnv *, jobject);
/*
 * Method:    decode_stream — decode one frame into the Surface.
 * Signature: ([BIJLandroid/view/Surface;)V
 * The third parameter is the jlong decoder handle returned by get_codec.
 */
JNIEXPORT void JNICALL Java_com_xxx_CodecWrapper_decode_1stream
  (JNIEnv *, jobject, jbyteArray, jint, jlong, jobject);
/*
 * Method:    release_codec — free native decoder resources.
 * Signature: (J)V
 */
JNIEXPORT void JNICALL Java_com_xxx_CodecWrapper_release_1codec
  (JNIEnv *, jobject, jlong);
#ifdef __cplusplus
}
#endif
com_xxx_xxx_CodecWrapper.cpp
#include <jni.h>
#include "../decoder.h"
#include "../yuv_2_rgb.h"
#include "../android/android_native_window.h"
#include <opencv2/opencv.hpp>
#include <android/bitmap.h>
#include <android/native_window.h>
#include <android/native_window_jni.h>
#include "../decoder.h"
#include "../timeutil.h"
#include <libavutil/imgutils.h>
// FFmpeg output pixel format requested from the decoder (16-bit RGB565, little-endian).
enum AVPixelFormat pixelFormat = AV_PIX_FMT_RGB565LE;
// Matching native-window format constant — presumably declared in a project
// header to mirror ANativeWindow's RGB565 format; verify they stay in sync.
int native_pix_format = PIXEL_FORMAT_RGB_565;
//对当前JVM环境的封装,记录JNIEnv、Java层的对象(Java中哪个对象调用的当前的JNI方法)、Surface对象
// Bundles the per-call JVM context so the decode callback can reach back into
// Java: the JNIEnv, the Java object that invoked the current JNI method, and
// the Surface to render into. Holds pointers to stack-local jobjects, so an
// EnvPackage is only valid for the duration of the JNI call that filled it.
typedef struct _EnvPackage{
JNIEnv *env;
jobject *obj;
jobject *surface;
} EnvPackage;
/*
 * Creates and initializes a native decoder instance and hands it to Java as
 * an opaque handle.
 *
 * Returns jlong (not `long`): the Java declaration is `native long get_codec()`,
 * and `long` is 32-bit on many ABIs, which would truncate the pointer.
 * The matching reinterpret_cast is required — a decoder* does not implicitly
 * convert to an integer, so the original `return p;` would not compile.
 */
JNIEXPORT jlong JNICALL Java_com_xxxx_CodecWrapper_get_1codec
(JNIEnv *env, jobject obj){
decoder *p = new decoder();
p->initialize(pixelFormat);
return reinterpret_cast<jlong>(p);
}
JNIEXPORT void JNICALL Java_com_xxxx_CodecWrapper_decode_1stream(JNIEnv *env, jobject obj, jbyteArray jdata, jint length, jlong this_obj_long, jobject surface){
decoder *this_obj = this_obj_long;
//将Java层的byte数组,转成JNI中的jbyte指针数组
jbyte *cdata = env->GetByteArrayElements(jdata, JNI_FALSE);
jbyte *cdata_rec = cdata;
if(cdata != NULL) {
EnvPackage package;
package.env = env;
package.obj = &obj;
package.surface = &surface;
//解码显示
this_obj->decodeFrame(cdata, length, handle_data, &package,this_obj);