步骤
1.Android端写surfaceView,设置视频格式ARGB,callback,holder,不多说
2.解码视频数据,得到一帧一帧的YUV数据(了解视频解码流程)
3.通过SurfaceView的Surface获取NativeWindow(ANativeWindow_fromSurface)
4.lock Window
5.用第三方库libyuv把YUV数据转换成RGBA
6.set buffer
7.unlock Window
实现代码
#include <jni.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>                      // usleep
#include <android/log.h>
#include <android/native_window.h>       // ANativeWindow_* rendering API
#include <android/native_window_jni.h>   // ANativeWindow_fromSurface
// codec (encode/decode)
#include "libavcodec/avcodec.h"
// container format handling (demuxing)
#include "libavformat/avformat.h"
// pixel processing
#include "include/libswscale/swscale.h"
// third-party libyuv: I420ToARGB
#include "libyuv.h"
// generated JNI header
#include "com_example_ffmpeg_FfmpegUtil.h"
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"render",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"render",FORMAT,##__VA_ARGS__);
/*
 * Decode the video file at input_jstr with FFmpeg and render every decoded
 * frame onto the given Android Surface (YUV420P -> RGBA via libyuv).
 *
 * env, jclzz : standard JNI parameters.
 * input_jstr : path of the video file to play.
 * surface    : the android.view.Surface backing the SurfaceView.
 *
 * Fixes over the original version:
 *  - every error path now releases input_cstr / the format context
 *    (goto-based cleanup instead of bare returns);
 *  - only packets of the selected video stream are fed to the video decoder
 *    (the file may also contain audio/subtitle streams);
 *  - the RGBA conversion writes with the window buffer's real stride
 *    (outBuffer.stride, in pixels, may exceed the frame width);
 *  - avcodec_close() + avformat_close_input() replace
 *    avcodec_free_context() + avformat_free_context(): in this API the codec
 *    context is owned by streams[i]->codec, so freeing it separately and then
 *    freeing the format context was a double free;
 *  - the av_malloc'ed AVPacket itself is freed after the loop.
 */
JNIEXPORT void JNICALL Java_com_example_rendor_SurfaceUtil_render(JNIEnv *env,
        jclass jclzz, jstring input_jstr, jobject surface) {
    const char *input_cstr = (*env)->GetStringUTFChars(env, input_jstr, NULL);
    AVFormatContext *aVFormatContext = NULL;
    AVCodecContext *aVCodecContext = NULL;
    ANativeWindow *native_window = NULL;
    AVPacket *aVPacket = NULL;
    AVFrame *aVFrame = NULL;
    int codec_opened = 0;
    int index = -1;
    unsigned int i;

    // Register all muxers/demuxers/codecs (mandatory in this FFmpeg version).
    av_register_all();

    aVFormatContext = avformat_alloc_context();
    if (avformat_open_input(&aVFormatContext, input_cstr, NULL, NULL) < 0) {
        LOGE("%s", "打开视频文件失败");
        goto done;  // avformat_open_input frees the context on failure
    }
    if (avformat_find_stream_info(aVFormatContext, NULL) < 0) {
        LOGE("%s", "stream_info失败");
        goto cleanup;
    }

    // Locate the first video stream.
    for (i = 0; i < aVFormatContext->nb_streams; i++) {
        if (aVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            aVCodecContext = aVFormatContext->streams[i]->codec;
            index = (int) i;
            break;
        }
    }
    if (index == -1) {
        LOGE("%s", "没有找到视频流");
        goto cleanup;
    }

    // Find and open the matching decoder.
    AVCodec *avcodec = avcodec_find_decoder(aVCodecContext->codec_id);
    if (avcodec == NULL) {
        LOGE("%s", "没有找到解码器");
        goto cleanup;
    }
    if (avcodec_open2(aVCodecContext, avcodec, NULL) < 0) {
        LOGE("%s", "打开解码器失败");
        goto cleanup;
    }
    codec_opened = 1;

    aVPacket = (AVPacket *) av_malloc(sizeof(AVPacket));
    aVFrame = av_frame_alloc();
    if (aVPacket == NULL || aVFrame == NULL) {
        goto cleanup;
    }

    native_window = ANativeWindow_fromSurface(env, surface);
    if (native_window == NULL) {
        LOGE("%s", "ANativeWindow_fromSurface failed");
        goto cleanup;
    }
    // Size the window buffer once, outside the loop; the compositor scales
    // the buffer to the on-screen view size.
    ANativeWindow_setBuffersGeometry(native_window,
            aVCodecContext->width, aVCodecContext->height,
            WINDOW_FORMAT_RGBA_8888);

    ANativeWindow_Buffer outBuffer;
    int got_picture_ptr = 0;
    while (av_read_frame(aVFormatContext, aVPacket) >= 0) {
        // Only decode packets that belong to the chosen video stream.
        if (aVPacket->stream_index == index) {
            avcodec_decode_video2(aVCodecContext, aVFrame, &got_picture_ptr,
                    aVPacket);
            if (got_picture_ptr) {
                if (ANativeWindow_lock(native_window, &outBuffer, NULL) == 0) {
                    // Convert YUV420P straight into the window buffer.
                    // outBuffer.stride is in pixels, hence stride * 4 bytes
                    // per row for RGBA_8888. The U and V planes are swapped
                    // deliberately: libyuv "ARGB" is B,G,R,A in memory, and
                    // swapping Cb/Cr flips R<->B, yielding the R,G,B,A byte
                    // order that WINDOW_FORMAT_RGBA_8888 expects.
                    I420ToARGB(aVFrame->data[0], aVFrame->linesize[0],
                            aVFrame->data[2], aVFrame->linesize[2],
                            aVFrame->data[1], aVFrame->linesize[1],
                            (uint8_t *) outBuffer.bits, outBuffer.stride * 4,
                            aVCodecContext->width, aVCodecContext->height);
                    // Unlock and post the new buffer to the display.
                    ANativeWindow_unlockAndPost(native_window);
                }
                usleep(16 * 1000);  // ~60 fps pacing; real code should obey PTS
            }
        }
        av_free_packet(aVPacket);  // drop the packet's payload each iteration
    }

cleanup:
    if (native_window != NULL) {
        ANativeWindow_release(native_window);
    }
    if (aVFrame != NULL) {
        av_frame_free(&aVFrame);
    }
    if (aVPacket != NULL) {
        av_free(aVPacket);
    }
    if (codec_opened) {
        // The codec context is owned by the stream: close it, do not free it.
        avcodec_close(aVCodecContext);
    }
    avformat_close_input(&aVFormatContext);  // also frees the context
done:
    (*env)->ReleaseStringUTFChars(env, input_jstr, input_cstr);
}
Android.mk
依赖FFmpeg的8个so(实际上用不着),以及第三方的libyuv(YUV 2 RGB)
# Android.mk — builds the JNI module "ffmpeg".
# Declares the prebuilt FFmpeg shared libraries and libyuv as
# PREBUILT_SHARED_LIBRARY modules, then compiles ffmpeg.c and links it
# against all of them.
LOCAL_PATH := $(call my-dir)
# Prebuilt FFmpeg shared libraries (one CLEAR_VARS/PREBUILT block per .so)
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := libavdevice-56.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := libpostproc-53.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)
# Third-party libyuv (YUV -> RGB conversion)
include $(CLEAR_VARS)
LOCAL_MODULE := yuv
LOCAL_SRC_FILES := libyuv.so
include $(PREBUILT_SHARED_LIBRARY)
# The application's own JNI module
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := ffmpeg.c
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/ffmpeg
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/libyuv
# -landroid is required for the NativeWindow API (without it the build fails
# to find the android/native_window*.h symbols); -llog for __android_log_print.
LOCAL_LDLIBS := -llog -landroid
LOCAL_SHARED_LIBRARIES := yuv avcodec avdevice avfilter avformat avutil postproc swresample swscale
include $(BUILD_SHARED_LIBRARY)
Application.mk
# Target ABIs to build for.
# NOTE: "//" is not a comment delimiter in ndk-build makefiles — the original
# "//" lines would be parsed as text and break the build; use "#" instead.
APP_ABI := armeabi
# Minimum platform level; below android-9 the NDK headers lack
# <android/native_window.h> and the NativeWindow API.
APP_PLATFORM := android-9
本文介绍了如何在Android应用中利用FFmpeg解码视频,并将解码后的YUV数据转换为RGBA格式,然后绘制到SurfaceView上。步骤包括设置SurfaceView、解码视频、获取NativeWindow、转换颜色空间以及更新缓冲区。
3万+

被折叠的 条评论
为什么被折叠?



