交叉编译OPENCV-FFMPEG-X264,并且在安卓中调用这三个包直接拉流rtsp。

参考了下面博主的文章,直接使用了博主提供的资源。已经测试过,该工具是可以正常运行的。

我没有自己编译文件,但是博主的文件是有用的!!!

项目地址

// 这就是调用opencv拉流代码。
// nanodet  
    {
        ncnn::MutexLockGuard g(lock);

        if (g_yolo)
        {
            std::vector<Object> objects;
            std::vector<Object> objects_s;
            std::string rtsp1 = "rtsp://XXXXXXXX/h265/ch1/main/av_stream";
            cv::VideoCapture stream1 = cv::VideoCapture(rtsp1,cv::CAP_FFMPEG);
            if (1){
                cv::Mat frame;
                stream1.read(frame);
                cv::imwrite("/sdcard/test/output2.jpg", frame); // 保存图片
            }
// CMakeLists.txt文件内容
# cmake_minimum_required() must be the FIRST command in the file: it
# establishes the policy baseline that project() and everything after it
# rely on. Calling it after project() (as the original did) is a bug.
cmake_minimum_required(VERSION 3.10)

project(yolov8ncnn)

# ncnn ships an exported CMake package config; point ncnn_DIR at it so
# find_package() resolves the `ncnn` imported target used at link time.
set(ncnn_DIR ${CMAKE_SOURCE_DIR}/ncnn-20220420-android-vulkan/arm64-v8a/lib/cmake/ncnn)
find_package(ncnn REQUIRED)

# Roots of the prebuilt third-party SDKs (machine-specific absolute paths;
# NOTE(review): consider making these relative to ${CMAKE_SOURCE_DIR} so the
# project builds on other machines). Factored into variables so each path
# is written exactly once.
set(JNI_ROOT /home/sjy/zzc/ncnn-android-yolov8/app/src/main/jni)
set(FFMPEG_ROOT ${JNI_ROOT}/ffmpeg/product/arm64-v8a)
set(OPENCV_ROOT ${JNI_ROOT}/Opencv-4.6.0/product/arm64-v8a/sdk/native)
set(X264_ROOT ${JNI_ROOT}/X264/product/arm64-v8a)

# FFmpeg headers and library search path
include_directories(${FFMPEG_ROOT}/include)
link_directories(${FFMPEG_ROOT}/lib)

# OpenCV headers and library search path
include_directories(${OPENCV_ROOT}/jni/include)
link_directories(${OPENCV_ROOT}/libs/arm64-v8a)

# x264 headers and library search path
include_directories(${X264_ROOT}/include)
link_directories(${X264_ROOT}/lib)



# --- FFmpeg imported targets -------------------------------------------------
# Each prebuilt libNAME.so is wrapped in a SHARED IMPORTED target named NAME
# so it can be linked by target name. The foreach replaces nine hand-written
# add_library/set_target_properties pairs; the resulting target names and
# locations are identical to the originals.
set(FFMPEG_LIB_DIR /home/sjy/zzc/ncnn-android-yolov8/app/src/main/jni/ffmpeg/product/arm64-v8a/lib)
foreach(ff_lib avcodec avformat avutil swscale swresample avdevice avfilter)
    add_library(${ff_lib} SHARED IMPORTED)
    set_target_properties(${ff_lib} PROPERTIES
        IMPORTED_LOCATION ${FFMPEG_LIB_DIR}/lib${ff_lib}.so)
endforeach()

# --- OpenCV imported targets -------------------------------------------------
set(OPENCV_LIB_DIR /home/sjy/zzc/ncnn-android-yolov8/app/src/main/jni/Opencv-4.6.0/product/arm64-v8a/sdk/native/libs/arm64-v8a)
foreach(cv_lib
        opencv_videoio
        opencv_photo
        opencv_highgui
        opencv_imgproc
        opencv_imgcodecs
        opencv_dnn
        opencv_core)
    add_library(${cv_lib} SHARED IMPORTED)
    set_target_properties(${cv_lib} PROPERTIES
        IMPORTED_LOCATION ${OPENCV_LIB_DIR}/lib${cv_lib}.so)
endforeach()

# --- x264 imported target ----------------------------------------------------
add_library(x264 SHARED IMPORTED)
set_target_properties(x264 PROPERTIES
    IMPORTED_LOCATION /home/sjy/zzc/ncnn-android-yolov8/app/src/main/jni/X264/product/arm64-v8a/lib/libx264.so)


# The JNI library loaded by the Android app.
add_library(yolov8ncnn SHARED yolov8ncnn.cpp yolo.cpp ndkcamera.cpp)

# BUG FIX: the original linked ${avformat}, ${avcodec}, ${avutil}, ${swscale},
# ${swresample}, ${avdevice}, ${avfilter} and ${x264}. Those names are imported
# TARGETS, not variables, so every ${...} expanded to an empty string and the
# FFmpeg/x264 libraries were silently never linked. Link the targets by name
# instead, and use the PRIVATE keyword (modern CMake signature).
target_link_libraries(yolov8ncnn PRIVATE
        ncnn
        ${OpenCV_LIBS}        # empty unless find_package(OpenCV) is re-enabled; harmless
        camera2ndk
        mediandk

        # OpenCV imported targets
        opencv_videoio
        opencv_photo
        opencv_highgui
        opencv_imgproc
        opencv_imgcodecs
        opencv_dnn
        opencv_core

        # FFmpeg imported targets
        avformat
        avdevice
        avcodec
        avutil
        swscale
        swresample
        avfilter

        # x264 imported target
        x264

        # Android NDK system libraries
        jnigraphics
        android
        log
        )

实际使用中还会遇到各种问题。需要把 FFMPEG 和 X264 编译出来的 .so 文件复制到上面 link_directories 指向的两个库目录(以及 APK 的 jniLibs 目录)里面,这样可以解决其中一部分链接和加载的问题。

项目的结构

拉流时需要确保应用已声明网络权限(INTERNET)并且设备网络已连通,否则调用拉流会直接导致应用闪退。

### 使用Python从RTSP视频抓取帧并保存为图片

#### 方法一:使用FFmpeg-Python库

通过`ffmpeg-python`库可以直接调用FFmpeg命令来捕获RTSP中的帧,并将其转换为图像文件。此方法适合那些希望利用FFmpeg强大功能而不必深入理解其内部工作原理的人。

```python
import ffmpeg

input_stream = 'rtsp://your_rtsp_url'
output_image_path = './frame.png'

(
    ffmpeg
    .input(input_stream, rtsp_transport='tcp')
    .filter('select', 'gte(n,{})'.format(0))  # 抓取第几帧,这里为抓取第一帧
    .output(output_image_path, vframes=1)
    .run()
)
```

这种方法简单易用,只需指定输入源和输出路径即可完成操作[^1]。

#### 方法二:基于OpenCV读取RTSP并截屏

另一种常见的做法是采用OpenCV库连接到RTSP服务器,逐帧读取视频数据,在适当时候保存选定的帧作为静态图片。这种方式提供了更多的灵活性,允许开发者自定义何时以及如何处理每一帧的数据。

```python
import cv2

cap = cv2.VideoCapture('rtsp://your_rtsp_url')

if not cap.isOpened():
    print("Error opening video stream or file")

while cap.isOpened():
    ret, frame = cap.read()
    if ret:
        # 显示每帧画面 (可选)
        # cv2.imshow('Frame', frame)

        # 按下q键退出循环,或按's'保存当前帧为图片
        key = cv2.waitKey(1) & 0xFF
        if key == ord('q'):
            break
        elif key == ord('s'):
            cv2.imwrite('./saved_frame.jpg', frame)
    else:
        break

cap.release()
cv2.destroyAllWindows()
```

需要注意的是,当保存图像时应确保路径不含特殊字符如中文或空格,以免引起错误[^4]。

#### 方法三:借助GStreamer工具实现相同目标

对于更高级的应用场景,还可以考虑使用GStreamer框架配合相应的插件来进行同样的任务。它不仅支持多种多媒体协议,而且具有良好的跨平台兼容性和扩展能力。

```bash
gst-launch-1.0 rtspsrc location=rtsp://your_rtsp_url ! decodebin ! jpegenc ! multifilesink location=./image%02d.jpg
```

上述Shell命令展示了怎样配置一个简单的管道结构,用于接收来自特定URL的RTSP直播信号,并将之编码成JPEG格式的照片序列存储于本地磁盘上[^3]。
评论 5
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值