Android jni硬编码保存视频

目录

头文件:

surface版

不用surface版:


头文件:

#ifndef TERMINALCLIENT_ENCODER_H
#define TERMINALCLIENT_ENCODER_H

#include <media/NdkMediaFormat.h>
#include <media/NdkMediaCodec.h>
#include <stdio.h>
#include <string.h>
#include <fstream>
#include <iostream>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <fcntl.h>
#include <android/log.h>
#include <android/native_window.h>
#include "media/NdkMediaMuxer.h"

#include "config.h"
#include <opencv2/opencv.hpp>

#include<opencv2/videoio.hpp> // 或者
using namespace std;

// Maps an OpenCV fourcc code to the NDK MediaCodec mime type and the
// matching AMediaMuxer container format.
struct FourCCInfo{
    int fourcc;               // value from cv::VideoWriter::fourcc(...)
    const char* mime;         // e.g. "video/avc"; NULL marks the table sentinel
    OutputFormat muxerFormat; // AMEDIAMUXER_OUTPUT_FORMAT_* container constant
};

// Hardware video encoder: feeds cv::Mat (BGR) frames into an NDK
// AMediaCodec and muxes the encoded samples into an MP4 file through
// AMediaMuxer. Call open(), then writeMat() per frame, then close_m().
class VideoEncoder {

public:

    VideoEncoder();

    // Drains the encoder to EOS and releases every resource.
    // Returns 0; safe to call after a failed open().
    int close_m();

    // Looks up the mime/muxer mapping for an OpenCV fourcc code in the
    // translation table defined in the .cpp; returns NULL when unknown.
    const FourCCInfo* findInfo(int fourcc);

    // NDK objects, owned through shared_ptr with custom deleters that
    // stop + delete each object exactly once.
    std::shared_ptr<AMediaCodec> encoder;
    std::shared_ptr<AMediaFormat> format;
    std::shared_ptr<AMediaMuxer> muxer;

    float videoFrameRate;
    int32_t videoFrameCount;
    int32_t videoRotation;
    int32_t videoRotationCode;
    bool videoOrientationAuto;
    std::vector<uint8_t> buffer;

    #if __ANDROID_API__ >= 26
    // Codec input surface (surface-mode encoding only).
    ANativeWindow* surface;
    #endif
    long frameIndex;          // frames submitted so far; drives timestamps
    int width;                // configured frame width
    int height;               // configured frame height
    double frameRate;         // configured frames per second
    ssize_t videoTrackIndex;  // muxer track, -1 until OUTPUT_FORMAT_CHANGED
    int fd;                   // output file descriptor, -1 when closed

    // Opens `filename` for writing and configures/starts encoder + muxer.
    // Returns 0 on success, -1 on failure (all resources released).
    int open(string filename, int fourcc, double fps, cv::Size frameSize);

    // Dequeue timeout (microseconds) for AMediaCodec buffer operations.
    static const int64_t TIMEOUT = 2000L;

    bool sawInputEOS;
    bool sawOutputEOS;
    int32_t frameStride;
    int32_t frameWidth;
    int32_t frameHeight;
    int32_t colorFormat;
    int32_t videoWidth;
    int32_t videoHeight;

    // Encodes one BGR frame (CV_8UC3, at most width x height).
    int writeMat(cv::Mat& img);
    // Pulls encoded output into the muxer; end=true flushes to EOS.
    int drainEncoder(bool end);
    // Queues raw frame bytes as codec input (buffer-mode encoding).
    int writeBytes( uint8_t* inputBuffer, size_t inputBufferSize ) ;
    // Converts a planar I420 mat (height*3/2 rows) to packed RGB.
    cv::Mat yuv2rgb(cv::Mat& yuv);
    ~VideoEncoder();
    // NOTE: the former member `const FourCCInfo FOURCC_INFO[];` was removed.
    // An array of unknown bound is not a valid non-static data member in
    // C++, and the real table lives as a file-scope constant in the .cpp.
};

#endif //TERMINALCLIENT_ENCODER_H

surface版

buffer.format 返回 34(0x22,看起来对应 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,即厂商自定义布局——待确认),没找到对应的数据处理方法。
#include "encoder.h"

// shared_ptr deleter: stop the muxer (finalizes the MP4) before deleting it.
// A null pointer is silently ignored.
static inline void deleter_AMediaMuxer(AMediaMuxer *muxer) {
    if (NULL == muxer) return;
    AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);
}

// shared_ptr deleter: stop the codec before releasing it; ignores NULL.
static inline void deleter_AMediaCodec(AMediaCodec *codec) {
    if (NULL == codec) return;
    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
}

// shared_ptr deleter for AMediaFormat; ignores NULL.
static inline void deleter_AMediaFormat(AMediaFormat *format) {
    if (NULL == format) return;
    AMediaFormat_delete(format);
}

// fourcc -> (mime type, container format) translation table used by
// VideoEncoder::findInfo(). Terminated by a sentinel entry with mime == NULL.
const FourCCInfo FOURCC_INFO[] = {
        { cv::VideoWriter::fourcc('H', '2', '6', '4'), "video/avc", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('H', '2', '6', '5'), "video/hevc", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('H', '2', '6', '3'), "video/3gpp", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('M', 'P', '4', 'V'), "video/mp4v-es", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { 0, NULL }, // sentinel
};



// Linear scan of the file-scope translation table; the sentinel entry has
// mime == NULL. Returns NULL when the fourcc code is not supported.
const FourCCInfo* VideoEncoder::findInfo(int fourcc) {
    const FourCCInfo* entry = FOURCC_INFO;
    while (NULL != entry->mime) {
        if (entry->fourcc == fourcc) {
            return entry;
        }
        ++entry;
    }
    return NULL;
}

// Initialize every POD member. This matters for correctness: open() begins
// by calling close_m(), and close_m() closes `fd` and releases `surface`.
// With the original empty constructor those members held indeterminate
// values, so the very first open() could close a random descriptor or call
// ANativeWindow_release on a wild pointer.
VideoEncoder::VideoEncoder() {
    frameIndex = 0;
    width = 0;
    height = 0;
    frameRate = 0.;
    videoTrackIndex = -1;
    fd = -1;
#if __ANDROID_API__ >= 26
    surface = NULL;
#endif
    sawInputEOS = false;
    sawOutputEOS = false;
    frameStride = 0;
    frameWidth = 0;
    frameHeight = 0;
    colorFormat = 0;
    videoWidth = 0;
    videoHeight = 0;
    videoFrameRate = 0.f;
    videoFrameCount = 0;
    videoRotation = 0;
    videoRotationCode = 0;
    videoOrientationAuto = false;
}

// Feeds raw frame bytes to the codec's input queue, splitting across as many
// input buffers as needed. Calling with (NULL, 0) queues one empty buffer
// flagged END_OF_STREAM. Always returns 0.
int VideoEncoder::writeBytes( uint8_t* inputBuffer, size_t inputBufferSize ) {
    LOGI("[writeBytes] inputBufferSize=%u", (unsigned int)inputBufferSize);

    ssize_t bufferIndex;
    size_t  bufferSize;
    uint8_t* buffer;
    size_t  partialSize;
    bool firstCall = true; // forces at least one queued buffer (the EOS case)
    uint32_t flags;

    // NOTE(review): if dequeueInputBuffer keeps returning TRY_AGAIN_LATER
    // this loop spins forever -- there is no overall deadline here.
    while(inputBufferSize > 0 || firstCall) {
        bufferIndex = AMediaCodec_dequeueInputBuffer(encoder.get(), TIMEOUT);

        if (bufferIndex >= 0) {
            firstCall = false;
            buffer = AMediaCodec_getInputBuffer(encoder.get(), (size_t)bufferIndex, &bufferSize);
            if (NULL == buffer || 0 == bufferSize) break;

            flags = 0;
            // Copy as much of the remaining payload as fits in this buffer.
            partialSize = (inputBufferSize > bufferSize) ? bufferSize : inputBufferSize;
            if (partialSize > 0) {
                memcpy(buffer, inputBuffer, partialSize);
                inputBuffer += partialSize;
                inputBufferSize -= partialSize;
                if (inputBufferSize > 0) {
                    // More chunks of the same frame still to come.
                    flags = AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME;
                }
            } else {
                // Nothing left to send: this empty buffer carries EOS.
                flags = AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
            }


            // Timestamp in microseconds derived from the frame counter.
            AMediaCodec_queueInputBuffer(encoder.get(), (size_t)bufferIndex, 0, partialSize, frameIndex * 1000000L / frameRate, flags);
            if (NULL != inputBuffer) drainEncoder(false);
        }
    }
    return 0;
}

// Pulls every available encoded buffer out of the codec and writes it to the
// muxer. With end=true the input side is closed first and the loop runs
// until the codec emits END_OF_STREAM. Always returns 0.
int VideoEncoder::drainEncoder(bool end){
    if (end) {
#if __ANDROID_API__ >= 26
        // Surface-mode encoding: signal EOS through the input surface.
        AMediaCodec_signalEndOfInputStream(encoder.get());
#else
        // Buffer-mode encoding: queue an empty EOS-flagged input buffer.
        writeBytes(NULL, 0);
#endif
    }

    AMediaCodecBufferInfo bufferInfo;
    ssize_t bufferIndex;
    size_t  bufferSize;
    uint8_t *buffer;

    while (true) {
        bufferIndex = AMediaCodec_dequeueOutputBuffer(encoder.get(), &bufferInfo, TIMEOUT);
        if (bufferIndex >= 0) {
            buffer = AMediaCodec_getOutputBuffer(encoder.get(), (size_t)bufferIndex, &bufferSize);

            if (NULL == buffer || 0 == bufferSize){
                LOGE("Can't get output buffer");
                break;
            }

            if (videoTrackIndex >= 0) {
                // Re-stamp with the frame-counter clock before muxing.
                // NOTE(review): codec-config buffers (BUFFER_FLAG_CODEC_CONFIG)
                // are not filtered out here -- confirm the muxer tolerates them.
                bufferInfo.presentationTimeUs = frameIndex * 1000000L / frameRate;
                LOGI("Muxer write to track %d: %d byte(s)", (int)videoTrackIndex, (int)bufferInfo.size);
                AMediaMuxer_writeSampleData(muxer.get(), (size_t)videoTrackIndex, buffer, &bufferInfo);
            } else {
                // Output arrived before the track was registered; sample dropped.
                LOGE("Invalid video track !");
            }

            AMediaCodec_releaseOutputBuffer(encoder.get(), (size_t)bufferIndex, false);
            if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) break;
        } else if (AMEDIACODEC_INFO_TRY_AGAIN_LATER == bufferIndex) {
            // NOTE(review): when end=true this polls with no deadline; a
            // stalled codec makes this loop spin forever.
            if (!end) break;
        } else if (AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED == bufferIndex) {
            // First real output: register the track and start the muxer.
            videoTrackIndex = AMediaMuxer_addTrack(muxer.get(), AMediaCodec_getOutputFormat(encoder.get()));
            if (videoTrackIndex >= 0) {
                AMediaMuxer_start(muxer.get());
            }
            LOGI("New videoTrackIndex: %d", (int)videoTrackIndex);
        }
    }
    return 0;
}

// Encodes one BGR frame by blitting it into the codec's input surface.
// Returns 0 on success, -1 on bad input, -2 if the surface can't be locked,
// -3 on an unsupported surface pixel format.
int VideoEncoder::writeMat(cv::Mat& image){

    if (CV_8UC3 != image.type() || image.cols > width || image.rows > height) {
        LOGE(
                "Expected input to be a mat of maximum %d x %d of type CV_8UC3 (%d), but received %d x %d of type: %d",
                width, height, CV_8UC3,
                image.cols, image.rows, image.type()
        );
        return -1;
    }

    ANativeWindow_Buffer buffer;
    if (0 != ANativeWindow_lock(surface, &buffer, NULL)) {
        LOGE("Failed to lock the surface");
        return -2;
    }

    if (AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM == buffer.format ||
        AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM == buffer.format) {
        // Both formats are 4 bytes per pixel; buffer.stride is in pixels,
        // so the cv::Mat row step is stride * 4 bytes. The original had the
        // identical code duplicated in two branches.
        cv::Mat bufferMat(image.rows, image.cols, CV_8UC4, buffer.bits, buffer.stride * 4);
        cv::cvtColor(image, bufferMat, cv::COLOR_BGR2RGBA);
    } else {
        // The original YUV 420 branch was a placeholder that ran
        // cv::cvtColor on an empty Mat and therefore threw at runtime.
        // Fail cleanly instead; unlock so the surface isn't left locked.
        // TODO(review): implement a real NV12/I420 fill if this format
        // actually occurs on target devices.
        LOGE("Unsupported surface buffer format: %u", buffer.format);
        ANativeWindow_unlockAndPost(surface);
        return -3;
    }

    // Posting the locked buffer hands the frame to the encoder.
    ANativeWindow_unlockAndPost(surface);

    drainEncoder(false);

    frameIndex++;

    return 0;
}

//int VideoEncoder::writeMat(cv::Mat& image){
//
//    if (CV_8UC3 != image.type() || image.cols > width || image.rows > height) {
//        LOGE(
//                "Expected input to be a mat of maximum %d x %d of type CV_8UC3 (%d), but received %d x %d of type: %d",
//                width, height, CV_8UC3,
//                image.cols, image.rows, image.type()
//        );
//        return -1;
//    }
//
//#if __ANDROID_API__ >= 26
//    ANativeWindow_Buffer buffer;
//   if (0 != ANativeWindow_lock(surface, &buffer, NULL)) {
//       LOGE("Failed to lock the surface");
//   } else {
//       if (AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM == buffer.format) {
//           Mat bufferMat(image.rows, image.cols, CV_8UC4, buffer.bits, buffer.stride * 4);
//           cvtColor(image, bufferMat, CV_BGR2RGBA);
//       } else {
//           LOGE("Unknow surface buffer format: %u", buffer.format);
//       }
//
//       ANativeWindow_unlockAndPost(surface);
//   }
//#else
//    LOGV("[write] image: %d  x %d", image.cols, image.rows);
//
//    //OpenCV don't support RGB to NV12 so we need to connvert to YV12 and then manually changed it to NV12
//    Mat imageYV12;
//    cvtColor(image, imageYV12, CV_BGR2YUV_YV12);
//
//    //convert from YV12 to NV12
//    size_t yPlaneSize = width * height;
//    size_t vPlaneSize = yPlaneSize / 4;
//
//    Mat channels[2] = {
//            Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize + vPlaneSize ).clone(),
//            Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize ).clone()
//    };
//    Mat vuMat( vPlaneSize, 1, CV_8UC2, imageYV12.ptr() + yPlaneSize );
//    merge(channels, 2, vuMat);
//
//    writeBytes( imageYV12.ptr(), imageYV12.rows * imageYV12.cols );
//#endif
//
//    drainEncoder(false);
//
//    frameIndex++;
//}
// Opens `filename` for writing, configures an HEVC encoder with an input
// surface, and creates the MP4 muxer. Returns 0 on success, -1 on failure
// (close_m() is called on every error path).
int VideoEncoder::open(std::string filename, int fourcc, double fps, cv::Size frameSize ){
    media_status_t status;

    // Release any previous session before configuring a new one.
    close_m();

    // NOTE(review): the `fourcc` parameter is ignored -- the findInfo() path
    // is commented out below and HEVC is hard-wired. The literal packs
    // 'H','2','6','5' into the HIGH bytes, which is the reverse byte order
    // of cv::VideoWriter::fourcc(); harmless only because the value is
    // never consulted (mime and muxerFormat are given explicitly).
    FourCCInfo info = {
            ('H' << 24) | ('2' << 16) | ('6' << 8) | '5',
            "video/hevc",
            AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4
    };

    //    const FourCCInfo* info = findInfo(fourcc);
    //    if (NULL == info) {
    //        LOGE("ERROR: findInfo");
    //        return false;
    //    }

    //    format = AMediaFormat_new();

    format = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), deleter_AMediaFormat);

    if (NULL == format.get()) {
        LOGE("ERROR: AMediaFormat_new");
        close_m();
        return -1;
    }

    LOGI("mime: %s, width: %d, height: %d, fps: %f", info.mime, frameSize.width, frameSize.height, fps);

    AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, info.mime);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_WIDTH, frameSize.width);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_HEIGHT, frameSize.height);
    AMediaFormat_setFloat(format.get(), AMEDIAFORMAT_KEY_FRAME_RATE, (float)fps);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 5);
    // Bitrate heuristic: 5 bits per pixel per second.
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, frameSize.width * frameSize.height * 5);


//    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_FORMAT,19);
    //            COLOR_FormatYUV420Planar

    encoder = std::shared_ptr<AMediaCodec>(AMediaCodec_createEncoderByType(info.mime), deleter_AMediaCodec);
//    std::shared_ptr<AMediaCodec> encoder(AMediaCodec_createEncoderByType(info.mime), deleter_AMediaCodec);
    //    if (!encoder) {
    //        return -3;
    //    }

    //    encoder = AMediaCodec_createEncoderByType(info->mime);
    if (NULL == encoder.get()) {
        LOGE("ERROR: AMediaCodec_createEncoderByType");
        close_m();
        return -1;
    }

    status = AMediaCodec_configure(encoder.get(), format.get(), NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    if (AMEDIA_OK != status) {
        LOGE("ERROR: AMediaCodec_configure (%d)", status);
        close_m();
        return -1;
    }

    // Surface-mode input: frames are blitted into this window in writeMat().
    status = AMediaCodec_createInputSurface(encoder.get(), &surface);
    if (AMEDIA_OK != status || NULL == surface) {
        LOGE("ERROR: AMediaCodec_createInputSurface (%d)", status);
        close_m();
        return -1;
    }

    AMediaCodec_start(encoder.get());

    fd = ::open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666);
    if (fd < 0) {
        LOGE("ERROR: open");
        close_m();
        return -1;
    }

    //    muxer = AMediaMuxer_new(fd, info->muxerFormat);
    muxer = std::shared_ptr<AMediaMuxer>(AMediaMuxer_new(fd, info.muxerFormat), deleter_AMediaMuxer);
//    std::shared_ptr<AMediaMuxer> muxer(AMediaMuxer_new(fd, info.muxerFormat), deleter_AMediaMuxer);
    if (NULL == muxer) {
        LOGE("ERROR: AMediaMuxer_new");
        close_m();
        return -1;
    }

    //    AMediaMuxer_setOrientationHint(muxer, params.get(CAP_PROP_ORIENTATION_META, 0));

    frameIndex = 0;
    width = frameSize.width;
    height = frameSize.height;
    frameRate = fps;
    videoTrackIndex = -1; // set by drainEncoder on OUTPUT_FORMAT_CHANGED

    return 0;

}
// NOTE(review): intentionally empty -- resources are only released by an
// explicit close_m() call. If the owner forgets to call it, `fd` and
// `surface` leak (the shared_ptr members do clean up their codec/muxer/
// format via the custom deleters when the object is destroyed).
VideoEncoder::~ VideoEncoder() {
    //    deleter_AMediaCodec(mediaCodec.get());
    //    deleter_AMediaExtractor(extractor.get());
}

// Flushes pending output, finalizes the MP4 and releases every resource.
// Returns 0. Safe to call repeatedly.
int VideoEncoder::close_m(){
    // Drain remaining output into the muxer before tearing anything down.
    if (videoTrackIndex >= 0 && NULL != muxer) {
        drainEncoder(true);
    }

    // Release ONLY through the shared_ptr custom deleters: they already call
    // AMediaCodec_stop/AMediaCodec_delete and AMediaMuxer_stop/AMediaMuxer_delete.
    // The original code additionally called AMediaCodec_delete()/
    // AMediaMuxer_delete() on the raw pointers first, so the deleters then
    // ran on already-freed objects -- a double delete.
    encoder = NULL; // stops + deletes the codec
    muxer = NULL;   // stops + deletes the muxer (writes the MP4 trailer)
    format = NULL;

#if __ANDROID_API__ >= 26
    if (NULL != surface) ANativeWindow_release(surface);
    surface = NULL;
#endif

    if (fd >= 0) ::close(fd);
    fd = -1;

    frameIndex = 0;
    width = 0;
    height = 0;
    frameRate = 0.;
    videoTrackIndex = -1;
    return 0;
}

// Converts a planar I420 buffer (height*3/2 rows of CV_8UC1 data) into a
// packed RGB image.
cv::Mat VideoEncoder::yuv2rgb(cv::Mat& yuv) {
    cv::Mat converted;
    cv::cvtColor(yuv, converted, cv::COLOR_YUV2RGB_I420);
    return converted;
}

不用surface版:

#include "encoder.h"
#include <time.h>

// Current value of the monotonic clock in nanoseconds. Unaffected by
// wall-clock adjustments; suitable for computing deadlines.
int64_t systemTime() {
    struct timespec now;
    clock_gettime(CLOCK_MONOTONIC, &now);
    return (int64_t)now.tv_sec * 1000000000LL + now.tv_nsec;
}
// shared_ptr deleter: stop the muxer (finalizes the MP4) and delete it.
// No-op when passed NULL.
static inline void deleter_AMediaMuxer(AMediaMuxer *muxer) {
    if (NULL == muxer) return;
    AMediaMuxer_stop(muxer);
    AMediaMuxer_delete(muxer);
}

// shared_ptr deleter: stop then delete the codec. No-op when passed NULL.
static inline void deleter_AMediaCodec(AMediaCodec *codec) {
    if (NULL == codec) return;
    AMediaCodec_stop(codec);
    AMediaCodec_delete(codec);
}

// shared_ptr deleter for AMediaFormat. No-op when passed NULL.
static inline void deleter_AMediaFormat(AMediaFormat *format) {
    if (NULL == format) return;
    AMediaFormat_delete(format);
}

// fourcc -> (mime type, container format) translation table used by
// VideoEncoder::findInfo(). Terminated by a sentinel entry with mime == NULL.
const FourCCInfo FOURCC_INFO[] = {
        { cv::VideoWriter::fourcc('H', '2', '6', '4'), "video/avc", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('H', '2', '6', '5'), "video/hevc", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('H', '2', '6', '3'), "video/3gpp", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { cv::VideoWriter::fourcc('M', 'P', '4', 'V'), "video/mp4v-es", AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 },
        { 0, NULL }, // sentinel
};



// Scan the translation table for a matching fourcc; the final entry is a
// sentinel whose mime is NULL. Returns NULL for unsupported codes.
const FourCCInfo* VideoEncoder::findInfo(int fourcc) {
    for (size_t i = 0; NULL != FOURCC_INFO[i].mime; ++i) {
        if (FOURCC_INFO[i].fourcc == fourcc) {
            return &FOURCC_INFO[i];
        }
    }
    return NULL;
}

// Initialize every POD member. open() calls close_m() first, and close_m()
// closes `fd` and releases `surface`; with the original empty constructor
// those held indeterminate values, so the very first open() could close a
// random descriptor or release a wild window pointer.
VideoEncoder::VideoEncoder() {
    frameIndex = 0;
    width = 0;
    height = 0;
    frameRate = 0.;
    videoTrackIndex = -1;
    fd = -1;
#if __ANDROID_API__ >= 26
    surface = NULL;
#endif
    sawInputEOS = false;
    sawOutputEOS = false;
    frameStride = 0;
    frameWidth = 0;
    frameHeight = 0;
    colorFormat = 0;
    videoWidth = 0;
    videoHeight = 0;
    videoFrameRate = 0.f;
    videoFrameCount = 0;
    videoRotation = 0;
    videoRotationCode = 0;
    videoOrientationAuto = false;
}

// Feeds raw frame bytes to the codec's input queue, splitting across as many
// input buffers as needed. Calling with (NULL, 0) queues one empty buffer
// flagged END_OF_STREAM. Always returns 0.
int VideoEncoder::writeBytes( uint8_t* inputBuffer, size_t inputBufferSize ) {
    LOGI("[writeBytes] inputBufferSize=%u", (unsigned int)inputBufferSize);

    ssize_t bufferIndex;
    size_t  bufferSize;
    uint8_t* buffer;
    size_t  partialSize;
    bool firstCall = true; // forces at least one queued buffer (the EOS case)
    uint32_t flags;

    // NOTE(review): if dequeueInputBuffer keeps returning TRY_AGAIN_LATER
    // this loop spins forever -- there is no overall deadline here.
    while(inputBufferSize > 0 || firstCall) {
        bufferIndex = AMediaCodec_dequeueInputBuffer(encoder.get(), TIMEOUT);

        if (bufferIndex >= 0) {
            firstCall = false;
            buffer = AMediaCodec_getInputBuffer(encoder.get(), (size_t)bufferIndex, &bufferSize);
            if (NULL == buffer || 0 == bufferSize) break;

            flags = 0;
            // Copy as much of the remaining payload as fits in this buffer.
            partialSize = (inputBufferSize > bufferSize) ? bufferSize : inputBufferSize;
            if (partialSize > 0) {
                memcpy(buffer, inputBuffer, partialSize);
                inputBuffer += partialSize;
                inputBufferSize -= partialSize;
                if (inputBufferSize > 0) {
                    // More chunks of the same frame still to come.
                    flags = AMEDIACODEC_BUFFER_FLAG_PARTIAL_FRAME;
                }
            } else {
                // Nothing left to send: this empty buffer carries EOS.
                flags = AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;
            }


            // Timestamp in microseconds derived from the frame counter.
            AMediaCodec_queueInputBuffer(encoder.get(), (size_t)bufferIndex, 0, partialSize, frameIndex * 1000000L / frameRate, flags);
            if (NULL != inputBuffer) drainEncoder(false);
        }
    }
    return 0;
}

// Pulls every available encoded buffer out of the codec and writes it to
// the muxer. With end=true the input side is closed first and the loop
// runs until END_OF_STREAM (bounded by a wall-clock deadline).
// Always returns 0.
int VideoEncoder::drainEncoder(bool end){
    if (end) {
#if __ANDROID_API__ >= 26
        // Surface-mode encoding: signal EOS through the input surface.
        AMediaCodec_signalEndOfInputStream(encoder.get());
#else
        // Buffer-mode encoding: queue an empty EOS-flagged input buffer.
        writeBytes(NULL, 0);
#endif
    }

    AMediaCodecBufferInfo bufferInfo;
    ssize_t bufferIndex;
    size_t  bufferSize;
    uint8_t *buffer;
    int64_t endTime = -1; // nanosecond deadline, armed lazily
    bool sawOutputEOS = false;
    while (!sawOutputEOS) {
        bufferIndex = AMediaCodec_dequeueOutputBuffer(encoder.get(), &bufferInfo, TIMEOUT);
        if (bufferIndex >= 0) {
            buffer = AMediaCodec_getOutputBuffer(encoder.get(), (size_t)bufferIndex, &bufferSize);

            if (NULL == buffer || 0 == bufferSize){
                LOGE("Can't get output buffer");
                break;
            }

            if (videoTrackIndex >= 0) {
                // Re-stamp with the frame-counter clock before muxing.
                bufferInfo.presentationTimeUs = frameIndex * 1000000L / frameRate;
                LOGI("Muxer write to track %d: %d byte(s)", (int)videoTrackIndex, (int)bufferInfo.size);
                AMediaMuxer_writeSampleData(muxer.get(), (size_t)videoTrackIndex, buffer, &bufferInfo);
            } else {
                // Output arrived before the track was registered; sample dropped.
                LOGE("Invalid video track !");
            }

            AMediaCodec_releaseOutputBuffer(encoder.get(), (size_t)bufferIndex, false);
            if (bufferInfo.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
                sawOutputEOS = true;
            }
        } else if (AMEDIACODEC_INFO_TRY_AGAIN_LATER == bufferIndex) {
            if (!end) {
                // Nothing ready and we're not flushing: caller drains again later.
                break;
            } else {
                // Flushing to EOS: arm a deadline. TIMEOUT is in MICROSECONDS
                // (AMediaCodec dequeue units) while systemTime() returns
                // NANOSECONDS, hence the * 1000. The original added raw
                // TIMEOUT, producing a 2 us deadline that always expired
                // immediately and aborted the final flush.
                if (endTime == -1) {
                    endTime = systemTime() + TIMEOUT * 1000LL;
                }
                if (systemTime() > endTime) {
                    LOGE("drainEncoder timed out!");
                    break;
                }
            }
        } else if (AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED == bufferIndex) {
            // First real output: register the track and start the muxer.
            videoTrackIndex = AMediaMuxer_addTrack(muxer.get(), AMediaCodec_getOutputFormat(encoder.get()));
            if (videoTrackIndex >= 0) {
                AMediaMuxer_start(muxer.get());
            }
            LOGI("New videoTrackIndex: %d", (int)videoTrackIndex);
        } else {
            // Unexpected status (e.g. buffers-changed on old APIs): bail out
            // after the same deadline instead of spinning forever.
            if (endTime == -1) {
                endTime = systemTime() + TIMEOUT * 1000LL;
            }
            if (systemTime() > endTime) {
                // Original used a "{}" placeholder with a printf-style logger,
                // which printed garbage; use a proper format specifier.
                LOGE("drainEncoder timed out! bufferIndex: %d", (int)bufferIndex);
                break;
            }
        }
    }
    return 0;
}

// Encodes one BGR frame in buffer mode: convert BGR -> YV12, repack the
// chroma planes into NV12 (the encoder was configured with color format 21,
// COLOR_FormatYUV420SemiPlanar), then queue the bytes.
// Returns 0 on success, -1 on invalid input.
int VideoEncoder::writeMat(cv::Mat& image){

    if (CV_8UC3 != image.type() || image.cols > width || image.rows > height) {
        LOGE(
                "Expected input to be a mat of maximum %d x %d of type CV_8UC3 (%d), but received %d x %d of type: %d",
                width, height, CV_8UC3,
                image.cols, image.rows, image.type()
        );
        return -1;
    }

    // OpenCV has no direct BGR->NV12 conversion, so go through YV12
    // (planar layout: Y, then V, then U) and repack in place.
    cv::Mat imageYV12;
    cvtColor(image, imageYV12, cv::COLOR_BGR2YUV_YV12);

    // Plane sizes MUST come from the actual frame, not the configured
    // width/height: the original used width*height, which read past the end
    // of imageYV12 whenever the submitted frame was smaller than the size
    // passed to open().
    size_t yPlaneSize = (size_t)image.cols * (size_t)image.rows;
    size_t vPlaneSize = yPlaneSize / 4;

    cv::Mat channels[2] = {
            // U plane: in YV12 it follows Y and V. Cloned before the
            // in-place merge below overwrites the source region.
            cv::Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize + vPlaneSize ).clone(),
            // V plane: directly after Y.
            cv::Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize ).clone()
    };
    // Interleave U,V over the old V+U region -> NV12 chroma (UVUV...).
    cv::Mat vuMat( vPlaneSize, 1, CV_8UC2, imageYV12.ptr() + yPlaneSize );
    merge(channels, 2, vuMat);

    // imageYV12 is CV_8UC1 with rows = image.rows * 3 / 2, so rows*cols is
    // exactly the NV12 byte count.
    writeBytes( imageYV12.ptr(), (size_t)imageYV12.rows * imageYV12.cols );
    drainEncoder(false);

    frameIndex++;

    return 0;
}

//int VideoEncoder::writeMat(cv::Mat& image){
//
//    if (CV_8UC3 != image.type() || image.cols > width || image.rows > height) {
//        LOGE(
//                "Expected input to be a mat of maximum %d x %d of type CV_8UC3 (%d), but received %d x %d of type: %d",
//                width, height, CV_8UC3,
//                image.cols, image.rows, image.type()
//        );
//        return -1;
//    }
//
//#if __ANDROID_API__ >= 26
//    ANativeWindow_Buffer buffer;
//   if (0 != ANativeWindow_lock(surface, &buffer, NULL)) {
//       LOGE("Failed to lock the surface");
//   } else {
//       if (AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM == buffer.format) {
//           Mat bufferMat(image.rows, image.cols, CV_8UC4, buffer.bits, buffer.stride * 4);
//           cvtColor(image, bufferMat, CV_BGR2RGBA);
//       } else {
//           LOGE("Unknow surface buffer format: %u", buffer.format);
//       }
//
//       ANativeWindow_unlockAndPost(surface);
//   }
//#else
//    LOGV("[write] image: %d  x %d", image.cols, image.rows);
//
//    //OpenCV don't support RGB to NV12 so we need to connvert to YV12 and then manually changed it to NV12
//    Mat imageYV12;
//    cvtColor(image, imageYV12, CV_BGR2YUV_YV12);
//
//    //convert from YV12 to NV12
//    size_t yPlaneSize = width * height;
//    size_t vPlaneSize = yPlaneSize / 4;
//
//    Mat channels[2] = {
//            Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize + vPlaneSize ).clone(),
//            Mat( vPlaneSize, 1, CV_8UC1, imageYV12.ptr() + yPlaneSize ).clone()
//    };
//    Mat vuMat( vPlaneSize, 1, CV_8UC2, imageYV12.ptr() + yPlaneSize );
//    merge(channels, 2, vuMat);
//
//    writeBytes( imageYV12.ptr(), imageYV12.rows * imageYV12.cols );
//#endif
//
//    drainEncoder(false);
//
//    frameIndex++;
//}

// Flushes pending output, finalizes the file and releases every resource.
// Returns 0; safe to call repeatedly.
int VideoEncoder::close_m(){
    // Drain remaining encoded output into the muxer before releasing it.
    if (videoTrackIndex >= 0 && NULL != muxer) {
        drainEncoder(true);
    }

#if __ANDROID_API__ >= 26
    if (NULL != surface) ANativeWindow_release(surface);
#endif

    if (fd >= 0) ::close(fd);

    // Dropping the shared_ptrs runs the custom deleters, which stop and
    // delete each NDK object exactly once (the muxer deleter also writes
    // the MP4 trailer via AMediaMuxer_stop).
    format.reset();
    encoder.reset();
    muxer.reset();
#if __ANDROID_API__ >= 26
    surface = NULL;
#endif
    frameIndex = 0;
    width = 0;
    height = 0;
    frameRate = 0.;
    videoTrackIndex = -1;
    fd = -1;
    return 0;
}
// Opens `filename` for writing and configures an HEVC encoder in buffer
// mode (NV12 input, color format 21) plus the MP4 muxer. Returns 0 on
// success, -1 on failure (close_m() is called on every error path).
int VideoEncoder::open(std::string filename, int fourcc, double fps, cv::Size frameSize ){
    media_status_t status;

    // Release any previous session before configuring a new one.
    close_m();

    // NOTE(review): the `fourcc` parameter is ignored -- the findInfo() path
    // is commented out below and HEVC is hard-wired. The literal packs
    // 'H','2','6','5' into the HIGH bytes, which is the reverse byte order
    // of cv::VideoWriter::fourcc(); harmless only because the value is
    // never consulted (mime and muxerFormat are given explicitly).
    FourCCInfo info = {
            ('H' << 24) | ('2' << 16) | ('6' << 8) | '5',
            "video/hevc",
            AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4
    };

    //    const FourCCInfo* info = findInfo(fourcc);
    //    if (NULL == info) {
    //        LOGE("ERROR: findInfo");
    //        return false;
    //    }

    //    format = AMediaFormat_new();

    format = std::shared_ptr<AMediaFormat>(AMediaFormat_new(), deleter_AMediaFormat);

    if (NULL == format.get()) {
        LOGE("ERROR: AMediaFormat_new");
        close_m();
        return -1;
    }

    LOGI("mime: %s, width: %d, height: %d, fps: %f", info.mime, frameSize.width, frameSize.height, fps);

    AMediaFormat_setString(format.get(), AMEDIAFORMAT_KEY_MIME, info.mime);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_WIDTH, frameSize.width);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_HEIGHT, frameSize.height);
    AMediaFormat_setFloat(format.get(), AMEDIAFORMAT_KEY_FRAME_RATE, (float)fps);
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 5);
    // Bitrate heuristic: 5 bits per pixel per second.
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_BIT_RATE, frameSize.width * frameSize.height * 5);
    // 21 = COLOR_FormatYUV420SemiPlanar (NV12) -- matches what writeMat()
    // produces. (19 would be COLOR_FormatYUV420Planar / I420.)
    AMediaFormat_setInt32(format.get(), AMEDIAFORMAT_KEY_COLOR_FORMAT,21);
    //       19     COLOR_FormatYUV420Planar
    //    public static final int COLOR_FormatYUV420SemiPlanar        = 21;

    encoder = std::shared_ptr<AMediaCodec>(AMediaCodec_createEncoderByType(info.mime), deleter_AMediaCodec);
    //    encoder = AMediaCodec_createEncoderByType(info->mime);
    if (NULL == encoder.get()) {
        LOGE("ERROR: AMediaCodec_createEncoderByType");
        close_m();
        return -1;
    }

    status = AMediaCodec_configure(encoder.get(), format.get(), NULL, NULL, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
    if (AMEDIA_OK != status) {
        LOGE("ERROR: AMediaCodec_configure (%d)", status);
        close_m();
        return -1;
    }

    // Buffer-mode encoding: no input surface is created in this variant.
    //    status = AMediaCodec_createInputSurface(encoder.get(), &surface);
    //        if (AMEDIA_OK != status || NULL == surface) {
    //            LOGE("ERROR: AMediaCodec_createInputSurface (%d)", status);
    //            close_m();
    //            return -1;
    //        }

    AMediaCodec_start(encoder.get());

    fd = ::open(filename.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666);
    if (fd < 0) {
        LOGE("ERROR: open");
        close_m();
        return -1;
    }

    //    muxer = AMediaMuxer_new(fd, info->muxerFormat);

    muxer = std::shared_ptr<AMediaMuxer>(AMediaMuxer_new(fd, info.muxerFormat), deleter_AMediaMuxer);
    if (NULL == muxer) {
        LOGE("ERROR: AMediaMuxer_new");
        close_m();
        return -1;
    }

    //    AMediaMuxer_setOrientationHint(muxer, params.get(CAP_PROP_ORIENTATION_META, 0));

    frameIndex = 0;
    width = frameSize.width;
    height = frameSize.height;
    frameRate = fps;
    videoTrackIndex = -1; // set by drainEncoder on OUTPUT_FORMAT_CHANGED

    return 0;

}
// NOTE(review): intentionally empty -- resources are only released by an
// explicit close_m() call. If the owner forgets to call it, `fd` leaks
// (the shared_ptr members do clean up their codec/muxer/format via the
// custom deleters when the object is destroyed).
VideoEncoder::~ VideoEncoder() {
    //    deleter_AMediaCodec(mediaCodec.get());
    //    deleter_AMediaExtractor(extractor.get());
}



// Converts a planar I420 buffer (height*3/2 rows of CV_8UC1 data) into a
// packed RGB image.
cv::Mat VideoEncoder::yuv2rgb(cv::Mat& yuv) {
    cv::Mat converted;
    cv::cvtColor(yuv, converted, cv::COLOR_YUV2RGB_I420);
    return converted;
}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

AI算法网奇

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值