Scrcpy: Passing the Hardware-Encoded H264 Stream to the Python Side

Without further ado, here are the results.

Demo

1. PySide6 displaying the video stream in a Qt window

2. Playback on the Android MuMu emulator (the raw stream is wrapped into RTMP packets, pushed to an OSSRS server, and pulled with Android WebRTC)

Note: web players are not strict about the timestamps (tms), but mobile players are; without proper timestamps the video degrades into a slideshow.

Implementation

Using Scrcpy 2.4 as an example, two files are modified.

First, modify the socket transmission code in Streamer.java:
 

public final class Streamer {

    private static final long PACKET_FLAG_CONFIG = 1L << 63;
    private static final long PACKET_FLAG_KEY_FRAME = 1L << 62;

    private final FileDescriptor fd;
    private final Codec codec;
    private final boolean sendCodecMeta;
    private final boolean sendFrameMeta;

    private final ByteBuffer headerBuffer = ByteBuffer.allocate(16);

    public Streamer(FileDescriptor fd, Codec codec, boolean sendCodecMeta, boolean sendFrameMeta) {
        this.fd = fd;
        this.codec = codec;
        this.sendCodecMeta = sendCodecMeta;
        this.sendFrameMeta = sendFrameMeta;
    }

    public Codec getCodec() {
        return codec;
    }

    public void writeAudioHeader() throws IOException {
        if (sendCodecMeta) {
            ByteBuffer buffer = ByteBuffer.allocate(4);
            buffer.putInt(codec.getId());
            buffer.flip();
            IO.writeFully(fd, buffer);
        }
    }

    public void writeVideoHeader(Size videoSize) throws IOException {
        if (sendCodecMeta) {
            ByteBuffer buffer = ByteBuffer.allocate(12);
            buffer.putInt(codec.getId());
            buffer.putInt(videoSize.getWidth());
            buffer.putInt(videoSize.getHeight());
            buffer.flip();
            IO.writeFully(fd, buffer);
        }
    }

    public void writeDisableStream(boolean error) throws IOException {
        // Writing a specific code as codec-id means that the device disables the stream
        //   code 0: it explicitly disables the stream (because it could not capture audio), scrcpy should continue mirroring video only
        //   code 1: a configuration error occurred, scrcpy must be stopped
        byte[] code = new byte[4];
        if (error) {
            code[3] = 1;
        }
        IO.writeFully(fd, code, 0, code.length);
    }

    public void writePacket(ByteBuffer buffer, long pts, boolean config, boolean keyFrame) throws IOException {
        if (config) {
            if (codec == AudioCodec.OPUS) {
                fixOpusConfigPacket(buffer);
            }
            else if (codec == AudioCodec.FLAC) {
                fixFlacConfigPacket(buffer);
            }
        }

        if (sendFrameMeta) {
            writeFrameMeta(fd, buffer.remaining(), pts, config, keyFrame);
        }
        IO.writeFully(fd, buffer);
    }

    //==================================================================custom started =================================================
    //custom added
    public void writePacket(ByteBuffer buffer, long tms) throws IOException {
        if (sendFrameMeta) {
            writeFrameMeta(fd, buffer.remaining(), tms);
        }
        IO.writeFully(fd, buffer);
    }

    //custom added
    private void writeFrameMeta(FileDescriptor fd, int packetSize, long tms) throws IOException {
        headerBuffer.clear();
        headerBuffer.putLong(tms);
        // custom added start: record the current screen rotation
        int rotation = ServiceManager.getWindowManager().getRotation();
        headerBuffer.putInt(rotation);
        // custom added end
        headerBuffer.putInt(packetSize);
        headerBuffer.flip();
        IO.writeFully(fd, headerBuffer);
    }

    //==================================================================custom ended =================================================


    public void writePacket(ByteBuffer codecBuffer, MediaCodec.BufferInfo bufferInfo) throws IOException {
        long pts = bufferInfo.presentationTimeUs;
        boolean config = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
        boolean keyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        writePacket(codecBuffer, pts, config, keyFrame);
    }

    private void writeFrameMeta(FileDescriptor fd, int packetSize, long pts, boolean config, boolean keyFrame) throws IOException {
        headerBuffer.clear();

        long ptsAndFlags;
        if (config) {
            ptsAndFlags = PACKET_FLAG_CONFIG; // non-media data packet
        } else {
            ptsAndFlags = pts;
            if (keyFrame) {
                ptsAndFlags |= PACKET_FLAG_KEY_FRAME;
            }
        }

        headerBuffer.putLong(ptsAndFlags);
        // custom added start: record the current screen rotation
        int rotation = ServiceManager.getWindowManager().getRotation();
        headerBuffer.putInt(rotation);
        // custom added end
        headerBuffer.putInt(packetSize);
        headerBuffer.flip();
        IO.writeFully(fd, headerBuffer);
    }

    private static void fixOpusConfigPacket(ByteBuffer buffer) throws IOException {
        // Here is an example of the config packet received for an OPUS stream:
        //
        // 00000000  41 4f 50 55 53 48 44 52  13 00 00 00 00 00 00 00  |AOPUSHDR........|
        // -------------- BELOW IS THE PART WE MUST PUT AS EXTRADATA  -------------------
        // 00000010  4f 70 75 73 48 65 61 64  01 01 38 01 80 bb 00 00  |OpusHead..8.....|
        // 00000020  00 00 00                                          |...             |
        // ------------------------------------------------------------------------------
        // 00000020           41 4f 50 55 53  44 4c 59 08 00 00 00 00  |   AOPUSDLY.....|
        // 00000030  00 00 00 a0 2e 63 00 00  00 00 00 41 4f 50 55 53  |.....c.....AOPUS|
        // 00000040  50 52 4c 08 00 00 00 00  00 00 00 00 b4 c4 04 00  |PRL.............|
        // 00000050  00 00 00                                          |...|
        //
        // Each "section" is prefixed by a 64-bit ID and a 64-bit length.
        //
        // <https://developer.android.com/reference/android/media/MediaCodec#CSD>

        if (buffer.remaining() < 16) {
            throw new IOException("Not enough data in OPUS config packet");
        }

        final byte[] opusHeaderId = {'A', 'O', 'P', 'U', 'S', 'H', 'D', 'R'};
        byte[] idBuffer = new byte[8];
        buffer.get(idBuffer);
        if (!Arrays.equals(idBuffer, opusHeaderId)) {
            throw new IOException("OPUS header not found");
        }

        // The size is in native byte-order
        long sizeLong = buffer.getLong();
        if (sizeLong < 0 || sizeLong >= 0x7FFFFFFF) {
            throw new IOException("Invalid block size in OPUS header: " + sizeLong);
        }

        int size = (int) sizeLong;
        if (buffer.remaining() < size) {
            throw new IOException("Not enough data in OPUS header (invalid size: " + size + ")");
        }

        // Set the buffer to point to the OPUS header slice
        buffer.limit(buffer.position() + size);
    }

    private static void fixFlacConfigPacket(ByteBuffer buffer) throws IOException {
        // 00000000  66 4c 61 43 00 00 00 22                           |fLaC..."        |
        // -------------- BELOW IS THE PART WE MUST PUT AS EXTRADATA  -------------------
        // 00000000                           10 00 10 00 00 00 00 00  |        ........|
        // 00000010  00 00 0b b8 02 f0 00 00  00 00 00 00 00 00 00 00  |................|
        // 00000020  00 00 00 00 00 00 00 00  00 00                    |..........      |
        // ------------------------------------------------------------------------------
        // 00000020                                 84 00 00 28 20 00  |          ...( .|
        // 00000030  00 00 72 65 66 65 72 65  6e 63 65 20 6c 69 62 46  |..reference libF|
        // 00000040  4c 41 43 20 31 2e 33 2e  32 20 32 30 32 32 31 30  |LAC 1.3.2 202210|
        // 00000050  32 32 00 00 00 00                                 |22....|
        //
        // <https://developer.android.com/reference/android/media/MediaCodec#CSD>

        if (buffer.remaining() < 8) {
            throw new IOException("Not enough data in FLAC config packet");
        }

        final byte[] flacHeaderId = {'f', 'L', 'a', 'C'};
        byte[] idBuffer = new byte[4];
        buffer.get(idBuffer);
        if (!Arrays.equals(idBuffer, flacHeaderId)) {
            throw new IOException("FLAC header not found");
        }

        // The size is in big-endian
        buffer.order(ByteOrder.BIG_ENDIAN);

        int size = buffer.getInt();
        if (buffer.remaining() < size) {
            throw new IOException("Not enough data in FLAC header (invalid size: " + size + ")");
        }

        // Set the buffer to point to the FLAC header slice
        buffer.limit(buffer.position() + size);
    }
}

The field private final ByteBuffer headerBuffer = ByteBuffer.allocate(16); is the per-packet message header. It is written before every payload to prevent packet splitting and sticking (framing issues) over the socket: 8 bytes for the long (tms, or pts plus flags), 4 bytes for the screen rotation, and 4 bytes for the packet size.
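
On the receiving side, the whole 16-byte header can be read and unpacked in one call. A minimal sketch, assuming the big-endian layout that Java's ByteBuffer writes by default in writeFrameMeta:

import socket
import struct

def read_one_packet(sock: socket.socket):
    # 16-byte header: 8-byte long (tms or pts+flags), 4-byte rotation, 4-byte packet size,
    # all big-endian, matching the ByteBuffer default byte order on the Java side
    header = b''
    while len(header) < 16:
        chunk = sock.recv(16 - len(header))
        if not chunk:
            raise ConnectionError("socket closed while reading header")
        header += chunk
    tms, rotation, pkg_size = struct.unpack('>qii', header)

    payload = b''
    while len(payload) < pkg_size:
        chunk = sock.recv(pkg_size - len(payload))
        if not chunk:
            raise ConnectionError("socket closed while reading payload")
        payload += chunk
    return tms, rotation, payload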

Then modify the encode method in SurfaceEncoder.java as follows: replace the original streamer.writePacket(codecBuffer, bufferInfo) call with the streamer.writePacket(codecBuffer, tms) overload we just added to Streamer.java.
 private boolean encode(MediaCodec codec, Streamer streamer) throws IOException {
        boolean eof = false;
        boolean alive = true;
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        long ptsOrigin = 0;
        long lastKeyFrameMs = System.currentTimeMillis();
        long keyFrameIntervalMs = DEFAULT_I_FRAME_INTERVAL * 1000L;
        while (!capture.consumeReset() && !eof) {
            if (stopped.get()) {
                alive = false;
                break;
            }
            int outputBufferId = codec.dequeueOutputBuffer(bufferInfo, 2000_000);
            try {
                if (capture.consumeReset()) {
                    // must restart encoding with new size
                    break;
                }

                eof = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                if (outputBufferId >= 0) {
                    ByteBuffer codecBuffer = codec.getOutputBuffer(outputBufferId);
                    if (ptsOrigin == 0) {
                        // request that the next frame be a key frame
                        setKeyFrame(codec, "first_frame");
                        ptsOrigin = bufferInfo.presentationTimeUs;
                    }

                    boolean isConfig = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
                    if (!isConfig) {
                        // If this is not a config packet, then it contains a frame
                        firstFrameSent = true;
                        consecutiveErrors = 0;
                    }

                    long ctMs = System.currentTimeMillis();
                    boolean isKeyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
                    if(isKeyFrame){
                        lastKeyFrameMs = ctMs;
                    }
                    long tms = (bufferInfo.presentationTimeUs / 1000) - ptsOrigin/1000;
//                    streamer.writePacket(codecBuffer, bufferInfo);
                    streamer.writePacket(codecBuffer, tms); // pass tms on to the agent
                    if((ctMs - lastKeyFrameMs) > keyFrameIntervalMs){
                        setKeyFrame(codec,"exceed max key frame interval:" + keyFrameIntervalMs);
                    }
                }else{
                    setKeyFrame(codec,"outputBufferId < 0");
                }
            } finally {
                if (outputBufferId >= 0) {
                    codec.releaseOutputBuffer(outputBufferId, false);
                }
            }
        }

        if (capture.isClosed()) {
            // The capture might have been closed internally (for example if the camera is disconnected)
            alive = false;
        }

        return !eof && alive;
    }

The Python receiving side is implemented as follows:
 

    def __stream_receive_loop(self) -> None:
        """
        Core loop for video parsing
        Surface.ROTATION_0   == 0: no rotation (natural orientation, usually the phone's default portrait)
        Surface.ROTATION_90  == 1: rotated 90 degrees clockwise (landscape)
        Surface.ROTATION_180 == 2: rotated 180 degrees (upside down)
        Surface.ROTATION_270 == 3: rotated 270 degrees clockwise
        """
        logger.info(f'client {self.device.serial} start to receive video stream ...')
        codec = CodecContext.create("h264", "r")
        while self.alive:
            try:
                # send_frame_meta = true; see Streamer.writeFrameMeta in scrcpy
                pts_and_flags_bytes = self._recv_fully(8, 8)
                pts_and_flags = struct.unpack('>q', pts_and_flags_bytes)[0]
                rotation_bytes = self._recv_fully(4, 4)
                rotation = struct.unpack('>i', rotation_bytes)[0]
                pkg_size_bytes = self._recv_fully(4, 4)
                pkg_size = struct.unpack('>i', pkg_size_bytes)[0]
                raw_h264 = self._recv_fully(pkg_size)
                if raw_h264 == b'':
                    raise ConnectionError("raw_h264 is empty considered as video stream is disconnected")
                if self.stream_mode != 'local':
                    self.h264_pkt_queue.put({
                        'pts': pts_and_flags,
                        'raw_h264': raw_h264,
                        'rotation': rotation
                    })
                packets = codec.parse(raw_h264)
                for packet in packets:
                    frames = codec.decode(packet)
                    for frame in frames:
                        frame = frame.to_ndarray(format="bgr24")
                        if self.flip:
                            frame = cv2.flip(frame, 1)
                        self.resolution = (frame.shape[1], frame.shape[0])
                        self.last_frame = frame
                        if self.stream_mode != 'remote':
                            self.__send_to_listeners(EVENT_FRAME, frame)

            except (BlockingIOError, InvalidDataError):
                logger.error(f" BlockingIOError client = {self.device.serial}")
                time.sleep(0.01)
                if not self.block_frame:
                    self.__send_to_listeners(EVENT_FRAME, None)
            except (ConnectionError, OSError, socket.timeout, struct.error) as e:  # Socket Closed
                logger.error(f" ConnectionError client = {self.device.serial} cause {e}")
                if self.alive:
                    self.__send_to_listeners(EVENT_DISCONNECT)
                    self.restart()

    def _recv_fully(self, total_bytes, buffer_size=1024):
        data = b''
        while len(data) < total_bytes:
            chunk = self.__video_socket.recv(min(buffer_size, total_bytes - len(data)))
            if not chunk:
                raise ConnectionError("Socket connection broken.")
            data += chunk
        return data
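
The decoding path above uses PyAV's CodecContext. To verify that part in isolation, here is a minimal sketch that runs the same parse/decode/to_ndarray calls on a raw H264 elementary stream; the file name test.h264 is a made-up placeholder:

import av   # PyAV
import cv2

codec = av.CodecContext.create("h264", "r")
with open("test.h264", "rb") as f:   # hypothetical raw Annex-B H264 dump
    data = f.read()

for packet in codec.parse(data):
    for frame in codec.decode(packet):
        bgr = frame.to_ndarray(format="bgr24")   # same conversion as in the receive loop
        cv2.imshow("frame", bgr)
        cv2.waitKey(1)
cv2.destroyAllWindows()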

I display the frames with PySide6, but you can also compile rtmpdump into a DLL and hand the H264 packets to a push/pull streaming server; OSSRS is recommended.

Qt: converting the raw stream into images:

 # thread-safe queue
    image_frame_queue = queue.Queue()

    last_frame = None

    # consumer thread: reads frames from the queue
    def start_receiving_h264_frame(self):
        while self.alive:
            data = None
            try:
                data = self.image_frame_queue.get(timeout=0.01)  # try to take a frame from the queue
            except queue.Empty:
                pass
            if data is None and self.last_frame is not None:
                data = self.last_frame
            if data is not None:
                self.show_image(data)

    def show_image(self, frame):
        app.processEvents()
        if frame is not None and frame.size > 0 and self.main_control.resolution:
            self.last_frame = frame
            max_size = max(self.main_control.resolution)
            if max_size > 0:
                ratio = self.qt_window_size / max_size
                image = QImage(
                    self.last_frame,
                    self.last_frame.shape[1],
                    self.last_frame.shape[0],
                    self.last_frame.shape[1] * 3,
                    QImage.Format_BGR888,
                )
                pix = QPixmap.fromImage(image)
                pix.setDevicePixelRatio(1 / ratio)
                self.ui.label.setPixmap(pix)
                self.resize(1, 1)
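
For completeness, here is one way to wire the pieces together. The names client, window and add_listener are assumptions used only for illustration (the original post does not show how the EVENT_FRAME listener is registered), so adapt them to your own stream-client class:

def wire_up(client, window):
    def on_frame(frame):
        # EVENT_FRAME listener: hand decoded BGR frames to the Qt consumer queue
        if frame is not None:
            window.image_frame_queue.put(frame)

    client.add_listener(EVENT_FRAME, on_frame)   # add_listener is an assumed API name
    window.show()
    # the consumer loop runs on the Qt main thread; the app.processEvents() call inside
    # show_image keeps the UI responsive while the loop spins
    window.start_receiving_h264_frame()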

rtmpdump RTMP push implementation:

    def __stream_push_loop(self) -> None:
        """
        Core loop for video push
        """
        try:
            self.__start_stream()
            while self.alive:
                # if no data is sent to SRS for 5 seconds, SRS and the client will disconnect
                if self.h264_pkt_queue and self.rtmp_client and self.rtmp_client.is_connected:
                    pkt = None
                    try:
                        pkt = self.h264_pkt_queue.get(timeout=4)
                    except _queue.Empty:
                        pass
                    if pkt is None and self.last_h264_pkt is not None:
                        pkt = self.last_h264_pkt
                    if pkt is not None:
                        self.last_h264_pkt = pkt
                        raw_h264 = pkt['raw_h264']
                        pts_and_flags = pkt['pts']
                        rotation = pkt['rotation']
                        if self.last_rotation is not None and self.last_rotation != rotation:
                            logger.info("last_rotation ==%d  new rotation == %d" % (self.last_rotation, rotation))
                            logger.info("device rotation changed restart rtmp stream ...")
                            self.h264_pkt_queue.put(pkt)
                            self.last_rotation = rotation
                            self.restart_stream()
                            continue
                        self.last_rotation = rotation
                        send_ok = self.rtmp_client.send_h264_frame(raw_h264, pts_and_flags)
                        if not send_ok:
                            self.h264_pkt_queue.put(pkt)
                            if self.h264_pkt_queue.qsize() > 5:
                                self.restart()
        except Exception:
            logger.error(f"rtmp push error = {traceback.print_exc()}")
            self.restart()
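
The rtmp_client object used above is not shown in this post. As a rough illustration of the "compile rtmpdump into a DLL" idea, the sketch below wraps such a DLL with ctypes. The library name rtmp_push.dll and the exported functions rtmp_open / rtmp_send_h264 / rtmp_close are made-up placeholders (not the real librtmp API), so adapt the signatures to whatever interface you actually export:

import ctypes

class RtmpClient:
    """Thin ctypes wrapper around a self-built rtmpdump/librtmp DLL (hypothetical interface)."""

    def __init__(self, url: str, dll_path: str = "rtmp_push.dll"):
        self._lib = ctypes.CDLL(dll_path)
        # declare the assumed exported signatures
        self._lib.rtmp_open.argtypes = [ctypes.c_char_p]
        self._lib.rtmp_open.restype = ctypes.c_int
        self._lib.rtmp_send_h264.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_longlong]
        self._lib.rtmp_send_h264.restype = ctypes.c_int
        self._lib.rtmp_close.argtypes = []
        self.is_connected = self._lib.rtmp_open(url.encode("utf-8")) == 0

    def send_h264_frame(self, raw_h264: bytes, tms: int) -> bool:
        # raw Annex-B H264 payload plus the millisecond timestamp produced on the device side
        return self._lib.rtmp_send_h264(raw_h264, len(raw_h264), tms) == 0

    def close(self):
        self._lib.rtmp_close()
        self.is_connected = False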



Suggested ways to pull the stream remotely

Android:

google-webrtc = { module = "org.webrtc:google-webrtc", version.ref = "googleWebrtc" }

Just add the Gradle (version catalog) dependency.

iOS:
https://github.com/shogo4405/HaishinKit.swift
Just add the CocoaPods dependency.

I will walk through the concrete Android (Kotlin) and iOS (Swift) implementations in a later post when I have time.
