【基于 Netty 和 MediaRecorder 录制视频、MediaSource 实时回放实现视频直播】

本文详细描述了如何使用 Java 的 Netty 框架结合浏览器的 MediaRecorder API 实现实时视频录制,并通过 MediaSource 完成回放,涉及后端依赖管理、WebSocket 通信以及前端的用户媒体流处理。

摘要生成于 C知道 ,由 DeepSeek-R1 满血版支持, 前往体验 >

实现的基本原理

  1. Netty 视频流转发
  2. MediaRecorder 录制视频
  3. MediaSource 实时回放

实现具体功能

这里采用直播的方式:录制端点击开始录制后,播放端可以实时接收到视频流,也可以在录制过程中中途接入观看,延时在 1s-3s 左右

后端代码

pom.xml依赖

<dependencies>
        <!-- Unit testing (test scope only) -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.10</version>
            <scope>test</scope>
        </dependency>
        <!-- Netty: WebSocket server used to relay the recorded video frames -->
        <dependency>
            <groupId>io.netty</groupId>
            <artifactId>netty-all</artifactId>
            <version>4.1.38.Final</version>
        </dependency>
        <!-- Lombok: compile-time boilerplate generation -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.16.18</version>
        </dependency>
        <!-- Logging: SLF4J API with Logback as the backing implementation -->
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.25</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.1.7</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
            <version>1.1.7</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-access</artifactId>
            <version>1.1.7</version>
        </dependency>
        <!-- Protostuff serialization; versions are supplied by the
             protostuff-bom import in <dependencyManagement> below -->
        <dependency>
            <groupId>io.protostuff</groupId>
            <artifactId>protostuff-api</artifactId>
        </dependency>
        <dependency>
            <groupId>io.protostuff</groupId>
            <artifactId>protostuff-core</artifactId>
        </dependency>
        <dependency>
            <groupId>io.protostuff</groupId>
            <artifactId>protostuff-runtime</artifactId>
        </dependency>
        <dependency>
            <groupId>com.google.protobuf</groupId>
            <artifactId>protobuf-java</artifactId>
            <version>3.6.1</version>
        </dependency>
    </dependencies>
    <!-- BOM pinning the protostuff artifact versions above -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>io.protostuff</groupId>
                <artifactId>protostuff-bom</artifactId>
                <version>1.4.4</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

转发视频帧核心代码


/**
 * 处理二进制消息
 *
 * @author huan.fu
 * @date 2018/11/8 - 14:37
 */
/**
 * Relays binary WebSocket frames (recorded video chunks) to every connected
 * client, implementing a simple one-to-many live broadcast.
 *
 * @author huan.fu
 * @date 2018/11/8 - 14:37
 */
public class BinaryWebSocketFrameHandler extends SimpleChannelInboundHandler<BinaryWebSocketFrame> {
    private static final Logger log = LoggerFactory.getLogger(BinaryWebSocketFrameHandler.class);
    /** Every connected channel; each incoming frame is fanned out to this group. */
    public static ChannelGroup channelGroup = new DefaultChannelGroup(GlobalEventExecutor.INSTANCE);
    // NOTE(review): SimpleDateFormat is not thread-safe and is only referenced by the
    // commented-out offline broadcast below; prefer java.time.DateTimeFormatter if revived.
    private SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    // Kept for compatibility: no writer of this map is visible in this file — TODO confirm callers.
    public static Map<ChannelId, byte[]> stringByteBufMap=Collections.synchronizedMap(new HashMap<>());
    /** Ids of currently connected channels (added on active, removed on inactive/error). */
    public static List<ChannelId> newUser=Collections.synchronizedList(new ArrayList<>());

    /**
     * Registers a newly connected client in the broadcast group.
     */
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        Channel channel = ctx.channel();
        String remoteAddress = channel.remoteAddress().toString();
        channelGroup.add(channel);
        newUser.add(channel.id());
        // Was System.out.println — route through the configured logger instead.
        log.info("【客户端】{}上线啦", remoteAddress);
    }

    /**
     * Removes a disconnected client from the broadcast group.
     */
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        Channel channel = ctx.channel();
        String remoteAddress = channel.remoteAddress().toString();
        channelGroup.remove(channel);
        newUser.remove(channel.id());

        //  channelGroup.writeAndFlush("【客户端】" + remoteAddress + "已下线 " + format.format(new Date()));
        log.info("【客户端】{}已下线", remoteAddress);
    }

    /**
     * Broadcasts every received binary frame (one recorded video chunk) to all
     * connected clients, including the sender.
     */
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, BinaryWebSocketFrame msg) throws InterruptedException {
        // retainedDuplicate() shares the payload without copying bytes; Netty
        // releases each duplicate once the write completes.
        // Bug fix: the original called writeAndFlush(...).sync() per channel,
        // blocking the event-loop thread — one slow viewer would stall every
        // channel served by the same loop. writeAndFlush is already async.
        channelGroup.forEach(ch -> ch.writeAndFlush(msg.retainedDuplicate()));
    }

    /**
     * Logs the failure, then evicts and closes the faulty channel.
     */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        // Log first so the cause is recorded even if cleanup itself fails.
        log.error("服务器发生了异常:", cause);
        newUser.remove(ctx.channel().id());
        channelGroup.remove(ctx.channel());
        ctx.close();
    }

    /**
     * Logs handshake details once the WebSocket upgrade completes; every other
     * user event is forwarded to the next handler in the pipeline.
     */
    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
        if (evt instanceof WebSocketServerProtocolHandler.HandshakeComplete) {
            log.info("web socket 握手成功。");
            WebSocketServerProtocolHandler.HandshakeComplete handshakeComplete = (WebSocketServerProtocolHandler.HandshakeComplete) evt;
            log.info("requestUri:[{}]", handshakeComplete.requestUri());
            log.info("subproTocol:[{}]", handshakeComplete.selectedSubprotocol());
            handshakeComplete.requestHeaders().forEach(entry ->
                    log.info("header key:[{}] value:[{}]", entry.getKey(), entry.getValue()));
        } else {
            super.userEventTriggered(ctx, evt);
        }
    }
}

前端录制端代码

交互逻辑main.js

'use strict';

// WebSocket connection to the Netty relay (see BinaryWebSocketFrameHandler).
var ws = new WebSocket("ws://127.0.0.1:9898/chat");
ws.onopen = function (ev) {

};

ws.onerror = function () {
    console.log("发生异常");
};
ws.onclose = function () {
    console.log("webSocket 关闭");
}

// MediaSource feeding <video id="remoteVideo"> so the recorder page can also
// preview the stream that viewers receive back from the server.
const mediaSource = new MediaSource();
// Chunks that arrived while sourceBuffer was busy; drained on 'updateend'.
const callbackQueue = [];
let sourceBuffer;
let mediaRecorder;
let duration; // NOTE(review): declared but never assigned in this file
let isMediaInit = false; // set true after the first successful append

const localVideo = document.querySelector('video#localVideo');
const remoteVideo = document.querySelector('video#remoteVideo');
const streamingBtn = document.querySelector('button#streamingBtn');
streamingBtn.onclick = toggleStreaming;


// Legacy getUserMedia shim for older browsers.
// NOTE(review): navigator.mediaDevices.getUserMedia is the modern API — consider migrating.
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia

// Capture both audio and video from the user's devices.
const constraints = {
    audio: true,
    video: true
};

// Request camera + microphone immediately on page load.
navigator.getUserMedia(constraints, successCallback, errorCallback);

// Once the MediaSource is attached to the <video>, create the SourceBuffer
// that incoming WebM chunks are appended to.
mediaSource.addEventListener('sourceopen', function (e) {
    // const mimeCodec = 'video/mp4; codecs="avc1.42E01E, opus"';
    // Must match the MediaRecorder mimeType chosen in startStreaming().
    const mimeCodec = 'video/webm; codecs="vp9, opus"';
    sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    // sourceBuffer.mode = 'segments';
    sourceBuffer.addEventListener('updateend', function () {

        // Update if currentTime is slower than 1 second from the time currently buffered in sourceBuffer
        if (isMediaInit) {
            const ranges = sourceBuffer.buffered;
            const bufferLength = ranges.length;
            if (bufferLength != 0) {
                // Jump playback to the live edge when it falls >0.5s behind the buffer.
                if (sourceBuffer.buffered.end(0) - remoteVideo.currentTime > 0.5) {
                    remoteVideo.currentTime = sourceBuffer.buffered.end(0);
                    console.log("Update currentTime!!!!");
                }
            }
        } else {
            // First append completed; subsequent updateends may seek to the live edge.
            isMediaInit = true;
        }

        // Append buffer to sourceBuffer if sourceBuffer is not updating 
        if (callbackQueue.length > 0 && !sourceBuffer.updating) {
            sourceBuffer.appendBuffer(callbackQueue.shift());
            console.log('Delayed buffer fix');
        }
    });
}, false);

// NOTE(review): URL.createObjectURL(MediaSource) is deprecated in newer browsers;
// consider remoteVideo.srcObject = mediaSource where supported — confirm targets.
remoteVideo.src = window.URL.createObjectURL(mediaSource);


console.log("start")
// The server broadcasts every frame to ALL clients — including this recorder —
// so the recorder page receives its own stream back and renders it in remoteVideo.
ws.onmessage = function (ev) {
    var data = ev.data;
    console.log(data)
    if (mediaSource.readyState == 'open') {
        // The WebSocket delivers a Blob; FileReader converts it to an
        // ArrayBuffer before it can be appended to the SourceBuffer.
        var reader = new FileReader();
        reader.onload = e => {
            var buffer = e.target.result;
            const arrayBuffer = new Int8Array(buffer);

            // appendBuffer() throws while `updating` is true, so queue chunks
            // and let the 'updateend' handler drain them in arrival order.
            if (!sourceBuffer.updating && callbackQueue.length == 0) {
                sourceBuffer.appendBuffer(arrayBuffer);
            } else {
                callbackQueue.push(arrayBuffer);
            }
        }
        reader.readAsArrayBuffer(data);
    }
};


// Debug helper: dumps whatever event it is registered for to the console.
function eventTest(event) {
    console.log('event Test', event);
}

/**
 * getUserMedia success callback: previews the captured stream locally and
 * exposes it for startStreaming().
 * @param {MediaStream} stream live camera/microphone stream
 */
function successCallback(stream) {
    console.log('getUserMedia() got stream: ', stream);
    // Bug fix: `stream.inactive = eventTest` only set a plain property — the
    // handler was never invoked. Register a real 'inactive' event listener.
    stream.addEventListener('inactive', eventTest);
    window.stream = stream; // startStreaming() reads window.stream
    localVideo.srcObject = stream;
    localVideo.onloadedmetadata = function (event) {
        console.log("onloadedmetadata", event);
    }
    localVideo.addEventListener('play', (event) => {
        console.log("play", event);
    });
}

// getUserMedia failure callback: record the error for diagnosis.
function errorCallback(error) {
    console.log('navigator.getUserMedia error: ', error);
}

// Ship each recorded chunk to the relay server as soon as it is produced.
function handleDataAvailable(event) {
    const chunk = event.data;
    if (chunk && chunk.size > 0) {
        ws.send(chunk);
    }
}

// MediaRecorder 'stop' callback: log only; RecordLoop restarts recording.
function handleStop(event) {
    console.log('Recorder stopped: ', event);
}

// Flip between starting and stopping the broadcast based on the button label.
function toggleStreaming() {
    const starting = streamingBtn.textContent === 'Start Streaming';
    if (starting) {
        startStreaming();
        return;
    }
    stopStreaming();
    streamingBtn.textContent = 'Start Streaming';
}

/**
 * Create a MediaRecorder for the captured stream and begin the one-second
 * record/restart loop that produces self-contained chunks for live playback.
 * Falls back through progressively simpler options for older browsers.
 */
function startStreaming() {
    // const options = { mimeType: 'video/webm; codecs="h264, opus"' };
    // Must match the mimeCodec that the MediaSource consumers expect.
    let options = {mimeType: 'video/webm; codecs="vp9, opus"'};
    try {
        mediaRecorder = new MediaRecorder(window.stream, options);
    } catch (e0) {
        console.log('Unable to create MediaRecorder with options Object: ', e0);
        try {
            // Bug fix: the fallback used 'video/webm,codecs=vp8' — a comma is
            // invalid MIME syntax; parameters must be separated with ';'.
            options = {mimeType: 'video/webm;codecs=vp8', bitsPerSecond: 100000};
            mediaRecorder = new MediaRecorder(window.stream, options);
        } catch (e1) {
            console.log('Unable to create MediaRecorder with options Object: ', e1);
            try {
                options = 'video/vp8'; // Chrome 47 accepted a bare mime string
                mediaRecorder = new MediaRecorder(window.stream, options);
            } catch (e2) {
                alert('MediaRecorder is not supported by this browser.\n\n' +
                    'Try Firefox 29 or later, or Chrome 47 or later, with Enable experimental Web Platform features enabled from chrome://flags.');
                console.error('Exception while creating MediaRecorder:', e2);
                return;
            }
        }
    }
    console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
    streamingBtn.textContent = 'Stop Streaming';
    mediaRecorder.onstop = handleStop;
    mediaRecorder.ondataavailable = handleDataAvailable;
    // mediaRecorder.start(1); // timeslice variant used for video chat
    RecordLoop(); // restart each second so every chunk is independently playable (live mode)
    console.log('MediaRecorder started', mediaRecorder);
}

// Timer handle for the restart loop so stopStreaming() can cancel it.
let recordLoopTimer = null;

/**
 * Restart the recorder every second: each stop() flushes a complete,
 * self-contained WebM blob through ondataavailable for live playback.
 */
function RecordLoop(){
    // Bug fix: the original called stop() unconditionally, but the very first
    // invocation reaches here before any start() — stop() on an 'inactive'
    // recorder throws InvalidStateError.
    if (mediaRecorder.state !== 'inactive') {
        mediaRecorder.stop();
    }
    mediaRecorder.start();
    recordLoopTimer = setTimeout(RecordLoop, 1000);
}

/**
 * Tear down the broadcast: cancel the restart loop, close the socket and
 * stop the recorder.
 */
function stopStreaming() {
    // Bug fix: the pending RecordLoop timeout was never cleared, so recording
    // silently restarted ~1s after the user pressed stop.
    if (recordLoopTimer !== null) {
        clearTimeout(recordLoopTimer);
        recordLoopTimer = null;
    }
    ws.close();
    if (mediaRecorder && mediaRecorder.state !== 'inactive') {
        mediaRecorder.stop();
    }
}

前端界面

这里分为录制端和播放端,以直播的方式进行,播放端进入后即可观看;后期可以采用压缩视频流的方式进一步优化带宽

<body>
<div id="container">
    <h1>mediaRecorder-webSocket-mediaSource</h1>
    <!-- Local camera preview (recorder side) -->
    <video id="localVideo" autoplay ></video>
    <!-- Playback of the stream echoed back by the relay server -->
    <video id="remoteVideo" autoplay ></video>
    <div>
        <button id="streamingBtn">Start Streaming</button>
    </div>
</div>
<!-- NOTE(review): pako (zlib) is loaded but never referenced in main.js —
     presumably reserved for the planned stream compression; confirm or remove. -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/pako/2.0.4/pako.min.js"></script>
<script src="js/main.js"></script>
</body>

客户端播放代码client.js

'use strict';

// Viewer side: connect to the same Netty relay as the recorder page.
var ws = new WebSocket("ws://127.0.0.1:9898/chat");
ws.onopen = function (ev) {

};

ws.onerror = function () {
    console.log("发生异常");
};
ws.onclose = function () {
    console.log("webSocket 关闭");
}

// MediaSource backing the remoteVideo element; chunks from the server are
// appended to its SourceBuffer as they arrive.
const mediaSource = new MediaSource();
const callbackQueue = []; // chunks waiting while the SourceBuffer is busy
let sourceBuffer;
let isMediaInit = false; // NOTE(review): never read in this file — presumably leftover from main.js

const remoteVideo = document.querySelector('video#remoteVideo');
const streamingBtn = document.querySelector('button#streamingBtn');
// streamingBtn.onclick = toggleStreaming;


// NOTE(review): declared but unused on the viewer page — no capture happens here.
const constraints = {
    audio: true,
    video: true
};

// Once the MediaSource is attached to remoteVideo, create the SourceBuffer
// and start consuming chunks from the WebSocket.
mediaSource.addEventListener('sourceopen', function (e) {
    window.URL.revokeObjectURL(remoteVideo.src);
    // Must match the MediaRecorder mimeType used by the recorder page.
    const mimeCodec = 'video/webm; codecs="vp9, opus"';

    sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
    // 'sequence' mode: chunks are stitched by arrival order, not embedded timestamps.
    sourceBuffer.mode = 'sequence';
    console.log("sourceopen");

    sourceBuffer.addEventListener("error", function (error) {
        console.log(error);
    });

    // Bug fix: drain chunks queued while a previous append/remove was in
    // flight — appendBuffer() throws InvalidStateError while `updating` is
    // true (the original appended unconditionally and left callbackQueue unused).
    sourceBuffer.addEventListener('updateend', function () {
        if (callbackQueue.length > 0 && !sourceBuffer.updating) {
            sourceBuffer.appendBuffer(callbackQueue.shift());
        } else if (!sourceBuffer.updating) {
            trimBufferTail();
        }
    });

    ws.onmessage = function (ev) {
        var data = ev.data;
        if (mediaSource.readyState == 'open') {
            // The WebSocket delivers a Blob; convert to an ArrayBuffer first.
            var reader = new FileReader();
            reader.onload = e => {
                const chunk = new Int8Array(e.target.result);
                if (!sourceBuffer.updating && callbackQueue.length === 0) {
                    sourceBuffer.appendBuffer(chunk);
                } else {
                    callbackQueue.push(chunk);
                }
            };
            reader.readAsArrayBuffer(data);
        } else {
            console.log("mediaSource.readyState closed")
        }

    }
}, false);

// Keep roughly the last 30 seconds buffered; only called when no append or
// remove is in flight (remove() also throws while `updating` is true).
// Bug fix: the original tested buffered.start(0) > 30, which can never become
// true once trimming keeps the start near zero, and issued remove() right
// after appendBuffer() while the append was still updating.
function trimBufferTail() {
    const buffered = remoteVideo.buffered;
    if (buffered.length && buffered.end(0) - buffered.start(0) > 30) {
        sourceBuffer.remove(0, buffered.end(0) - 30);
    }
}

mediaSource.addEventListener('sourceended', function () {
    console.log('Delayed buffer sourceended');
    // Bug fix: the original concatenated the MediaSource object itself.
    console.log("mediaSource.readyState :" + mediaSource.readyState)
});
remoteVideo.src = window.URL.createObjectURL(mediaSource);



// Viewer-side control: "Start Streaming" asks the server for the live feed;
// any other label tears the connection down and resets the button.
function toggleStreaming() {
    if (streamingBtn.textContent !== 'Start Streaming') {
        stopStreaming();
        streamingBtn.textContent = 'Start Streaming';
        return;
    }
    ws.send("live");
}


// Close the WebSocket; playback ends once the buffered data runs out.
function stopStreaming() {
    ws.close();
}

播放端前端界面

<body>
<div id="container">
    <h1>mediaRecorder-webSocket-mediaSource</h1>
    <!-- Live playback of the broadcast relayed by the server -->
    <video id="remoteVideo" autoplay ></video>
    <div>
        <!-- NOTE(review): the click handler registration is commented out in
             client.js, so this button is currently inert — confirm intent. -->
        <button id="streamingBtn">Start Streaming</button>
    </div>
</div>
<script src="js/client.js"></script>
</body>
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值