Reference article: https://blog.youkuaiyun.com/qq_38795209/article/details/113942389?login=from_csdn
To implement camera voice intercom / broadcast, the platform has to send G.711 A-law (g711a) RTP packets. The sample code below, adapted from material found online, has been verified against the GB28181 voice broadcast flow: the utility class at the end is used to send the audio data, and a Huawei camera plays it back correctly.
RtpPacket.java
package com.genersoft.iot.vmp.vmanager.gb28181.talk.packet;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.*;
/**
* Source: https://blog.youkuaiyun.com/qq_38795209/article/details/113942389?login=from_csdn
* G.711 A-law (g711a) RTP packetizer.
* How to derive the send interval and the number of audio bytes per packet:
* audio frame rate fps = 20
* sample rate sample_rate = 8000 Hz
* bit rate bitrate = 64000 bps
* send interval send_interval = 1 / 20 = 0.05 s = 50 ms
* audio bytes per packet audio_need_len = 64000 bps * 0.05 s = 3200 bit = 400 bytes (i.e. bitrate / fps / 8)
* UDP datagram length send_len = 12-byte RTP header + audio_need_len = 412 bytes
*/
public class RtpPacket {
public static final int RTP_PAYLOAD_TYPE_PCMU = 0; // PCMU, u-law
public static final int RTP_PAYLOAD_TYPE_PCMA = 8; // PCMA, a-law
private byte version = 2;
private byte padding = 0;
private byte extension = 0;
private byte csrc_count = 0;
private byte marker = 1; // marker bit is set on every packet in this implementation
private byte payload_type = 0;
private int sequence_number = 0;
private long timestamp = 0;
private long ssrc = 0;
public RtpPacket(long ssrcVal) {
ssrc = ssrcVal;
}
public byte[] packet_g711a(byte[] audioData, int audioLen) {
int rtp_head_len = 12;
byte[] payload = new byte[rtp_head_len + audioLen];
payload_type = (byte) RTP_PAYLOAD_TYPE_PCMA; // g711a a-law
sequence_number++;
timestamp += audioLen; // 8000 Hz, 1 byte per sample: the timestamp advances by the payload length
// 12-byte RTP header
payload[0] = (byte) ((version << 6) + (padding << 5) + (extension << 4) + csrc_count);
payload[1] = (byte) ((marker << 7) + payload_type);
// sequence number, big-endian
payload[2] = (byte) ((sequence_number >> 8) & 0xff);
payload[3] = (byte) (sequence_number & 0xff);
// timestamp, big-endian
payload[4] = (byte) ((timestamp >> 24) & 0xff);
payload[5] = (byte) ((timestamp >> 16) & 0xff);
payload[6] = (byte) ((timestamp >> 8) & 0xff);
payload[7] = (byte) (timestamp & 0xff);
// SSRC, big-endian
payload[8] = (byte) ((ssrc >> 24) & 0xff);
payload[9] = (byte) ((ssrc >> 16) & 0xff);
payload[10] = (byte) ((ssrc >> 8) & 0xff);
payload[11] = (byte) (ssrc & 0xff);
System.arraycopy(audioData, 0, payload, rtp_head_len, audioLen);
return payload;
}
public static void main(String[] args) throws IOException {
long ssrc = 255;
int send_interval_ms = 50;
int audio_need_len = 400;
int localPort = 56200;
int peerPort = 15062;
String peerIP = "192.168.1.101";
DatagramSocket ds = null;
InetAddress peerAddress = null;
InputStream inputStream = new FileInputStream("./mq.g711a");
try {
ds = new DatagramSocket(localPort);
} catch (SocketException e) {
e.printStackTrace();
System.exit(1);
}
try {
peerAddress = InetAddress.getByName(peerIP);
} catch (UnknownHostException e) {
e.printStackTrace();
System.exit(1);
}
RtpPacket rtp = new RtpPacket(ssrc);
byte[] audio_data = new byte[audio_need_len];
while (inputStream.read(audio_data) == audio_need_len) {
byte[] payload = rtp.packet_g711a(audio_data, audio_data.length);
DatagramPacket dp = new DatagramPacket(payload, payload.length, peerAddress, peerPort);
ds.send(dp);
//System.out.println(Arrays.toString(payload));
try {
Thread.sleep(send_interval_ms);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
inputStream.close();
ds.close();
}
}
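For a quick sanity check of the 12-byte header produced by packet_g711a, here is a minimal sketch (not part of the original code; the class name and the zero-filled 400-byte test buffer are assumptions for illustration) that builds one packet and decodes the header fields back. The expected values follow from the class above.
package com.genersoft.iot.vmp.vmanager.gb28181.talk.packet;

// Hypothetical helper class, only for verifying the header layout built by RtpPacket.
public class RtpHeaderCheck {
    public static void main(String[] args) {
        RtpPacket rtp = new RtpPacket(255L);
        byte[] audio = new byte[400]; // one 50 ms frame: 64000 bps / 20 fps / 8 = 400 bytes (zero-filled test data)
        byte[] pkt = rtp.packet_g711a(audio, audio.length);

        int version = (pkt[0] & 0xC0) >> 6;                   // expect 2
        int marker = (pkt[1] & 0x80) >> 7;                    // expect 1
        int payloadType = pkt[1] & 0x7F;                      // expect 8 (PCMA)
        int seq = ((pkt[2] & 0xFF) << 8) | (pkt[3] & 0xFF);   // expect 1 for the first packet
        long ts = ((pkt[4] & 0xFFL) << 24) | ((pkt[5] & 0xFFL) << 16)
                | ((pkt[6] & 0xFFL) << 8) | (pkt[7] & 0xFFL); // expect 400 (one frame of 8 kHz samples)

        System.out.printf("V=%d M=%d PT=%d seq=%d ts=%d len=%d%n",
                version, marker, payloadType, seq, ts, pkt.length); // total length expect 412
    }
}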
Utility class: SendRTPUtil.java
package com.genersoft.iot.vmp.vmanager.gb28181.talk.packet;
import org.apache.commons.compress.utils.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
/**
* Sends RTP packets to the camera.
*/
public class SendRTPUtil {
private static final Logger log = LoggerFactory.getLogger(SendRTPUtil.class);
/**
* Packs the given file into RTP packets and sends them to the camera.
* Note the file format: raw G.711 A-law (g711a).
*
* @param localPort local RTP source port on this host
* @param cameraIp camera media IP address
* @param cameraPort camera media (RTP) port
* @param ssrc RTP SSRC; defaults to 255 when null
* @param audioFile path to the raw G.711 A-law audio file
*/
public static void send(int localPort, String cameraIp, int cameraPort, Long ssrc, String audioFile) {
DatagramSocket ds = null;
InputStream audioStream = null;
int send_interval_ms = 50; // one 50 ms frame per packet, i.e. 20 fps
int rtpPacketSize = 400; // raw payload size per packet = bitrate / fps / 8
ssrc = ssrc == null ? 255 : ssrc;
try {
ds = new DatagramSocket(localPort);
InetAddress peerAddress = InetAddress.getByName(cameraIp);
audioStream = new FileInputStream(audioFile);
RtpPacket rtpPacket = new RtpPacket(ssrc);
byte[] audio_data = new byte[rtpPacketSize];
log.info(">>>>>开始发送rtp: localPort={},peerAddress={},peerPort={},ssrc={},audioFile={}", localPort, cameraIp, cameraPort, ssrc, audioFile);
int i = 0;
while (audioStream.read(audio_data) == rtpPacketSize) {
byte[] payload = rtpPacket.packet_g711a(audio_data, audio_data.length);
DatagramPacket dp = new DatagramPacket(payload, payload.length, peerAddress, cameraPort);
ds.send(dp);
log.debug(">>>>>>>发送rtp包:{}",i);
try {
Thread.sleep(send_interval_ms);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}
log.info(">>>>>发送完成: localPort={},peerAddress={},peerPort={},ssrc={},audioFile={}", localPort, cameraIp, cameraPort, ssrc, audioFile);
} catch (Exception e) {
e.printStackTrace();
log.error(">>>>>发送rtp异常: localPort={},peerAddress={},peerPort={},ssrc={},audioFile={},{}", localPort, cameraIp, cameraPort, ssrc, audioFile, e.getMessage());
} finally {
if (ds != null) {
log.info(">>>>>释放本地端口:{}",ds.getLocalPort());
ds.disconnect();
ds.close();
}
IOUtils.closeQuietly(audioStream);
}
}
}
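A minimal usage sketch follows (not from the original article; the IP address, ports, SSRC, and file path are placeholders and would normally come from the SDP exchanged during the GB28181 broadcast signaling). Because send() blocks until the whole file has been sent, it is started on a separate thread:
package com.genersoft.iot.vmp.vmanager.gb28181.talk.packet;

// Hypothetical caller; all parameter values are placeholders for a real broadcast session.
public class TalkSendDemo {
    public static void main(String[] args) {
        int localPort = 56200;             // local RTP source port
        String cameraIp = "192.168.1.101"; // camera media IP (from the broadcast SDP exchange)
        int cameraPort = 15062;            // camera media (RTP) port (from the broadcast SDP exchange)
        Long ssrc = 255L;                  // SSRC agreed for the broadcast stream
        String audioFile = "./mq.g711a";   // raw G.711 A-law file, 8000 Hz mono

        // send() sleeps 50 ms per 400-byte frame, so it blocks for the duration of the audio;
        // run it off the signaling thread in a real application.
        new Thread(() -> SendRTPUtil.send(localPort, cameraIp, cameraPort, ssrc, audioFile),
                "rtp-talk-sender").start();
    }
}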