获取SPS、PPS
ByteBuffer spsb = videoEncodec.getOutputFormat().getByteBuffer("csd-0");
byte[] sps = new byte[spsb.remaining()];
spsb.get(sps, 0, sps.length);
ByteBuffer ppsb = videoEncodec.getOutputFormat().getByteBuffer("csd-1");
byte[] pps = new byte[ppsb.remaining()];
ppsb.get(pps, 0, pps.length);
LivePushActivity
package com.example.glivepush;
import android.os.Bundle;
import android.os.Environment;
import android.se.omapi.SEService;
import android.util.Log;
import android.view.View;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.example.glivepush.camera.GCameraView;
import com.example.glivepush.push.GConnectListener;
import com.example.glivepush.push.PushEncodec;
import com.example.glivepush.push.PushVideo;
import com.example.glivepush.util.DisplayUtil;
public class LivePushActivity extends AppCompatActivity {
private PushVideo pushVideo;
/***************************************获取MediaCodec sps pps******************start***********/
private GCameraView gCameraView;
private boolean start = false;
private PushEncodec pushEncodec;
/***************************************获取MediaCodec sps pps*******************end************/
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_live_push);
pushVideo = new PushVideo();
/***************************************获取MediaCodec sps pps******************start***********/
gCameraView = findViewById(R.id.cameraView);
/***************************************获取MediaCodec sps pps*******************end************/
pushVideo.setgConnectListener(new GConnectListener() {
@Override
public void onConnecting() {
Log.d("godv", "链接服务器中");
}
@Override
public void onConnectSuccess() {
Log.d("godv", "链接服务器成功");
/***************************************获取MediaCodec sps pps******************start***********/
pushEncodec = new PushEncodec(LivePushActivity.this, gCameraView.getTextureId());
pushEncodec.initEncodec(
gCameraView.getEglContext(),
DisplayUtil.getScreenWidth(LivePushActivity.this),
DisplayUtil.getScreenHeight(LivePushActivity.this),
44100,
2
);
pushEncodec.startRecord();
/***************************************获取MediaCodec sps pps*******************end************/
}
@Override
public void onConnectFail(String msg) {
Log.d("godv", msg);
}
});
}
public void startPush(View view) {
/***************************************获取MediaCodec sps pps******************start***********/
start = !start;
if (start) {
pushVideo.initLivePush("rtmp://192.168.0.14/myapp/mystream");
} else {
if (pushEncodec != null) {
pushEncodec.stopRecord();
pushEncodec = null;
}
}
/***************************************获取MediaCodec sps pps*******************end************/
}
}
BasePushEncoder
package com.example.glivepush.push;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;
import android.view.Surface;
import com.example.glivepush.egl.EglHelper;
import com.example.glivepush.egl.GEGLSurfaceView;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
import javax.microedition.khronos.egl.EGLContext;
public abstract class BasePushEncoder {
private Surface surface;
private EGLContext eglContext;
private int width;
private int height;
//视频的编码器
private MediaCodec videoEncodec;
private MediaFormat videoFormat;
private MediaCodec.BufferInfo videoBufferinfo;
//音频的编码器
private MediaCodec audioEncodec;
private MediaFormat audioFormat;
private MediaCodec.BufferInfo audioBufferinfo;
private long audioPts = 0;
private int sampleRate = 0;
//渲染视频的线程
private GEGLMediaThread geglMediaThread;
//编码视频的线程
private VideoEncodecThread videoEncodecThread;
//编码音频的线程
private AudioEncodecThread audioEncodecThread;
private GEGLSurfaceView.GGLRender gGLRender;
public final static int RENDERMODE_WHEN_DIRTY = 0;
public final static int RENDERMODE_CONTINUOUSLY = 1;
private int mRenderMode = RENDERMODE_CONTINUOUSLY;
private OnMediaInfoListener onMediaInfoListener;
public void setOnMediaInfoListener(OnMediaInfoListener onMediaInfoListener) {
this.onMediaInfoListener = onMediaInfoListener;
}
public BasePushEncoder(Context context) {
}
public void setRender(GEGLSurfaceView.GGLRender gGLRender) {
this.gGLRender = gGLRender;
}
public void setRenderMode(int mRenderMode) {
if (gGLRender == null) {
throw new RuntimeException("must set render before");
}
this.mRenderMode = mRenderMode;
}
//初始化方法
public void initEncodec(EGLContext eglContext, int width, int height, int sampleRate, int channelCount) {
this.width = width;
this.height = height;
this.eglContext = eglContext;
initMediaEncodc(width, height, sampleRate, channelCount);
}
//开始编码
public void startRecord() {
if (surface != null && eglContext != null) {
audioPts = 0;
geglMediaThread = new GEGLMediaThread(new WeakReference<BasePushEncoder>(this));
videoEncodecThread = new VideoEncodecThread(new WeakReference<BasePushEncoder>(this));
audioEncodecThread = new AudioEncodecThread(new WeakReference<BasePushEncoder>(this));
geglMediaThread.isCreate = true;
geglMediaThread.isChange = true;
geglMediaThread.start();
videoEncodecThread.start();
//audioEncodecThread.start();
}
}
//结束编码
public void stopRecord() {
if (geglMediaThread != null && videoEncodecThread != null && audioEncodecThread != null) {
videoEncodecThread.exit();
audioEncodecThread.exit();
geglMediaThread.onDestory();
videoEncodecThread = null;
geglMediaThread = null;
audioEncodecThread = null;
}
}
private void initMediaEncodc(int width, int height, int sampleRate, int channelCount) {
//参数二录制类型
initVideoEncodec(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
initAudioEncodec(MediaFormat.MIMETYPE_AUDIO_AAC, sampleRate, channelCount);
}
//初始化video的编码器
private void initVideoEncodec(String mimeType, int width, int height) {
try {
videoBufferinfo = new MediaCodec.BufferInfo();
videoFormat = MediaFormat.createVideoFormat(mimeType, width, height);
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
//码率
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 4);
//帧率
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
//关键帧间隔
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
videoEncodec = MediaCodec.createEncoderByType(mimeType);
//录制没有Surface
//最后一个参数传的是编码
videoEncodec.configure(videoFormat, null, null,
MediaCodec.CONFIGURE_FLAG_ENCODE);
//得到Surface
surface = videoEncodec.createInputSurface();
} catch (IOException e) {
e.printStackTrace();
videoEncodec = null;
videoFormat = null;
videoBufferinfo = null;
}
}
//初始化音频编码器
private void initAudioEncodec(String mimeType, int simpleRate, int channelCount) {
try {
this.sampleRate = simpleRate;
audioBufferinfo = new MediaCodec.BufferInfo();
audioFormat = MediaFormat.createAudioFormat(mimeType, simpleRate, channelCount);
//设置比特率
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 96000);
//设置aac格式等级
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
//设置最大输入缓存
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 4096);
//生成encodec
audioEncodec = MediaCodec.createEncoderByType(mimeType);
audioEncodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (IOException e) {
e.printStackTrace();
audioEncodec = null;
audioFormat = null;
audioBufferinfo = null;
}
}
//传递PCM的方法
public void putPCMDate(byte[] buffer, int size) {
if (audioEncodecThread != null && !audioEncodecThread.isExit && buffer != null && size > 0) {
int inputBufferindex = audioEncodec.dequeueInputBuffer(0);
if (inputBufferindex >= 0) {
ByteBuffer byteBuffer = audioEncodec.getInputBuffers()[inputBufferindex];
byteBuffer.clear();
byteBuffer.put(buffer);
long pts = getAudioPts(size, sampleRate);
audioEncodec.queueInputBuffer(inputBufferindex, 0, size, pts, 0);
}
}
}
private long getAudioPts(int size, int sampleRate) {
audioPts += (long) (1.0 * size / (sampleRate * 2 * 2) * 1000000.0);
return audioPts;
}
//渲染视频
static class GEGLMediaThread extends Thread {
private WeakReference<BasePushEncoder> encoder;
private EglHelper eglHelper;
private Object object;
private boolean isExit = false;
private boolean isCreate = false;
private boolean isChange = false;
private boolean isStart = false;
public GEGLMediaThread(WeakReference<BasePushEncoder> encoder) {
this.encoder = encoder;
}
@Override
public void run() {
super.run();
isExit = false;
isStart = false;
object = new Object();
eglHelper = new EglHelper();
eglHelper.initEgl(encoder.get().surface, encoder.get().eglContext);
while (true) {
if (isExit) {
release();
break;
}
//刷新模式
if (isStart) {
if (encoder.get().mRenderMode == RENDERMODE_WHEN_DIRTY) {
synchronized (object) {
try {
object.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} else if (encoder.get().mRenderMode == RENDERMODE_CONTINUOUSLY) {
try {
Thread.sleep(1000 / 60);
} catch (InterruptedException e) {
e.printStackTrace();
}
} else {
throw new RuntimeException("mRenderMode is wrong value");
}
}
onCreate();
onChange(encoder.get().width, encoder.get().height);
onDraw();
isStart = true;
}
}
public void release() {
if (eglHelper != null) {
eglHelper.destoryEgl();
eglHelper = null;
object = null;
encoder = null;
}
}
private void onCreate() {
if (isCreate && encoder.get().gGLRender != null) {
isCreate = false;
encoder.get().gGLRender.onSurfaceCreated();
}
}
private void onChange(int width, int height) {
if (isChange && encoder.get().gGLRender != null) {
isChange = false;
encoder.get().gGLRender.onSurfaceChanged(width, height);
}
}
private void onDraw() {
if (encoder.get().gGLRender != null && eglHelper != null) {
encoder.get().gGLRender.onDrawFrame();
if (!isStart) {
encoder.get().gGLRender.onDrawFrame();
}
eglHelper.swapBuffers();
}
}
private void requestRender() {
if (object != null) {
synchronized (object) {
object.notifyAll();
}
}
}
public void onDestory() {
isExit = true;
requestRender();
}
}
//编码视频
static class VideoEncodecThread extends Thread {
private WeakReference<BasePushEncoder> encoder;
private boolean isExit;
private MediaCodec videoEncodec;
private MediaFormat videoFormat;
private MediaCodec.BufferInfo videoBufferinfo;
//pts
private long pts;
/***************************************获取MediaCodec sps pps******************start***********/
//sps
private byte[] sps;
private byte[] pps;
/***************************************获取MediaCodec sps pps******************end***********/
public VideoEncodecThread(WeakReference<BasePushEncoder> encoder) {
this.encoder = encoder;
videoEncodec = encoder.get().videoEncodec;
videoFormat = encoder.get().videoFormat;
videoBufferinfo = encoder.get().videoBufferinfo;
}
@Override
public void run() {
super.run();
pts = 0;
isExit = false;
videoEncodec.start();
while (true) {
if (isExit) {
videoEncodec.stop();
videoEncodec.release();
videoEncodec = null;
Log.d("godv", "录制完成");
break;
}
//视频编码开始
//输出队列索引
int outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
Log.d("godv", "INFO_OUTPUT_FORMAT_CHANGED");
/***************************************获取MediaCodec sps pps******************start***********/
ByteBuffer spsb = videoEncodec.getOutputFormat().getByteBuffer("csd-0");
sps = new byte[spsb.remaining()];
spsb.get(sps, 0, sps.length);
ByteBuffer ppsb = videoEncodec.getOutputFormat().getByteBuffer("csd-1");
pps = new byte[ppsb.remaining()];
ppsb.get(pps, 0, pps.length);
Log.d("godv", "sps : " + byteToHex(sps));
Log.d("godv", "pps : " + byteToHex(pps));
/***************************************获取MediaCodec sps pps******************end***********/
} else {
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = videoEncodec.getOutputBuffers()[outputBufferIndex];
outputBuffer.position(videoBufferinfo.offset);
outputBuffer.limit(videoBufferinfo.offset + videoBufferinfo.size);
if (pts == 0) {
pts = videoBufferinfo.presentationTimeUs;
}
videoBufferinfo.presentationTimeUs = videoBufferinfo.presentationTimeUs - pts;
/***************************************获取MediaCodec sps pps******************start***********/
byte[] data = new byte[outputBuffer.remaining()];
outputBuffer.get(data, 0, data.length);
Log.d("godv", "data : " + byteToHex(data));
/***************************************获取MediaCodec sps pps******************end***********/
//回调
if (encoder.get().onMediaInfoListener != null) {
encoder.get().onMediaInfoListener.onMediaTime(
(int) videoBufferinfo.presentationTimeUs / 1000000);
}
videoEncodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = videoEncodec.dequeueOutputBuffer(videoBufferinfo, 0);
}
}
}
}
public void exit() {
isExit = true;
}
}
//编码音频
static class AudioEncodecThread extends Thread {
//外层类的引用
private WeakReference<BasePushEncoder> encoder;
//是否退出
private boolean isExit;
//编码
private MediaCodec audioEncodec;
private MediaCodec.BufferInfo bufferInfo;
//pts
private long pts;
public AudioEncodecThread(WeakReference<BasePushEncoder> encoder) {
this.encoder = encoder;
audioEncodec = encoder.get().audioEncodec;
bufferInfo = encoder.get().audioBufferinfo;
}
@Override
public void run() {
super.run();
//初始化
pts = 0;
isExit = false;
//编码器开始编码
audioEncodec.start();
while (true) {
if (isExit) {
//回收资源
audioEncodec.stop();
audioEncodec.release();
audioEncodec = null;
break;
}
int outputBufferIndex = audioEncodec.dequeueOutputBuffer(bufferInfo, 0);
//格式改变
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
} else {
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = audioEncodec.getOutputBuffers()[outputBufferIndex];
outputBuffer.position(bufferInfo.offset);
outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if (pts == 0) {
pts = bufferInfo.presentationTimeUs;
}
bufferInfo.presentationTimeUs = bufferInfo.presentationTimeUs - pts;
audioEncodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = audioEncodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
}
}
public void exit() {
isExit = true;
}
}
public interface OnMediaInfoListener {
void onMediaTime(int times);
}
/***************************************获取MediaCodec sps pps******************start***********/
//byte 转 16进制
public static String byteToHex(byte[] bytes) {
StringBuffer stringBuffer = new StringBuffer();
for (int i = 0; i < bytes.length; i++) {
String hex = Integer.toHexString(bytes[i]);
if (hex.length() == 1) {
stringBuffer.append("0" + hex);
} else {
stringBuffer.append(hex);
}
if (i > 20) {
break;
}
}
return stringBuffer.toString();
}
/***************************************获取MediaCodec sps pps******************end***********/
}
PushEncodec
package com.example.glivepush.push;
import android.content.Context;
import com.example.glivepush.encodec.GBaseMediaEncoder;
public class PushEncodec extends BasePushEncoder {
private PushRender gEncodecRender;
public PushEncodec(Context context, int textureid) {
super(context);
gEncodecRender = new PushRender(context, textureid);
setRender(gEncodecRender);
setRenderMode(BasePushEncoder.RENDERMODE_CONTINUOUSLY);//持续渲染
}
}
PushRender
package com.example.glivepush.push;
import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import com.example.glivepush.R;
import com.example.glivepush.egl.GEGLSurfaceView;
import com.example.glivepush.egl.GShaderUtil;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class PushRender implements GEGLSurfaceView.GGLRender {
private Context context;
private Bitmap bitmap;
private int bitmapTextureId;
private float[] vertexData = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f,
0f, 0f,
0f, 0f,
0f, 0f,
0f, 0f
};
private FloatBuffer vertexBuffer;
private float[] fragmentData = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
private FloatBuffer fragmentBuffer;
private int program;
private int vPosition;
private int fPosition;
private int textureid;
private int vboId;
public PushRender(Context context, int textureid) {
this.context = context;
this.textureid = textureid;
bitmap = GShaderUtil.createTextImage("视频直播推流:godv", 50, "#ff0000",
"#00000000", 0);
float r = 1.0f * bitmap.getWidth() / bitmap.getHeight();
float w = r * 0.1f;
vertexData[8] = 0.8f - w;
vertexData[9] = -0.8f;
vertexData[10] = 0.8f;
vertexData[11] = -0.8f;
vertexData[12] = 0.8f - w;
vertexData[13] = -0.7f;
vertexData[14] = 0.8f;
vertexData[15] = -0.7f;
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);
fragmentBuffer = ByteBuffer.allocateDirect(fragmentData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(fragmentData);
fragmentBuffer.position(0);
}
@Override
public void onSurfaceCreated() {
//开启argb透明
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
String vertexSource = GShaderUtil.getRawResource(context, R.raw.vertex_shader_screen);
String fragmentSource = GShaderUtil.getRawResource(context, R.raw.fragment_shader_screen);
program = GShaderUtil.createProgram(vertexSource, fragmentSource);
vPosition = GLES20.glGetAttribLocation(program, "v_Position");
fPosition = GLES20.glGetAttribLocation(program, "f_Position");
int[] vbos = new int[1];
GLES20.glGenBuffers(1, vbos, 0);
vboId = vbos[0];
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4 + fragmentData.length * 4, null, GLES20.GL_STATIC_DRAW);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, vertexData.length * 4, vertexBuffer);
GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, vertexData.length * 4, fragmentData.length * 4, fragmentBuffer);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
bitmapTextureId = GShaderUtil.loadBitmapTexture(bitmap);
}
@Override
public void onSurfaceChanged(int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onDrawFrame() {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glClearColor(1f, 0f, 0f, 1f);
GLES20.glUseProgram(program);
//先设置vbo再绑定纹理
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vboId);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureid);
GLES20.glEnableVertexAttribArray(vPosition);
GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
0);
GLES20.glEnableVertexAttribArray(fPosition);
GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
vertexData.length * 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
//bitmap
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapTextureId);
GLES20.glEnableVertexAttribArray(vPosition);
GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 8,
32);
GLES20.glEnableVertexAttribArray(fPosition);
GLES20.glVertexAttribPointer(fPosition, 2, GLES20.GL_FLOAT, false, 8,
vertexData.length * 4);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
//解绑
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);
}
}