实例介绍
【实例简介】
本项目提供Android平台点对点音视频对讲功能,项目的基本架构如下:
1. Android Camera2 API 采集预览视频
2. MediaCodec 进行硬编码,编码成 H.264 视频流
3. AudioRecord采集音频PCM数据,同样利用MediaCodec编码成AAC数据
4. jrtplib库进行视频音频数据发送,本项目修改jrtplib库,添加了大数据切片功能,方便进行网络传输
5. ffmpeg对接收到的数据进行解码,利用ffmpeg软解码的原因是减少延时,Android硬解码由于机制问题,会存在
缓冲队列,导致延时过大,ffmpeg能够很好解决这个问题。
6. 利用SurfaceTexture进行绘制,在JNI层直接将解码的RGB数据拷贝到Surface中,减少数据传递
【实例截图】
【核心代码】
package com.forrest.ui;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import com.forrest.encoder.MediaEncoderHandler;
import com.forrest.gles.GlUtil;
import com.forrest.gles.Rectangle;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class CameraGLSurfaceView extends GLSurfaceView {
private final static String TAG = "CameraGLSurfaceView";
private boolean surfaceAvailable = false;
public final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
private static final int RECORDING_OFF = 0;
private static final int RECORDING_ON = 1;
private static final int RECORDING_RESUMED = 2;
private int mRecordingStatus = -1;
private boolean mRecordingEnabled = false;
private int mRecordWidth;
private int mRecordHeight;
private String mIP;
private MediaEncoderHandler mMediaEncoderHandler;
private boolean mPictureEnable = false;
private int mPictureWidth;
private int mPictureHeight;
private String mPicturePath;
private int mOffsetX = 0;
private int mOffsetY = 0;
private int mWidth = 0;
private int mHeight = 0;
private double mAspectRatio = 16.0f/9.0f;
public interface MySurfaceListener {
void surfaceCreated();
void surfaceChanged();
void surfaceDestroyed();
}
private MyRenderer mRenderer;
private MySurfaceListener mSurfaceListener;
public CameraGLSurfaceView(Context context) {
this(context, null);
}
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
setEGLContextClientVersion(2);
mRenderer = new MyRenderer();
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
@Override
public void onResume() {
super.onResume();
// mRenderer.onResume();
}
@Override
public void onPause() {
super.onPause();
// mRenderer.onPause();
}
public void startRecord(int width, int height, String ip) {
if(mRecordingStatus == RECORDING_ON) {
return ;
}
mRecordingStatus = RECORDING_ON;
this.mIP = ip;
this.mRecordWidth = width;
this.mRecordHeight = height;
}
public void stopRecord() {
mRecordingEnabled = mMediaEncoderHandler.isRecording();
if(mRecordingEnabled) {
mRecordingStatus = RECORDING_OFF;
mMediaEncoderHandler.stopRecording();
}
}
public boolean isRecording() {
if (mRecordingStatus == RECORDING_ON) {
return true;
} else {
return false;
}
}
/**
* 拍照
* @param width 宽
* @param height 高
* @param picturePath 保存路径
*/
public void takePicture(int width, int height, String picturePath) {
this.mPictureWidth = width;
this.mPictureHeight = height;
this.mPicturePath = picturePath;
mPictureEnable = true;
}
public void setAspectRatio(double aspectRatio) {
this.mAspectRatio = aspectRatio;
}
public void setAspectRatio(int width, int height) {
this.mAspectRatio = (double)width / (double)height;
}
public SurfaceTexture getSurfaceTexture() {
return mRenderer.getSurfaceTexture();
}
public void setSurfaceListener(MySurfaceListener surfaceListener) {
this.mSurfaceListener = surfaceListener;
}
public boolean isAvailable() {
return this.surfaceAvailable;
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
surfaceAvailable = true;
Log.d(TAG, "[CameraGLSurfaceView] : surfaceCreated");
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
super.surfaceChanged(holder, format, w, h);
Log.d(TAG, "[CameraGLSurfaceView] : surfaceChanged");
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
super.surfaceDestroyed(holder);
if (mSurfaceListener != null) {
mSurfaceListener.surfaceDestroyed();
}
surfaceAvailable = false;
Log.d(TAG, "[CameraGLSurfaceView] : surfaceDestroyed");
}
class MyRenderer implements Renderer, SurfaceTexture.OnFrameAvailableListener {
private Rectangle mRect;
private int previewTextureId;
private SurfaceTexture surfaceTexture;
private boolean updateSurface = false;
private Rectangle bgRect;
private int bgTexureId;
private int frameBufferId;
private int i=0;
private long t;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
mRect = new Rectangle();
mRect.initVertexDataTexOES();
previewTextureId = GlUtil.generateTextureIdOES();
surfaceTexture = new SurfaceTexture(previewTextureId);
surfaceTexture.setOnFrameAvailableListener(this);
synchronized (this) {
updateSurface = false;
}
if (mSurfaceListener != null) {
mSurfaceListener.surfaceCreated();
}
bgRect = new Rectangle();
bgRect.initVertexDataTex2D();
// bgTexureId = GlUtil.initTextureFromResID(R.drawable.wp, UVCCameraGLSurfaceView.this.getResources());
frameBufferId = GlUtil.genFrameBuffer(1920, 1080);
mMediaEncoderHandler = new MediaEncoderHandler();
Log.d(TAG, "[CameraGLSurfaceView MyRenderer] : onSurfaceCreated");
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
mWidth = width;
mHeight = height;
// mHeight = (int)(mWidth / mAspectRatio);
// mOffsetY = (height - mHeight) / 2;
mOffsetY = height - 720;
if (mSurfaceListener != null) {
mSurfaceListener.surfaceChanged();
}
Log.d(TAG, "[CameraGLSurfaceView MyRenderer] : onSurfaceChanged (width :" width " height : " height ")");
}
@Override
public void onDrawFrame(GL10 gl) {
synchronized (this) {
if (updateSurface) {
surfaceTexture.updateTexImage();
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glViewport(0, mOffsetY, 1280, 720);
// bgRect.drawSelfTex2D(bgTexureId);
mRect.drawSelfOES(previewTextureId);
i ;
if (i == 1) {
t = System.currentTimeMillis();
} else if (i==1000) {
Log.d("test", "1000fps cost time :" (System.currentTimeMillis()-t));
}
//录像相关操作
mRecordingEnabled = mMediaEncoderHandler.isRecording();
if (!mRecordingEnabled && mRecordingStatus == RECORDING_ON) {
mMediaEncoderHandler.startRecording(new MediaEncoderHandler.EncoderConfig(getContext(),
mIP, mRecordWidth, mRecordHeight, 10000000, EGL14.eglGetCurrentContext()));
}
if (mRecordingEnabled) {
mMediaEncoderHandler.setTextureId(previewTextureId);
mMediaEncoderHandler.frameAvailable(surfaceTexture);
}
//拍照相关操作
if (mPictureEnable) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glViewport(0, 0, mPictureWidth, mPictureHeight);
mRect.drawSelfOES(previewTextureId);
GlUtil.saveFrame(mPicturePath, mPictureWidth, mPictureHeight);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
mPictureEnable = false;
}
}
}
}
@Override
synchronized public void onFrameAvailable(SurfaceTexture surfaceTexture) {
updateSurface = true;
requestRender();
}
private SurfaceTexture getSurfaceTexture() {
return this.surfaceTexture;
}
}
}
小贴士
感谢您为本站写下的评论,您的评论对其它用户来说具有重要的参考价值,所以请认真填写。
- 类似“顶”、“沙发”之类没有营养的文字,对勤劳贡献的楼主来说是令人沮丧的反馈信息。
- 相信您也不想看到一排文字/表情墙,所以请不要反馈意义不大的重复字符,也请尽量不要纯表情的回复。
- 提问之前请再仔细看一遍楼主的说明,或许是您遗漏了。
- 请勿到处挖坑绊人、招贴广告。既占空间让人厌烦,又没人会搭理,于人于己都无利。
关于好例子网
本站旨在为广大IT学习爱好者提供一个非营利性互相学习交流分享平台。本站所有资源都可以被免费获取学习研究。本站资源来自网友分享,对搜索内容的合法性不具有预见性、识别性、控制性,仅供学习研究,请务必在下载后24小时内给予删除,不得用于其他任何用途,否则后果自负。基于互联网的特殊性,平台无法对用户传输的作品、信息、内容的权属或合法性、安全性、合规性、真实性、科学性、完整权、有效性等进行实质审查;无论平台是否已进行审查,用户均应自行承担因其传输的作品、信息、内容而可能或已经产生的侵权或权属纠纷等法律责任。本站所有资源不代表本站的观点或立场,基于网友分享,根据中国法律《信息网络传播权保护条例》第二十二与二十三条之规定,若资源存在侵权或相关问题请联系本站客服人员,点此联系我们。关于更多版权及免责申明参见 版权及免责申明
网友评论
我要评论