相关类位于包 org.lasque.tubeautysetting 下。首先需要调用 TXLivePusher 对象的 setVideoProcessListener() 方法,设置 TXLivePusher.VideoCustomProcessListener 接口,以接收纹理回调。
/**
 * Bridges Tencent's texture callbacks into the TuSDK render pipe:
 * each captured texture is processed and the resulting texture id is
 * handed back to the pusher.
 */
private TXLivePusher.VideoCustomProcessListener mTuSDKVideoCustomProcessListener = new TXLivePusher.VideoCustomProcessListener() {

    @Override
    public int onTextureCustomProcess(final int textureId, final int width, final int height) {
        // Lazily bring up the render pipe on the first frame, sharing the
        // pusher thread's current EGL context.
        if (!LivePipeMediator.getInstance().isReady()) {
            final EGLContext sharedContext = EGL14.eglGetCurrentContext();
            LivePipeMediator.getInstance().requestInit(this, sharedContext);
        }
        // Release the previous frame's result before overwriting it.
        if (mResult != null) {
            mResult.release();
        }
        mResult = LivePipeMediator.getInstance().process(textureId, width, height, System.currentTimeMillis());
        // Hand the processed texture back to the pusher.
        return mResult.getGLTexture();
    }

    @Override
    public void onDetectFacePoints(float[] floats) {
        // Face landmarks are not consumed by this integration.
    }

    @Override
    public void onTextureDestoryed() {
        // GL surface is gone: tear down the render pipe and the engine.
        LivePipeMediator.getInstance().release();
        Engine.getInstance().release();
    }
};
通过实现 com.qiniu.pili.droid.streaming.StreamingPreviewCallback 与 com.qiniu.pili.droid.streaming.SurfaceTextureCallback 接口,完成 TuSDK 与七牛 SDK 之间的纹理信息交互。核心处理如下:
@Override
public void onSurfaceCreated() {
    // Bring up the render pipe, sharing the preview's EGL context when one exists.
    final EGLContext sharedContext = EGL14.eglGetCurrentContext();
    mPipe = LivePipeMediator.getInstance();
    mPipe.requestInit(this, sharedContext != EGL14.EGL_NO_CONTEXT ? sharedContext : null);
    mModuleFragment.setFilterPipe(mPipe);
    // Make the pipe's GL context current on the dedicated render thread so all
    // subsequent frame processing runs against it.
    mRenderPool.submit(new Runnable() {
        @Override
        public void run() {
            mPipe.getGLContext().makeCurrent();
        }
    });
}
/**
 * Qiniu texture callback: converts the OES input to a 2D texture, runs the
 * TuSDK pipe, rotates the result back, caches its YUV buffer for
 * onPreviewFrame, and returns the processed texture id.
 */
@Override
public int onDrawFrame(final int texId, final int texWidth, final int texHeight, final float[] transformMatrix) {
    mCount++;
    long startTime = System.currentTimeMillis();
    // All GL work must run on the render thread that owns the pipe's context.
    Future<Image> res = mRenderPool.submit(new Callable<Image>() {
        @Override
        public Image call() throws Exception {
            // Lazily create the OES -> Texture2D renderer. Width/height are
            // swapped here — presumably because the camera frame arrives
            // rotated 90°; confirm against the SDK demo.
            if (mTextureRender == null) {
                mTextureRender = new TextureRender(true);
                mTextureRender.create(texHeight, texWidth);
            }
            // Lazily create the renderer that rotates the result back for output.
            if (mOutputTextureRender == null) {
                mOutputTextureRender = new TextureRender(false);
                mOutputTextureRender.create(texWidth, texHeight);
            }
            mTextureRender.setSTMatrix(transformMatrix);
            mTextureRender.drawFrame(texId, texHeight, texWidth);
            Image output = mPipe.process(mTextureRender.getTextureID(), texHeight, texWidth, System.currentTimeMillis());
            // Invert the SDK's transform so the pushed frame keeps the expected orientation.
            float[] stMatrixInvert = new float[16];
            Matrix.invertM(stMatrixInvert, 0, transformMatrix, 0);
            mOutputTextureRender.setSTMatrix(stMatrixInvert);
            mOutputTextureRender.drawFrame(output.getGLTexture(), texWidth, texHeight);
            Image finalOutput = new Image(mOutputTextureRender.getTextureID(), texWidth, texHeight, System.currentTimeMillis());
            output.release();
            return finalOutput;
        }
    });
    try {
        synchronized (this) {
            // Release the previous frame before storing the new result and its YUV buffer.
            if (mResult != null) mResult.release();
            mResult = res.get();
            mCurrentBuffer = mResult.getBuffer(mCurrentFormat == PLFourCC.FOURCC_I420 ? Image.Format.I420 : Image.Format.NV21);
        }
    } catch (ExecutionException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        // Restore the interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
    long duration = System.currentTimeMillis() - startTime;
    sum += duration;
    count++;
    // If processing failed before any frame ever succeeded, mResult is still
    // null — fall back to the unprocessed input texture instead of an NPE.
    return mResult != null ? mResult.getGLTexture() : texId;
}
/**
 * Qiniu YUV callback: copies the last processed frame's YUV data into the
 * SDK's buffer so the encoded stream matches the filtered preview.
 *
 * @return true when {@code bytes} was overwritten with processed data.
 */
@Override
public boolean onPreviewFrame(final byte[] bytes, final int width, final int height, int rotation, final int fmt, long tsInNanoTime) {
    // Original tested "mPipe == null && !mEnableProcess"; either condition
    // alone already means there is nothing to do, so the intended (and
    // behaviorally equivalent) check is "||".
    if (mPipe == null || !mEnableProcess) return false;
    // mCurrentBuffer is written on onDrawFrame's thread; snapshot the reference.
    final byte[] processed = mCurrentBuffer;
    if (processed == null) return false;
    // Copy straight from the processed buffer — the intermediate clone() added
    // no safety and cost one allocation per frame.
    System.arraycopy(processed, 0, bytes, 0, bytes.length);
    return true;
}
目前 Android 端接入声网推流时,无法使用正常的声网采集流程,需要使用自采集模式配合声网旁路推流模式来实现。详细的自采集实现请参照 Demo。渲染与推流:
// Agora self-capture render loop: pulls the latest camera frame from the
// SurfaceTexture, runs it through the TuSDK pipe, previews the result, then
// pushes the rotated YUV/RGBA data to Agora.
// NOTE(review): width/height are passed swapped (height, width) throughout —
// presumably because the camera frame arrives rotated 90°; confirm against the Demo.
public void onDrawFrame()
{
if (mSurfaceTexture == null) return;
// Lazily build the texture pool on the first frame.
if (isNeedCreateTextures){
GLES20.glGenTextures(mTexCount, mTextures, 0);
for (int idx = 0; idx < mTexCount; idx++) {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[idx]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
// Allocate storage for each pool texture (portrait-oriented: height x width).
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D,
0,
GLES20.GL_RGBA,
mPreviewSize.height, mPreviewSize.width,
0,
GLES20.GL_RGBA,
GLES20.GL_UNSIGNED_BYTE,
null);
}
mTextureRender.create(mPreviewSize.height,mPreviewSize.width);
isNeedCreateTextures = false;
}
// Round-robin through the texture pool.
mTexture = mTextures[mTexIdx];
mTexIdx = (mTexIdx + 1) % mTexCount;
// Latch the newest camera frame and its transform matrix.
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(mMTX);
// Convert the OES camera texture into the pooled Texture2D.
int drawRes = mOutputSurface.drawImageTo(mTexture,mPreviewSize.height,mPreviewSize.width);
if (drawRes < 0) return;
// Feed the texture to the pipe and fetch the processed result.
Image output = mPipe.process(mTexture,mPreviewSize.height,mPreviewSize.width,System.currentTimeMillis());
// Show the processed result in the local preview.
mLocalVideoSurfaceView.updateImage(output);
// Rotate the processed result in preparation for pushing.
mTextureRender.drawFrame(output.getGLTexture(),mPreviewSize.height,mPreviewSize.width);
Image out = new Image(mTextureRender.getTextureID(),mPreviewSize.height,mPreviewSize.width,System.currentTimeMillis());
// Hand the frame data to Agora's external-video push API.
pushExternalVideoYuv(out.getBuffer(Image.Format.RGBA8888));
// pushExternalVideoFrame(output.getGLTexture());
// Release per-frame resources.
out.release();
output.release();
}
目前阿里云推流 SDK 的三方美颜接入,需通过自行实现 AlivcLivePushCustomFilter 接口来完成。阿里云 SDK 自定义滤镜实现如下:
/**
 * Custom beauty-filter hooks for the Aliyun pusher: the pipe is created in
 * customFilterCreate, each frame is processed in customFilterProcess, and
 * everything is torn down in customFilterDestroy.
 **/
mAlivcLivePusher.setCustomFilter(new AlivcLivePushCustomFilter() {
    @Override
    public void customFilterCreate() {
        Log.d(TAG, "customFilterCreate start");
        // Initialize the render pipe, sharing the pusher's EGL context when one exists.
        EGLContext currentContext = EGL14.eglGetCurrentContext();
        boolean hasContext = currentContext != EGL14.EGL_NO_CONTEXT;
        mPipe = LivePipeMediator.getInstance();
        mPipe.requestInit(this, hasContext ? currentContext : null);
        Log.d(TAG, "customFilterCreate end");
    }

    @Override
    public void customFilterUpdateParam(float fSkinSmooth, float fWhiten, float fWholeFacePink, float fThinFaceHorizontal, float fCheekPink, float fShortenFaceVertical, float fBigEye) {
        // Beauty parameters are driven by TuSDK's own controls; the SDK values are ignored.
    }

    @Override
    public void customFilterSwitch(boolean on) {
        // No-op: the pipe stays active for the whole session.
    }

    @Override
    public int customFilterProcess(int inputTexture, int textureWidth, int textureHeight, long extra) {
        Log.d(TAG, "customFilterProcess start: textureId" + inputTexture + ",width:" + textureWidth + ",height:" + textureHeight);
        // Release the previous frame's result before overwriting it.
        if (mResult != null) mResult.release();
        // BUG FIX: the original referenced undefined names (textureId/width/height);
        // the callback's parameters are inputTexture/textureWidth/textureHeight.
        Image out = LivePipeMediator.getInstance().process(inputTexture, textureWidth, textureHeight, System.currentTimeMillis());
        mResult = out;
        // Hand the processed texture back to the pusher.
        return mResult.getGLTexture();
    }

    @Override
    public void customFilterDestroy() {
        // Tear down the render pipe and the engine.
        LivePipeMediator pipeMediator = LivePipeMediator.getInstance();
        pipeMediator.release();
        Engine.getInstance().release();
    }
});
通过 RCRTCVideoOutputStream.setVideoFrameListener 注册要处理的视频流的采集监听:
RCRTCEngine.getInstance().getDefaultVideoStream().setVideoFrameListener(new IRCRTCVideoOutputFrameListener() {
    /**
     * Processes each captured frame through the TuSDK pipe and returns the
     * frame (with the processed texture id) to the SDK for sending.
     */
    @Override
    public RCRTCVideoFrame processVideoFrame(RCRTCVideoFrame rtcVideoFrame) {
        if (!LivePipeMediator.getInstance().isReady()) {
            // Lazily initialize the render pipe with the capture thread's EGL context.
            final EGLContext currentContext = EGL14.eglGetCurrentContext();
            LivePipeMediator.getInstance().requestInit(this, currentContext);
        }
        // Release the previous frame's result before overwriting it.
        if (mResult != null) mResult.release();
        // BUG FIX: textureId/width/height were undefined here; read them from the
        // incoming frame (RCRTCVideoFrame accessors — confirm names against the SDK).
        Image out = LivePipeMediator.getInstance().process(
                rtcVideoFrame.getTextureId(),
                rtcVideoFrame.getWidth(),
                rtcVideoFrame.getHeight(),
                System.currentTimeMillis());
        mResult = out;
        rtcVideoFrame.setTextureId(mResult.getGLTexture());
        return rtcVideoFrame;
    }
});
// Release the render pipe and the engine (call when streaming stops).
LivePipeMediator pipeMediator = LivePipeMediator.getInstance();
pipeMediator.release();
Engine.getInstance().release();
启动方法可参考官方文档中的“自定义视频前处理”章节。
// Register the handler that receives raw captured textures from ZegoExpressEngine,
// processes them through the TuSDK pipe, and sends the result back for publishing.
express.setCustomVideoProcessHandler(new IZegoCustomVideoProcessHandler() {
    // ... other IZegoCustomVideoProcessHandler callbacks elided ...

    // Receive texture from ZegoExpressEngine
    @Override
    public void onCapturedUnprocessedTextureData(int textureID, int width, int height, long referenceTimeMillisecond, ZegoPublishChannel channel) {
        if (!LivePipeMediator.getInstance().isReady()) {
            // Lazily initialize the render pipe with the capture thread's EGL context.
            final EGLContext currentContext = EGL14.eglGetCurrentContext();
            LivePipeMediator.getInstance().requestInit(this, currentContext);
        }
        // Release the previous frame's result before overwriting it.
        if (mResult != null) mResult.release();
        // BUG FIX: the original called process(textureId, ...) but the parameter is
        // named textureID. The unused ZegoEffectsVideoFrameParam setup (a leftover
        // from the Zego Effects sample) has been removed.
        Image out = LivePipeMediator.getInstance().process(textureID, width, height, System.currentTimeMillis());
        mResult = out;
        // Send processed texture to ZegoExpressEngine
        express.sendCustomVideoProcessedTextureData(mResult.getGLTexture(), width, height, referenceTimeMillisecond);
    }
    // BUG FIX: the original snippet never closed the anonymous class / statement.
});