目录
一、借助 MediaCodec 封装解码工具类 VideoCodec
二、使用 OpenGL 绘制视频封装 SoulFilter
一、借助 MediaCodec 封装解码工具类 VideoCodec
/**
 * 解码工具类
 * 解码完成后的数据通过 ISurface 回调出去
 */
public class VideoCodec {private ISurface mISurface;private String mPath;private MediaExtractor mMediaExtractor;private int mWidth;private int mHeight;private int mFps;private MediaCodec mMediaCodec;private boolean isCodeing;private byte[] outData;private CodecTask mCodecTask;/*** 要在prepare之前调用* @param surface*/public void setDisplay(ISurface surface){mISurface = surface;}/*** 设置要解码的视频地址* @param path*/public void setDataSource(String path){mPath = path;}/**** 准备方法*/public void prepare(){//MediaMuxer:复用器 封装器//解复用(解封装)mMediaExtractor = new MediaExtractor();try {//把视频给到 解复用器mMediaExtractor.setDataSource(mPath);} catch (IOException e) {e.printStackTrace();}int videoIndex = -1;MediaFormat videoMediaFormat = null;// mp4 1路音频 1路视频int trackCount = mMediaExtractor.getTrackCount();for (int i = 0; i < trackCount; i++) {//获得这路流的格式MediaFormat mediaFormat = mMediaExtractor.getTrackFormat(i);//选择视频 获得格式// video/ audio/String mime = mediaFormat.getString(MediaFormat.KEY_MIME);if(mime.startsWith("video/")){videoIndex = i;videoMediaFormat = mediaFormat;break;}}//默认是-1if (null != videoMediaFormat){//解码 videoIndex 这一路流mWidth = videoMediaFormat.getInteger(MediaFormat.KEY_WIDTH);mHeight = videoMediaFormat.getInteger(MediaFormat.KEY_HEIGHT);mFps = 20;if (videoMediaFormat.containsKey(MediaFormat.KEY_FRAME_RATE)) {mFps = videoMediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE);}// 个别手机 小米(x型号) 解码出来不是yuv420p//所以设置 解码数据格式 指定为yuv420videoMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);try {//创建一个解码器mMediaCodec = MediaCodec.createDecoderByType(videoMediaFormat.getString(MediaFormat.KEY_MIME));mMediaCodec.configure(videoMediaFormat,null,null,0);} catch (IOException e) {e.printStackTrace();}//选择流 后续读取这个流mMediaExtractor.selectTrack(videoIndex);}if (null != mISurface){mISurface.setVideoParamerters(mWidth,mHeight,mFps);}}/*** 开始解码*/public void start(){isCodeing = true;//接收 解码后的数据 yuv数据大小是 w*h*3/2outData = new byte[mWidth * mHeight * 3 / 
2];mCodecTask = new CodecTask();mCodecTask.start();}/*** 停止*/public void stop(){isCodeing = false;if (null != mCodecTask && mCodecTask.isAlive()){try {mCodecTask.join(3_000);} catch (InterruptedException e) {e.printStackTrace();}//3s后线程还没结束if (mCodecTask.isAlive()){//中断掉mCodecTask.interrupt();}mCodecTask = null;}}/*** 解码线程*/private class CodecTask extends Thread{@Overridepublic void run() {if (null == mMediaCodec) {return;}// 开启mMediaCodec.start();boolean isEOF = false;MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();//是否中断线程while(!isInterrupted()){if (!isCodeing){break;}// 如果 eof是true 就表示读完了,就不执行putBuffer2Codec方法了//并不代表解码完了if (!isEOF) {isEOF = putBuffer2Codec();}//...//从输出缓冲区获取数据 解码之后的数据int status = mMediaCodec.dequeueOutputBuffer(bufferInfo, 100);//获取到有效的输出缓冲区 意味着能够获取到解码后的数据了if (status >= 0){ByteBuffer outputBuffer = mMediaCodec.getOutputBuffer(status);if (bufferInfo.size == outData.length){//取出数据 存到outData yuv420outputBuffer.get(outData);if (null != mISurface){mISurface.offer(outData);}}//交付掉这个输出缓冲区 释放mMediaCodec.releaseOutputBuffer(status,false);}//干完活了 ,全部解码完成了if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0){//解码完了break;}}mMediaCodec.stop();mMediaCodec.release();mMediaCodec = null;mMediaExtractor.release();mMediaExtractor = null;}/**** @return true:没有更多数据了* false:还有*/private boolean putBuffer2Codec(){// -1 就一直等待int status = mMediaCodec.dequeueInputBuffer(100);//有效的输入缓冲区 indexif (status >=0 ){//把待解码数据加入MediaCodecByteBuffer inputBuffer = mMediaCodec.getInputBuffer(status);//清理脏数据inputBuffer.clear();// ByteBuffer当成byte数组 ,读数据存入 ByteBuffer 存到byte数组的第0个开始存int size = mMediaExtractor.readSampleData(inputBuffer, 0);//没读到数据 已经没有数据可读了if (size < 0){//给个标记 表示没有更多数据可以从输出缓冲区获取了mMediaCodec.queueInputBuffer(status,0,0,0,MediaCodec.BUFFER_FLAG_END_OF_STREAM);return true;}else{//把噻了数据的输入缓冲区噻回去mMediaCodec.queueInputBuffer(status,0,size,mMediaExtractor.getSampleTime(), 0);//丢掉已经加入解码的数据 (不丢就会读重复的数据)mMediaExtractor.advance();}}return false;}}}
二、使用 OpenGL 绘制视频封装 SoulFilter
/**
 * 灵魂出窍滤镜
 */
public class SoulFilter extends AbstractFilter {private int[] mTextures;//肉体private GLImage bodyImage;//灵魂private GLImage soulImage;private int mAlpha;private int mSamplerV;private int mSamplerU;private int mSamplerY;private int mFps;private float[] matrix = new float[16];private int interval;public SoulFilter(Context context) {super(context, R.raw.soul_vertex, R.raw.soul_frag);bodyImage = new GLImage();soulImage = new GLImage();mSamplerY = GLES20.glGetUniformLocation(mGLProgramId, "sampler_y");mSamplerU = GLES20.glGetUniformLocation(mGLProgramId,"sampler_u");mSamplerV = GLES20.glGetUniformLocation(mGLProgramId,"sampler_v");mAlpha = GLES20.glGetUniformLocation(mGLProgramId, "alpha");//3个纹理 yuvmTextures = new int[3];OpenGLUtils.glGenTextures(mTextures);}public void onReady2(int width,int height,int fps){super.onReady(width,height);mFps = fps;bodyImage.initSize(width,height);soulImage.initSize(width,height);}public void onDrawFrame(byte[] yuv) {//把yuv分离出来 保存在 image中的 y、u、v三个变量中bodyImage.initData(yuv);//分离出的数据有效if (!bodyImage.hasImage()){return;}//启用着色器程序GLES20.glUseProgram(mGLProgramId);//初始化矩阵 不进行任何缩放平移Matrix.setIdentityM(matrix,0);//给肉体的 无变化矩阵GLES20.glUniformMatrix4fv(vMatrix,1,false,matrix,0);//透明度 肉体不透明GLES20.glUniform1f(mAlpha,1);//传值//画画onDrawBody(bodyImage);//混合灵魂onDrawSoul(yuv);}private void onDrawBody(GLImage image){//传递坐标mGLVertexBuffer.position(0);GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, mGLVertexBuffer);GLES20.glEnableVertexAttribArray(vPosition);mGLTextureBuffer.position(0);GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, mGLTextureBuffer);GLES20.glEnableVertexAttribArray(vCoord);//传递yuv数据GLES20.glActiveTexture(GLES20.GL_TEXTURE0);GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[0]);//把y数据与 0纹理绑定// GL_LUMINANCE: yuv 给这个GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D,0,GLES20.GL_LUMINANCE,mOutputWidth,mOutputHeight,0,GLES20.GL_LUMINANCE,GLES20.GL_UNSIGNED_BYTE,image.getY());GLES20.glUniform1i(mSamplerY, 
0);//u数据GLES20.glActiveTexture(GLES20.GL_TEXTURE1);GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[1]);//把y数据与 0纹理绑定// GL_LUMINANCE: yuv 给这个GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D,0,GLES20.GL_LUMINANCE,mOutputWidth/2,mOutputHeight/2,0,GLES20.GL_LUMINANCE,GLES20.GL_UNSIGNED_BYTE,image.getU());GLES20.glUniform1i(mSamplerU, 1);GLES20.glActiveTexture(GLES20.GL_TEXTURE2);GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures[2]);//把y数据与 0纹理绑定// GL_LUMINANCE: yuv 给这个GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D,0,GLES20.GL_LUMINANCE,mOutputWidth/2,mOutputHeight/2,0,GLES20.GL_LUMINANCE,GLES20.GL_UNSIGNED_BYTE,image.getV());GLES20.glUniform1i(mSamplerV, 2);GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);}private void onDrawSoul(byte[] yuv){interval++;//没保存一个灵魂 或者使用次数已经达到上限了// 灵魂只能使用x次 使用完了之后就要更新灵魂if (!soulImage.hasImage() || interval > mFps){//次数重置为1interval = 1;//记录新灵魂soulImage.initData(yuv);}if (!soulImage.hasImage()){return;}//画灵魂GLES20.glEnable(GLES20.GL_BLEND);//1:源 灵魂 GL_ONE:画灵魂自己//2: 肉体 也是肉体自己//两个都是用自己原本的颜色去混合
// GLES20.glBlendFunc(GLES20.GL_ONE,GLES20.GL_ONE);//让灵魂整体颜色变淡// GL_SRC_ALPHA: 取源(灵魂)的alpha 作为因子// 假设alpha是0.2 rgb都是1 -> 混合就是用 rgb都是 0.2*1 整体变淡GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA,GLES20.GL_ONE);//初始化矩阵 不进行任何缩放平移Matrix.setIdentityM(matrix,0);//设置缩放大小 本次放大为 1+当前灵魂次数占总次数*2的比例//不一次放太大 为了达到较好的表现效果 fps*2//所以这里值为 1+1/60 ---> 1+20/40 1.025... ---> 1.5float scale = 1.0f + interval / (mFps * 2.f);Matrix.scaleM(matrix,0,scale,scale,0);//给肉体的 无变化矩阵GLES20.glUniformMatrix4fv(vMatrix,1,false,matrix,0);//传递透明度 透明度值为0-1 渐渐降低 0.1+x/100 x为fps-[0~fps]//这里值为0.29 ---> 0.1GLES20.glUniform1f(mAlpha, 0.1f + (mFps - interval) / 100.f);//画灵魂onDrawBody(soulImage);}
}