image.png
之前我们结合相机和视频,结合滤镜,做了实时的预览和录制。
这期,我们来试试利用OpenGL+MediaCodec,不进行预览直接录制成视频的情况。
录制视频的开始,我们先来思考两个问题:
OpenGL绘制:通过之前的学习,我们通过阅读源码和文章,能够了解到整个OpenGL绘制的流程是这样的。
image.png
之前文章中写到的这些部分,都是直接由GLSurfaceView帮我们完成了。
之前的预览部分都是直接使用GLSurfaceView。
因为GLSurfaceView已经为我们当前的线程准备好了EGL的环境。所以我们只要生成自己的纹理texture,并进行绘制就可以了。
绘制的结果,就会出现在准备好的EGLSurface当中。
GLSurfaceView和EGLSurface是怎么关联的呢?GLSurfaceView直接继承了SurfaceView。
继承SurfaceView.png
通过mSurfaceHolder来创建EGLSurface。
创建EglSurface.png
这样,使用draw之后,通过eglSwapBuffers,就会将内容绘制到GLSurfaceView当中。
通过预览部分的回顾,我们知道,通过用SurfaceView进行创建和关联EGLSurface,就可以绘制到整个SurfaceView上。实际上,录制就是同时输入到了Encoder的Surface当中了。
绘制用到的纹理是和EglContext绑定的。
所以,我们只要能得到这个结果的纹理,保持相同的EglContext,重新绘制一次,就有相同的结果了。
这样我们就可以利用Encoder的InputSurface和相同的EglContext,来再次创建一个EglSurface。在这里绘制相同的纹理,就可以得到相同的结果。//1 . 创建
// 1. Setup
// Grab the EGLContext that is current on this (GL) thread so it can be
// shared with the encoder thread.
EGL14.eglGetCurrentContext();
// On the new (encoder) thread, build an EGL environment that shares the
// context above, and wrap the encoder's input Surface in an EGLSurface.
mEglCore = new EglCore(sharedContext, EglCore.FLAG_RECORDABLE);
mInputWindowSurface = new WindowSurface(mEglCore, mVideoEncoder.getInputSurface(), true);
mInputWindowSurface.makeCurrent();
// 2. Draw
// Re-draw the shared texture into the encoder's surface.
mFullScreen.drawFrame(mTextureId, transform);
// Tag the frame with its timestamp, then hand it to the encoder via swap.
mInputWindowSurface.setPresentationTime(timestampNanos);
mInputWindowSurface.swapBuffers();对比,我们就能发现。
预览时用SurfaceView或其他Android原生的View来创建对应的EGLSurface;而用Encoder进行录制时,我们只需要利用它的InputSurface来创建EGLSurface就可以了。这里有个问题:如果我们想要使用FFmpeg,并且不使用Camera的回调来接收数据的话,要怎么办呢?
通常影片的帧率(fps)都是30。所以我们只要保证编码时,输入的时间戳按1/30秒的间隔递增,就可以了。
// Output frame rate is fixed at 30 fps.
// Maps a frame index to its presentation timestamp in nanoseconds:
// frame N is presented at N/30 seconds.
private long computePresentationTimeNsec(int frameIndex) {
final long ONE_BILLION = 1000000000;
// Multiply before dividing so integer division does not lose precision.
return frameIndex * ONE_BILLION / 30;
}直接使用了HandlerThread。和使用MainLooper来创建Handler就可以完成。
这里需要注意的是,进行线程通信时,要确保内部的Handler已经创建,需要进行getLooper()之后,来创建Handler.
这里的getLooper()是一个同步的方法,只要当前的Thread不是结束的状态,就能确保得到非空的Looper.
// Lazily creates the handler bound to this HandlerThread's Looper.
// getLooper() blocks until the thread's Looper is ready, so the handler
// is always backed by a live Looper while the thread is running.
// NOTE(review): the lazy init is not synchronized — confirm this is only
// ever called from a single thread.
private MovieHandler getMovieHandler() {
if (mMovieHandler == null) {
mMovieHandler = new MovieHandler(getLooper(), this);
}
return mMovieHandler;
}Render,将绘制的流程解耦出来这样就可以自由的进行绘制。
同时我们需要Duration的属性,这样我们能在正确的时间范围内,取到我们想要的Render和让Render针对时间进行变形。
绘制的方法,同时加上当前的时间戳
// One segment of the generated movie: a renderer with a fixed duration
// that draws its frames with OpenGL into the encoder's input surface.
public interface MovieMaker {
// Nanoseconds per second; all durations/timestamps are in nanoseconds.
long ONE_BILLION = 1000000000;
// Called once the EGL context is current; create GL resources here.
void onGLCreate();
// Output frame size in pixels.
void setSize(int width, int height);
// How long this segment lasts, in nanoseconds.
long getDurationAsNano();
// Draw the frame for the given timestamp (nanoseconds).
void generateFrame(long curTime);
// Free the GL resources created in onGLCreate().
void release();
}private void makeMovie() {
// Draw frames in a loop until the whole timeline has been encoded.
boolean isCompleted = false;
try {
// Initialize the GL environment: a recordable EGL context plus a
// window surface backed by the encoder's input Surface.
mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE);
mVideoEncoder = new VideoEncoderCore(width, height, bitRate, outputFile);
Surface encoderInputSurface = mVideoEncoder.getInputSurface();
mWindowSurface = new WindowSurface(mEglCore, encoderInputSurface, true);
mWindowSurface.makeCurrent();
// Drawing phase.
// Compute the total duration and record each maker's start offset
// on the shared timeline.
long totalDuration = 0;
timeSections = new long[movieMakers.size()];
for (int i = 0; i < movieMakers.size(); i++) {
MovieMaker movieMaker = movieMakers.get(i);
movieMaker.onGLCreate();
movieMaker.setSize(width, height);
timeSections[i] = totalDuration;
totalDuration += movieMaker.getDurationAsNano();
}
if (listener != null) {
uiHandler.post(() -> {
listener.onStart();
});
}
long tempTime = 0;
int frameIndex = 0;
while (tempTime <= totalDuration) {
// Drain pending encoder output before producing the next frame.
mVideoEncoder.drainEncoder(false);
generateFrame(tempTime);
long presentationTimeNsec = computePresentationTimeNsec(frameIndex);
submitFrame(presentationTimeNsec);
updateProgress(tempTime, totalDuration);
frameIndex++;
// NOTE(review): tempTime takes the timestamp of the frame just
// drawn, so generateFrame() runs one frame behind the submitted
// presentation time — confirm this one-frame lag is intended.
tempTime = presentationTimeNsec;
if (stop) {
break;
}
}
// Finish: signal end-of-stream and drain the remaining output.
mVideoEncoder.drainEncoder(true);
isCompleted = true;
} catch (Exception e) {
e.printStackTrace();
} finally {
// Tear down the encoder/EGL state even when generation failed.
try {
releaseEncoder();
} catch (Exception e) {
e.printStackTrace();
}
// Only report completion when the loop actually finished.
if (isCompleted && listener != null) {
uiHandler.post(() -> {
listener.onCompleted(outputFile.getAbsolutePath());
});
}
}
}同样是先创建对应的EGL环境。然后在给定的时长下,调用对应的Render进行绘制。
MovieMaker
就是使用之前创建好的Render在对应的生命周期方法调用。因为是静态图片。所以这里没有进行变化。public class StaticPhotoMaker implements MovieMaker {
// Renders a single still photo as one fixed-length movie segment.
PhotoFilter photoFilter;
String filePath;
public StaticPhotoMaker(String filePath) {
this.filePath = filePath;
}
// Create the GL filter once the EGL context is current.
@Override
public void onGLCreate() {
photoFilter = new PhotoFilter();
photoFilter.onCreate();
}
@Override
public void setSize(int width, int height) {
photoFilter.onSizeChange(width, height);
// NOTE(review): decodeFile returns null for a missing/bad path —
// confirm the caller guarantees the file exists.
Bitmap bitmap = BitmapFactory.decodeFile(filePath);
photoFilter.setBitmap(bitmap);
}
// Every still photo is shown for 3 seconds.
@Override
public long getDurationAsNano() {
return 3 * ONE_BILLION;
}
// The image is static, so each frame draws identical content and the
// timestamp is ignored.
@Override
public void generateFrame(long curTime) {
photoFilter.onDrawFrame();
}
@Override
public void release() {
photoFilter.release();
}
} @SuppressLint("StaticFieldLeak")
// Builds a MovieEngine from three test makers (720x1280) and starts
// generation; progress/completion callbacks are posted to the UI thread.
public void startGenerate(View view) {
engine = new MovieEngine.MovieBuilder()
// NOTE(review): hard-coded absolute paths only exist on the author's
// test device — replace with app-local files before shipping.
.maker(new TestMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529734446397.png"))
.maker(new TestMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529911150337.png"))
.maker(new TestMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1531208871527.png"))
.width(720)
.height(1280)
.listener(new MovieEngine.ProgressListener() {
private long startTime;
@Override
public void onStart() {
// Record wall-clock start so onCompleted can report elapsed time.
startTime = System.currentTimeMillis();
Toast.makeText(GenerateMovieActivity.this, "onStart!!", Toast.LENGTH_SHORT).show();
}
@Override
public void onCompleted(String absolutePath) {
long endTime = System.currentTimeMillis();
Toast.makeText(GenerateMovieActivity.this, "file path=" + absolutePath + ",cost time = " + (endTime - startTime), Toast.LENGTH_SHORT).show();
}
@Override
public void onProgress(long current, long totalDuration) {
String text = "当前进度是" + (current * 1f / totalDuration * 1f);
textView.setText(text);
}
}).build();
engine.make();
}movie-ge-1.gif
因为动画效果,需要同时对两图进行效果。所以需要两个不同的Render进行变化。
MovieMaker public AnimateGroupPhotoMaker(String... filePaths) {
// Copy the varargs photo paths into a mutable list.
this.filePaths = new ArrayList<>();
this.filePaths.addAll(Arrays.asList(filePaths));
}@Override
// Draws every photo filter with an animation progress derived from the
// elapsed time within this segment.
public void generateFrame(long curTime) {
// NOTE(review): startTime is only captured when curTime == 0, i.e.
// when this maker is the very first on the timeline; later segments
// rely on the field defaulting to 0 — confirm curTime is
// segment-relative here.
if (curTime == 0) {
startTime = curTime;
}
// Animation progress, 0..1 across this segment's duration.
float dif = (curTime - startTime) * 1f / getDurationAsNano();
for (int i = 0; i < photoFilters.size(); i++) {
PhotoAlphaFilter2 photoFilter = photoFilters.get(i);
transform(photoFilter, dif, i);
photoFilter.onDrawFrame();
}
}
// Applies the per-photo animation for one frame:
// index 0 shrinks and fades out, index 1 slides in from the right.
// @param dif animation progress in [0, 1]
private void transform(PhotoAlphaFilter2 photoFilter, float dif, int i) {
System.out.println("dif = " + dif);
// Cache the untouched MVP matrix once, so each frame's animation is
// applied to the same base matrix rather than compounding.
if (srcMatrix == null) {
srcMatrix = photoFilter.getMVPMatrix();
}
float[] mModelMatrix = Arrays.copyOf(srcMatrix, 16);
float v;
switch (i) {
// First photo: scale down slightly while halving the alpha.
case 0:
v = 1f - dif * 0.1f;
// NOTE(review): a z scale factor of 0f collapses the z axis; the
// usual value for 2D content is 1f — confirm 0f is intended.
Matrix.scaleM(mModelMatrix, 0, v, v, 0f);
photoFilter.setAlpha(1 - dif * 0.5f);
break;
// Second photo: translate x from 2 down to 0 (NDC units).
case 1:
v = 2 - dif * 2f;
// NOTE(review): 'offset' is computed but never used — dead code?
int offset = (int) (width * (v / 2));
System.out.println("translateM v = " + v);
Matrix.translateM(mModelMatrix, 0, v, 0f, 0f);
break;
}
photoFilter.setMVPMatrix(mModelMatrix);
} @SuppressLint("StaticFieldLeak")
// Builds a slideshow movie: static photo segments interleaved with
// animated cross-transitions, reporting progress via a ProgressDialog.
public void startGenerate(View view) {
engine = new MovieEngine.MovieBuilder()
// Combine the original static photo segments with animated
// transitions to produce a slideshow effect.
.maker(new StaticPhotoMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529734446397.png"))
.maker(new AnimateGroupPhotoMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529734446397.png", "/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1531208871527.png"))
.maker(new StaticPhotoMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1531208871527.png"))
.maker(new AnimateGroupPhotoMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1531208871527.png", "/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529911150337.png"))
.maker(new StaticPhotoMaker("/storage/emulated/0/tencent/MicroMsg/WeiXin/mmexport1529911150337.png"))
.width(720)
.height(1280)
.listener(new MovieEngine.ProgressListener() {
private ProgressDialog progressDialog;
private long startTime;
@Override
public void onStart() {
// Record wall-clock start so onCompleted can report elapsed time.
startTime = System.currentTimeMillis();
Toast.makeText(GenerateMovieActivity.this, "onStart!!", Toast.LENGTH_SHORT).show();
// Horizontal progress bar, scaled 0..100 in onProgress.
progressDialog = new ProgressDialog(GenerateMovieActivity.this);
progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
progressDialog.show();
progressDialog.setMax(100);
}
@Override
public void onCompleted(String absolutePath) {
progressDialog.hide();
long endTime = System.currentTimeMillis();
Toast.makeText(GenerateMovieActivity.this, "file path=" + absolutePath + ",cost time = " + (endTime - startTime), Toast.LENGTH_SHORT).show();
}
@Override
public void onProgress(long current, long totalDuration) {
// Map the nanosecond timeline position to a 0..100 percentage.
float progress = current * 1f / totalDuration * 1f;
progressDialog.setProgress((int) (progress * 100));
}
}).build();
engine.make();
}movie-ge-2.gif
文中Demo源码的github地址