01 前言
大家好,本文是 iOS/Android 音视频开发专题 的第八篇,该专题中 AVPlayer 项目代码将在 Github 进行托管,你可在微信公众号(GeekDev)后台回复 资料 获取项目地址。
在上篇文章 使用AudioTrack播放音频轨道 中我们使用 AudioTrack 播放了视频音轨数据。本篇文章中我们将为 AVPlayer 添加音效,并实现音视频同步。
本期内容:
02 封装解码器代码
首先,我们对 DemoMediaPlayerActivity 进行改造,将解码器相关代码进行封装,以便音频解码可以完美复用。
AVAssetTrackDecoder :
public class AVAssetTrackDecoder {

    /** MIME-type prefix of the track to decode, e.g. "video/" or "audio/". */
    private final String mDecodeMimeType;
    private final Context mContext;
    private final Uri mUri;
    private AVAssetTrackDecoderDelegate mDelegate;
    /** Decode-loop flag. volatile: stop() is called from a different thread than doDecoder(). */
    private volatile boolean mRunning;
    /** Set once the end-of-stream buffer has been queued, so EOS is signalled exactly once. */
    private boolean mInputEOS;

    /** Receives decoded frames and output-format changes from the decoder thread. */
    public interface AVAssetTrackDecoderDelegate {
        /** Called with a decoded frame. The buffer is only valid for the duration of the call. */
        void newFrameReady(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo);
        /** Called when the decoder's output format changes. (Name kept as-is for source compatibility with existing callers.) */
        void outputFormatChaned(MediaFormat mediaFormat);
    }

    /**
     * @param context  context used to resolve {@code uri}
     * @param uri      the media source
     * @param mimeType MIME-type prefix of the track to decode ("video/" or "audio/")
     */
    public AVAssetTrackDecoder(Context context, Uri uri, String mimeType) {
        this.mContext = context;
        this.mUri = uri;
        this.mDecodeMimeType = mimeType;
    }

    /** Sets the delegate that receives decoded frames and format changes. */
    public void setDelegate(AVAssetTrackDecoderDelegate delegate) {
        this.mDelegate = delegate;
    }

    /**
     * Feeds one sample from the extractor into the codec.
     *
     * @return true if a sample was queued; false if no input buffer is free right
     *         now, or the end of the stream has been reached
     * @since v3.0.1
     */
    private boolean feedInputBuffer(MediaExtractor source, MediaCodec codec) {
        if (source == null || codec == null || mInputEOS) return false;
        int inIndex = codec.dequeueInputBuffer(0);
        // Any negative index (INFO_TRY_AGAIN_LATER) means no input buffer is available;
        // the old switch on INFO_TRY_AGAIN_LATER below this check was unreachable.
        if (inIndex < 0) return false;
        ByteBuffer codecInputBuffer = codec.getInputBuffers()[inIndex];
        codecInputBuffer.clear();
        int sampleDataSize = source.readSampleData(codecInputBuffer, 0);
        if (sampleDataSize <= 0) {
            // No more samples: signal end-of-stream to the decoder exactly once.
            codec.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            mInputEOS = true;
            return false;
        }
        codec.queueInputBuffer(inIndex, 0, sampleDataSize, source.getSampleTime(), source.getSampleFlags());
        source.advance();
        return true;
    }

    /**
     * Drains one decoded buffer from the codec and hands it to the delegate.
     *
     * @return true while the codec may still produce output; false once the
     *         end-of-stream buffer has been consumed (or {@code mediaCodec} is null)
     * @since v3.0.1
     */
    private boolean drainOutputBuffer(MediaCodec mediaCodec) {
        if (mediaCodec == null) return false;
        final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
        switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return true;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: {
                MediaFormat outputFormat = mediaCodec.getOutputFormat();
                if (mDelegate != null)
                    mDelegate.outputFormatChaned(outputFormat);
                return true;
            }
            default: {
                // Guard before touching the buffer: info.flags is only meaningful
                // for a valid (non-negative) buffer index.
                if (outIndex < 0) return true;
                if (info.size > 0 && mDelegate != null) {
                    ByteBuffer outputBuffer = mediaCodec.getOutputBuffers()[outIndex];
                    outputBuffer.position(info.offset);
                    outputBuffer.limit(info.offset + info.size);
                    // Deliver BEFORE releasing: once releaseOutputBuffer() runs the
                    // buffer belongs to the codec again and must not be read.
                    mDelegate.newFrameReady(outputBuffer, info);
                }
                // Render on release only for video (a Surface is attached); audio
                // data has already been consumed by the delegate.
                mediaCodec.releaseOutputBuffer(outIndex, !mDecodeMimeType.startsWith("audio/"));
                return (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0;
            }
        }
    }

    /**
     * Runs the blocking decode loop on the calling thread until the stream ends
     * or {@link #stop()} is called.
     *
     * @param surface target surface for video output; null for audio tracks
     */
    public void doDecoder(Surface surface) {
        // step 1: create a media extractor (demuxer)
        MediaExtractor extractor = new MediaExtractor();
        // step 2: point the extractor at the media file
        try {
            extractor.setDataSource(mContext, mUri, null);
        } catch (IOException e) {
            e.printStackTrace();
            extractor.release();
            return;
        }
        // step 3: find the first track whose MIME type matches mDecodeMimeType.
        // Only remember the format of the MATCHING track — the old code kept the
        // last iterated format and decoded the wrong track when nothing matched.
        MediaFormat trackFormat = null;
        int trackID = -1;
        int trackCount = extractor.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith(mDecodeMimeType)) {
                trackFormat = format;
                trackID = i;
                break;
            }
        }
        if (trackID == -1 || trackFormat == null) {
            // No such track in this file — nothing to decode.
            extractor.release();
            return;
        }
        // step 4: select the track so readSampleData() pulls from it
        extractor.selectTrack(trackID);
        // step 5: create and start a decoder for the track's format
        MediaCodec mediaCodec;
        try {
            mediaCodec = MediaCodec.createDecoderByType(trackFormat.getString(MediaFormat.KEY_MIME));
            mediaCodec.configure(trackFormat, surface, null, 0);
            mediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
            extractor.release();
            return;
        }
        mInputEOS = false;
        mRunning = true;
        while (mRunning) {
            // step 6: feed encoded samples to the decoder
            boolean fed = feedInputBuffer(extractor, mediaCodec);
            // step 7: drain decoded frames from the decoder
            boolean drained = drainOutputBuffer(mediaCodec);
            // Exit only when input is exhausted AND the EOS output was consumed.
            if (!fed && !drained) break;
        }
        // step 8: release resources; both objects are unusable afterwards
        extractor.release();
        mediaCodec.release();
    }

    /** Asks the decode loop to exit. Safe to call from any thread. */
    public void stop() {
        mRunning = false;
    }
}
AVAssetTrackDecoder 只是将之前的代码进行了封装,并没有任何新的内容。
03 实现音视频同步
音视频同步通常有三种方式:一种是以视频为基准,第二种是以音频为基准,第三种是互相参考。我们的示例 demo 使用的是第一种和第二种,由音频、视频各自参考自身的时间戳完成同步。
说简单点,音视频同步就是根据帧的显示时间对解码线程进行锁定(睡眠),以达到音视频同步的效果。下面是完整的同步器代码:
public class AVMediaSyncClock {

    /** Sentinel meaning "time not set"; passed through conversions unchanged. */
    private static final long TIME_UNSET = Long.MIN_VALUE + 1;
    /** Sentinel meaning "end of source"; passed through conversions unchanged. */
    private static final long TIME_END_OF_SOURCE = Long.MIN_VALUE;

    // volatile throughout: start()/stop()/setSpeed() run on the UI thread while
    // lock() runs on the audio/video decoder threads.

    /** Presentation time (us) of the first frame seen since the last reset; 0 = not yet based. */
    private volatile long mBasePositionUs;
    /** Current playback speed factor (1 = normal speed). */
    private volatile float mSpeed = 1;
    /** Wall-clock base (ms, SystemClock.elapsedRealtime) captured at the last reset. */
    private volatile long mBaseElapsedMs;
    /** Whether the clock is currently running. */
    private volatile boolean mStarted;

    /** Starts the clock; no-op if already started. */
    public void start() {
        if (mStarted) return;
        this.reset();
        mStarted = true;
    }

    /** Stops the clock and clears both base times. */
    public void stop() {
        mBasePositionUs = 0;
        mStarted = false;
        mBaseElapsedMs = 0;
    }

    /** Re-bases the clock on the current wall-clock time. */
    private void reset() {
        mBasePositionUs = 0;
        mBaseElapsedMs = SystemClock.elapsedRealtime();
    }

    /**
     * Blocks the calling decoder thread until the frame at {@code positionUs} is due.
     *
     * @param positionUs frame presentation time in microseconds; must be
     *                   monotonically increasing across calls
     * @param diff       extra offset in milliseconds added to the target time
     */
    public void lock(long positionUs, long diff) {
        if (!mStarted) {
            return;
        }
        // The first frame after a reset establishes the position base.
        if (mBasePositionUs == 0)
            mBasePositionUs = positionUs;
        long speedPositionUs = (long) ((positionUs - mBasePositionUs) * (1.f / mSpeed));
        long durationMs = usToMs(speedPositionUs) + diff;
        long endTimeMs = mBaseElapsedMs + durationMs;
        long sleepTimeMs = endTimeMs - SystemClock.elapsedRealtime();
        if (sleepTimeMs > 0) {
            try {
                // Sleep to hold the decoder thread until the frame is due.
                TimeUnit.MILLISECONDS.sleep(sleepTimeMs);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so the caller can observe the interruption.
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Sets the playback speed. Both base times are reset first, so the new speed
     * only affects frames from this point on.
     *
     * @param speed playback speed factor (1 = normal)
     */
    public void setSpeed(float speed) {
        reset();
        mSpeed = speed;
    }

    /** @return the current playback speed factor. */
    public float getSpeed() {
        return mSpeed;
    }

    /** Converts microseconds to milliseconds; sentinel values pass through unchanged. */
    public static long usToMs(long timeUs) {
        return (timeUs == TIME_UNSET || timeUs == TIME_END_OF_SOURCE) ? timeUs : (timeUs / 1000);
    }

    /** Converts milliseconds to microseconds; sentinel values pass through (avoids overflow). */
    public static long msToUs(long timeMs) {
        return (timeMs == TIME_UNSET || timeMs == TIME_END_OF_SOURCE) ? timeMs : (timeMs * 1000);
    }
}
代码其实很简单,我就不再详细赘述了。
现在我们整合 AVAssetTrackDecoder 及 AVMediaSyncClock 实现完整播放器功能。
/**
 * Starts playback: creates the shared sync clock, then launches one decoder
 * thread for the video track and one for the audio track of the same source.
 */
private void doDecoder() {
    final Uri sourceUri = Uri.parse("android.resource://" + getPackageName() + "/" + R.raw.demo_video);

    mMediaSyncClock = new AVMediaSyncClock();
    mMediaSyncClock.start();

    // Video track: decoded frames are rendered straight to the surface.
    Runnable videoTask = new Runnable() {
        @Override
        public void run() {
            mVideoDecoder = new AVAssetTrackDecoder(DemoAVPlayer01Activity.this, sourceUri, "video/");
            mVideoDecoder.setDelegate(mVideoDecoderDelegate);
            mVideoDecoder.doDecoder(mSurfaceTexture.getSurface());
        }
    };
    new Thread(videoTask).start();

    // Audio track: no surface; PCM samples are pushed to AudioTrack by the delegate.
    Runnable audioTask = new Runnable() {
        @Override
        public void run() {
            mAudioDecoder = new AVAssetTrackDecoder(DemoAVPlayer01Activity.this, sourceUri, "audio/");
            mAudioDecoder.setDelegate(mAudioDecoderDelegate);
            mAudioDecoder.doDecoder(null);
        }
    };
    new Thread(audioTask).start();
}
/** Video decoder callback: only used to pace the video decode thread against the clock. */
private AVAssetTrackDecoder.AVAssetTrackDecoderDelegate mVideoDecoderDelegate = new AVAssetTrackDecoder.AVAssetTrackDecoderDelegate() {
@Override
public void newFrameReady(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
// Block this (video) decoder thread until the frame's presentation time is due.
mMediaSyncClock.lock(bufferInfo.presentationTimeUs,0);
}
@Override
public void outputFormatChaned(MediaFormat mediaFormat) {
// Intentionally empty: video format handling is done by the rendering Surface.
}
};
/** Audio decoder callback: paces the audio decode thread and feeds PCM into AudioTrack. */
private AVAssetTrackDecoder.AVAssetTrackDecoderDelegate mAudioDecoderDelegate = new AVAssetTrackDecoder.AVAssetTrackDecoderDelegate() {
@RequiresApi(api = Build.VERSION_CODES.M)
@Override
public void newFrameReady(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
// Block this (audio) decoder thread until the frame's presentation time is due.
mMediaSyncClock.lock(bufferInfo.presentationTimeUs,0);
// NOTE(review): the 4-arg write() treats the last argument as a NANOSECOND
// timestamp and only honours it for HW_AV_SYNC tracks; presentationTimeUs is in
// microseconds — confirm the 3-arg write(buffer, size, WRITE_BLOCKING) isn't intended.
// NOTE(review): assumes outputFormatChaned() fired before the first frame so
// mAudioTrack is non-null — verify the decoder always reports the format first.
mAudioTrack.write(byteBuffer,bufferInfo.size,WRITE_BLOCKING,bufferInfo.presentationTimeUs);
}
@Override
public void outputFormatChaned(MediaFormat outputFormat) {
// Build an AudioTrack matching the decoder's output format; each field falls
// back to a default when the format does not carry the corresponding key.
int sampleRate = 44100;
if (outputFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE))
sampleRate = outputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
if (outputFormat.containsKey(MediaFormat.KEY_CHANNEL_COUNT))
channelConfig = outputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT) == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// NOTE(review): "bit-width" is not a standard MediaFormat key — presumably
// vendor-specific; verify it is actually populated on target devices.
if (outputFormat.containsKey("bit-width"))
audioFormat = outputFormat.getInteger("bit-width") == 8 ? AudioFormat.ENCODING_PCM_8BIT : AudioFormat.ENCODING_PCM_16BIT;
// Double the minimum buffer size to reduce the risk of underruns.
mBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) * 2;
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,mBufferSize,AudioTrack.MODE_STREAM);
mAudioTrack.play();
}
};
关键的音视频同步代码在 newFrameReady 回调中:根据帧的显示时间计算线程需要暂停的时间。
具体代码见:DemoAVPlayer01Activity
04 结束语
经常有读者在公众号后台催我实现音视频同步,为了大家能尽早看到这部分内容,恕我偷懒了。
来源: GeekDev 公众号