PlayRecord
Starting with v2.2.6 of the all-in-one (融合) SDK, the player supports recording the played content. Because recording relies on the encoding module, this feature is only available in the all-in-one SDK. The supported output formats are MP4 and FLV.
Playback recording works by registering KSYMediaPlayer's setVideoRawDataListener and setOnAudioPCMAvailableListener callbacks to obtain the decoded video and audio data, handing that data to the encoders for encoding and muxing, and finally writing the result to the target file.
As shown in PlayRecordActivity.java, after the player has been created, create a playerRecord object:
// Create the playerRecord object, which performs the actual recording work
playerRecord = new KSYPlayerRecord(this);
// Use software encoding
playerRecord.setEncodeMethod(StreamerConstants.ENCODE_METHOD_SOFTWARE);
// Set the frame rate and other parameters
playerRecord.setTargetFps(15);
//......
// Playback must be set to software decoding; otherwise no decoded data is output
mVideoView.setDecodeMode(KSYMediaPlayer.KSYDecodeMode.KSY_DECODE_MODE_SOFTWARE);
// Set the video output pixel format to YV12
mVideoView.setOption(KSYMediaPlayer.OPT_CATEGORY_PLAYER, "overlay-format", KSYMediaPlayer.SDL_FCC_YV12);
// Register the raw-data callbacks so the decoded audio/video data is passed to playerRecord for recording
mVideoView.setVideoRawDataListener(playerRecord.getPlayerCapture());
mVideoView.setOnAudioPCMAvailableListener(playerRecord.getPlayerCapture());
//.......
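// Register the prepared listener (a sketch; mOnPreparedListener is the listener defined in the next snippet),
// so that the raw-frame buffers can be allocated in onPrepared once the video size is known
mVideoView.setOnPreparedListener(mOnPreparedListener);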
// After the initialization above is complete, start playback
mVideoView.prepareAsync();
Because setVideoRawDataListener is used to obtain the decoded video data, the raw-frame buffers must be set up in onPrepared, as shown below:
private IMediaPlayer.OnPreparedListener mOnPreparedListener = new IMediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(IMediaPlayer mp) {
mVideoWidth = mVideoView.getVideoWidth();
mVideoHeight = mVideoView.getVideoHeight();
// Pass the video width and height to playerRecord
playerRecord.setTargetResolution(mVideoWidth, mVideoHeight);
// Create the raw image buffers and hand them to the player. Because the output format is YV12,
// each buffer holds a Y plane of yStride * height bytes plus two chroma planes of cStride * (height / 2) bytes each:
ByteBuffer rawBuffer[] = new ByteBuffer[5]; //5 buffers is just an example
for (int index = 0; index < rawBuffer.length; index++) {
int yStride = (mVideoWidth + 15) / 16 * 16;
int cStride = ((yStride / 2) + 15) / 16 * 16;
rawBuffer[index] = ByteBuffer.allocate(yStride * mVideoHeight + cStride * mVideoHeight);
mVideoView.addVideoRawBuffer(rawBuffer[index].array());
}
// Set Video Scaling Mode
mVideoView.setVideoScalingMode(KSYMediaPlayer.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING);
//start player
mVideoView.start();
// Other playback-related logic is omitted here
//......
}
};
At this point the player-side initialization is basically complete. Once playback has started, call playerRecord's startRecord(String savepath) with the desired output file path to start recording. Call playerRecord's stopRecord() to stop; the recorded video file can then be found at the specified location.
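A minimal usage sketch (the file path and the points at which recording is started and stopped are illustrative):
// Start recording once playback is running, e.g. from a "record" button callback
String savepath = "/sdcard/play_record_demo.mp4"; // illustrative path; choose a writable location in your app
playerRecord.startRecord(savepath);
// ... later, stop recording; the file is then available at savepath
playerRecord.stopRecord();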
The actual recording logic lives mainly in KSYPlayerRecord.java and PlayerCapture.java; the main steps are as follows.
Encoder initialization:
mPlayerCapture = new PlayerCapture(mContext);
// Create the audio filter
mAudioResampleFilter = new AudioResampleFilter();
// Create the encoders
mVideoEncoder = new AVCodecVideoEncoder();
mAudioEncoderMgt = new AudioEncoderMgt();
// Connect the audio source pin to the audio filter
mPlayerCapture.getAudioSrcPin().connect(mAudioResampleFilter.getSinkPin());
// Connect the audio filter to the audio encoder
mAudioResampleFilter.getSrcPin().connect(mAudioEncoderMgt.getSinkPin());
// Connect the video source pin to the video encoder
mPlayerCapture.getVideoSrcPin().connect(mVideoEncoder.mSinkPin);
// Create the FilePublisher, which handles muxing and writing the output file
mFilePublisher = new FilePublisher();
// Connect the encoders to the FilePublisher
mAudioEncoderMgt.getSrcPin().connect(mFilePublisher.getAudioSink());
mVideoEncoder.mSrcPin.connect(mFilePublisher.getVideoSink());
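// The connections above form the following recording pipeline:
//   PlayerCapture audio src pin -> AudioResampleFilter -> AudioEncoderMgt -> FilePublisher (audio sink)
//   PlayerCapture video src pin -> AVCodecVideoEncoder -> FilePublisher (video sink)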
// set listeners
Encoder.EncoderListener encoderListener = new Encoder.EncoderListener() {
@Override
public void onError(Encoder encoder, int err) {
if (err != 0) {
//stopRecord();
}
boolean isVideo = true;
if (encoder instanceof MediaCodecAudioEncoder ||
encoder instanceof AVCodecAudioEncoder) {
isVideo = false;
}
int what;
switch (err) {
case Encoder.ENCODER_ERROR_UNSUPPORTED:
what = isVideo ?
StreamerConstants.KSY_STREAMER_VIDEO_ENCODER_ERROR_UNSUPPORTED :
StreamerConstants.KSY_STREAMER_AUDIO_ENCODER_ERROR_UNSUPPORTED;
break;
case Encoder.ENCODER_ERROR_UNKNOWN:
default:
what = isVideo ?
StreamerConstants.KSY_STREAMER_VIDEO_ENCODER_ERROR_UNKNOWN :
StreamerConstants.KSY_STREAMER_AUDIO_ENCODER_ERROR_UNKNOWN;
break;
}
if (mOnErrorListener != null) {
mOnErrorListener.onError(what, 0, 0);
}
}
};
mVideoEncoder.setEncoderListener(encoderListener);
mAudioEncoderMgt.setEncoderListener(encoderListener);
mFilePublisher.setPubListener(new Publisher.PubListener() {
@Override
public void onInfo(int type, long msg) {
switch (type) {
case FilePublisher.INFO_OPENED:
//start audio encoder first
if (!mAudioEncoderMgt.getEncoder().isEncoding()) {
mAudioEncoderMgt.getEncoder().start();
}
if (mOnInfoListener != null) {
mOnInfoListener.onInfo(
StreamerConstants.KSY_STREAMER_OPEN_STREAM_SUCCESS, 0, 0);
}
break;
case FilePublisher.INFO_AUDIO_HEADER_GOT:
if (!mIsAudioOnly) {
// start video encoder after audio header got
if (!mVideoEncoder.isEncoding()) {
mVideoEncoder.start();
}
mVideoEncoder.forceKeyFrame();
}
break;
default:
break;
}
}
@Override
public void onError(int err, long msg) {
Log.e(TAG, "FilePublisher err=" + err);
if (err != 0) {
stopRecord();
}
if (mOnErrorListener != null) {
int status;
switch (err) {
case FilePublisher.FILE_PUBLISHER_ERROR_OPEN_FAILED:
status = StreamerConstants.KSY_STREAMER_FILE_PUBLISHER_OPEN_FAILED;
break;
case FilePublisher.FILE_PUBLISHER_ERROR_WRITE_FAILED:
status = StreamerConstants.KSY_STREAMER_FILE_PUBLISHER_WRITE_FAILED;
break;
case FilePublisher.FILE_PUBLISHER_ERROR_CLOSE_FAILED:
status = StreamerConstants.KSY_STREAMER_FILE_PUBLISHER_CLOSE_FAILED;
break;
default:
status = StreamerConstants.KSY_STREAMER_FILE_PUBLISHER_ERROR_UNKNOWN;
break;
}
mOnErrorListener.onError(status, (int) msg, 0);
}
//do not need to restart
}
});
Encoder parameter configuration:
private void setAudioParams() {
// Set the audio filter's resampling parameters
mAudioResampleFilter.setOutFormat(new AudioBufFormat(AVConst.AV_SAMPLE_FMT_S16,
mAudioSampleRate, mAudioChannels));
}
private void setRecordingParams() {
// Set the video encoding parameters
VideoEncodeFormat videoEncodeFormat = new VideoEncodeFormat(mVideoCodecId,
mTargetWidth, mTargetHeight, mInitVideoBitrate);
videoEncodeFormat.setFramerate(mTargetFps);
videoEncodeFormat.setIframeinterval(mIFrameInterval);
videoEncodeFormat.setScene(mEncodeScene);
videoEncodeFormat.setProfile(mEncodeProfile);
// The demo uses YV12, so the pixel format must be set here accordingly
videoEncodeFormat.setPixFmt(ImgBufFormat.FMT_YV12);
mVideoEncoder.configure(videoEncodeFormat);
// Set the audio encoding parameters
AudioEncodeFormat audioEncodeFormat = new AudioEncodeFormat(AudioEncodeFormat.MIME_AAC,
AVConst.AV_SAMPLE_FMT_S16, mAudioSampleRate, mAudioChannels, mAudioBitrate);
mAudioEncoderMgt.setEncodeFormat(audioEncodeFormat);
}
After the encoders are initialized, one important step remains: encoding the video and audio data output by the player. This is handled as follows:
@Override
public void onVideoRawDataAvailable(IMediaPlayer iMediaPlayer, byte[] bytes, int size, int width, int height, int format, long pts) {
if (iMediaPlayer == null)
return ;
if (mStarted) {
if (mVideoFormat == null) {
mVideoFormat = new ImgBufFormat(ImgBufFormat.FMT_YV12, width, height, 0);
mVideoSrcPin.onFormatChanged(mVideoFormat);
}
if (mVideoOutBuffer == null || mVideoOutBuffer.capacity() < size) {
mVideoOutBuffer = ByteBuffer.allocateDirect(size);
mVideoOutBuffer.order(ByteOrder.nativeOrder());
}
mVideoOutBuffer.clear();
mVideoOutBuffer.put(bytes, 0, size);
mVideoOutBuffer.flip();
ImgBufFrame frame = new ImgBufFrame(mVideoFormat, mVideoOutBuffer, pts);
if (mVideoSrcPin.isConnected()) {
mVideoSrcPin.onFrameAvailable(frame);
}
}
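// Return the raw buffer to the player so it can be reused for subsequent frames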
KSYMediaPlayer ksyMediaPlayer = (KSYMediaPlayer)iMediaPlayer;
ksyMediaPlayer.addVideoRawBuffer(bytes);
}
@Override
public void onAudioPCMAvailable(IMediaPlayer iMediaPlayer, ByteBuffer byteBuffer, long timestamp, int channels, int samplerate, int samplefmt) {
if (iMediaPlayer == null)
return ;
if (!mStarted)
return;
if (mAudioFormat == null) {
mAudioFormat = new AudioBufFormat(samplefmt, samplerate, channels);
mAudioSrcPin.onFormatChanged(mAudioFormat);
}
if (byteBuffer == null) {
return;
}
ByteBuffer pcmBuffer = byteBuffer;
int msBufferSize = 1 * samplerate * channels * 2 / 1000;//1 ms
int len = byteBuffer.limit();
audioBufferSize += len;
long bufferTime = audioBufferSize / msBufferSize;
if (!gotFirstAudioBuffer) {
firstAudioBufferTime = System.nanoTime() / 1000 / 1000;
gotFirstAudioBuffer = true;
}
if (mAudioBuffer == null || mAudioBuffer.capacity() < len) {
mAudioBuffer = ByteBuffer.allocateDirect( len * 20 );
mAudioBuffer.order(ByteOrder.nativeOrder());
mAudioBuffer.clear();
}
mAudioBuffer.put(byteBuffer);
// Accumulate a certain amount of audio data before sending it to the encoder
if (frameNum >= 7) {
mAudioBuffer.flip();
pcmBuffer = mAudioBuffer;
AudioBufFrame frame = new AudioBufFrame(mAudioFormat, pcmBuffer, timestamp);
if (mAudioSrcPin.isConnected()) {
mAudioSrcPin.onFrameAvailable(frame);
}
frameNum = 0;
mAudioBuffer.clear();
} else {
frameNum++;
}
}