// Background reading on the concepts used here: https://www.cnblogs.com/madaha/p/9713170.html
#import "LFHardwareVideoEncoder.h"
#import <VideoToolbox/VideoToolbox.h>
// Class extension: private state for the hardware (VideoToolbox) H.264 encoder.
@interface LFHardwareVideoEncoder (){
VTCompressionSessionRef compressionSession; // VideoToolbox encoder session (C API, manually managed)
NSInteger frameCount; // frames submitted so far; used to force periodic keyframes
NSData *sps; // cached H.264 sequence parameter set, captured from the first keyframe
NSData *pps; // cached H.264 picture parameter set, captured from the first keyframe
FILE *fp; // DEBUG-only local dump of the raw Annex-B H.264 stream
BOOL enabledWriteVideoFile; // YES to write encoded frames to `fp` (DEBUG builds only)
}
// Encode settings (frame size, fps, bitrate, keyframe interval) supplied at init.
@property (nonatomic, strong) LFLiveVideoConfiguration *configuration;
// Receives encoded LFVideoFrame objects; weak to avoid a retain cycle.
@property (nonatomic, weak) id h264Delegate;
// Last bitrate applied to the compression session.
@property (nonatomic) NSInteger currentVideoBitRate;
// YES while the app is inactive; encoding and bitrate changes are suspended.
@property (nonatomic) BOOL isBackGround;
@end
@implementation LFHardwareVideoEncoder
#pragma mark -- LifeCycle
/// Designated initializer: builds the VideoToolbox session from `configuration`
/// and registers for foreground/background transitions (the hardware encoder
/// must be suspended while the app is inactive).
- (instancetype)initWithVideoStreamConfiguration:(LFLiveVideoConfiguration *)configuration {
    if (self = [super init]) {
        NSLog(@"USE LFHardwareVideoEncoder");
        _configuration = configuration;
        [self resetCompressionSession];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterBackground:) name:UIApplicationWillResignActiveNotification object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground:) name:UIApplicationDidBecomeActiveNotification object:nil];
#ifdef DEBUG
        enabledWriteVideoFile = NO;
        // Only open the debug dump file when dumping is actually enabled.
        // Previously the FILE* was opened unconditionally and then never
        // used nor closed when enabledWriteVideoFile == NO (handle leak).
        if (enabledWriteVideoFile) {
            [self initForFilePath];
        }
#endif
    }
    return self;
}
// Tears down any existing compression session and builds a fresh one from
// `_configuration`. Called at init, on foregrounding, and after an encode error.
- (void)resetCompressionSession {
if (compressionSession) {
/*
 Tear down the existing session:
 1. Force-complete any frames still pending inside the encoder.
 2. Invalidate the session (similar to invalidating a timer).
 3. Release it — VTCompressionSession is a C API; memory is managed manually.
 4. Null the pointer so no stale reference survives.
 */
VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
VTCompressionSessionInvalidate(compressionSession);
CFRelease(compressionSession);
compressionSession = NULL;
}
/*
 Create the encoder session. Parameters:
 1. Allocator (NULL = default)
 2. Frame width in pixels
 3. Frame height in pixels
 4. Codec type (H.264)
 5. Specific encoder to use (NULL = VideoToolbox picks one)
 6. Source pixel-buffer attributes (NULL = buffers are not VTB-allocated,
    so frame data can be copied in from elsewhere)
 7. Compressed-data allocator (NULL = default)
 8. Output callback handed each encoded frame (invoked asynchronously in
    response to VTCompressionSessionEncodeFrame)
 9. Client refcon passed through to the callback (this encoder instance)
 10. Out-pointer receiving the new session
 */
OSStatus status = VTCompressionSessionCreate(NULL,
_configuration.videoSize.width,
_configuration.videoSize.height,
kCMVideoCodecType_H264,
NULL,
NULL,
NULL,
VideoCompressonOutputCallback,
(__bridge void *)self,
&compressionSession);
if (status != noErr) {
return;
}
_currentVideoBitRate = _configuration.videoBitRate; // higher bitrate = better quality, larger frames
// Maximum keyframe (GOP) interval, expressed in frames.
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_MaxKeyFrameInterval,
(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
// Same limit expressed in seconds (interval-in-frames / frames-per-second).
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,
(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval/_configuration.videoFrameRate));
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_ExpectedFrameRate,
(__bridge CFTypeRef)@(_configuration.videoFrameRate));
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_AverageBitRate,
(__bridge CFTypeRef)@(_configuration.videoBitRate));
// Hard rate cap: at most videoBitRate*1.5 bits (= *1.5/8 bytes) per 1-second
// window. NOTE(review): the original comment called this the "keyframe
// interval", which it is not — DataRateLimits is a [bytes, seconds] pair.
NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8), @(1)];
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_DataRateLimits,
(__bridge CFArrayRef)limit);
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_RealTime, // real-time encoding output, lowers latency
kCFBooleanTrue);
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_ProfileLevel,
kVTProfileLevel_H264_Main_AutoLevel);
// NOTE(review): kCFBooleanTrue permits B-frames (frame reordering), which
// adds latency; confirm this is intended for a live-streaming pipeline.
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_AllowFrameReordering,
kCFBooleanTrue);
// CABAC entropy coding: better compression, available with Main profile.
VTSessionSetProperty(compressionSession,
kVTCompressionPropertyKey_H264EntropyMode,
kVTH264EntropyMode_CABAC);
VTCompressionSessionPrepareToEncodeFrames(compressionSession);
}
/// Applies a new target bitrate to the live session. Ignored while the app
/// is inactive (the session is rebuilt on foregrounding anyway).
- (void)setVideoBitRate:(NSInteger)videoBitRate {
    if (_isBackGround) return;
    _currentVideoBitRate = videoBitRate;
    VTSessionSetProperty(compressionSession,
                         kVTCompressionPropertyKey_AverageBitRate,
                         (__bridge CFTypeRef)@(videoBitRate));
    // Hard cap: at most videoBitRate*1.5 bits (*1.5/8 bytes) per 1-second window.
    NSArray *byteLimits = @[@(videoBitRate * 1.5/8), @(1)];
    VTSessionSetProperty(compressionSession,
                         kVTCompressionPropertyKey_DataRateLimits,
                         (__bridge CFArrayRef)byteLimits);
}
/// Returns the bitrate most recently applied to the session
/// (cached locally rather than queried back from VideoToolbox).
- (NSInteger)videoBitRate {
    return self->_currentVideoBitRate;
}
/// Non-memory cleanup only (ARC handles the rest): flush and destroy the
/// VideoToolbox session, close the debug dump file, and drop notification
/// observers.
- (void)dealloc {
    if (compressionSession != NULL) {
        // The session is a CF object created by this class; it must be
        // flushed, invalidated, and released manually.
        VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(compressionSession);
        CFRelease(compressionSession);
        compressionSession = NULL;
    }
    // Close the DEBUG H.264 dump file if it was opened. The original code
    // never closed it, leaking the FILE* (and any unflushed buffered data).
    if (fp) {
        fclose(fp);
        fp = NULL;
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
#pragma mark -- LFVideoEncoder
/// Submits one captured pixel buffer to the hardware encoder.
/// @param pixelBuffer The raw frame to compress (must not be NULL).
/// @param timeStamp   Capture timestamp, forwarded to the output callback.
- (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
    if (_isBackGround) return;
    frameCount++;
    // CMTimeMake(value, timescale): frame index over frames-per-second.
    CMTime presentationTimeStamp = CMTimeMake(frameCount, (int32_t)_configuration.videoFrameRate);
    VTEncodeInfoFlags flags;
    // Each frame lasts 1/fps seconds.
    CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
    // Force a keyframe every videoMaxKeyframeInterval frames (e.g. fps*2).
    NSDictionary *properties = nil;
    if (frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0) {
        properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
    }
    // The capture timestamp travels to the output callback as the frame's
    // sourceFrameRefCon. __bridge_retained takes a +1 retain that the
    // callback balances with __bridge_transfer.
    NSNumber *timeNumber = @(timeStamp);
    void *frameRef = (__bridge_retained void *)timeNumber;
    /* Encode one frame:
       session, pixel buffer, presentation time, frame duration,
       per-frame options (keyframe forcing), frame refcon, out info flags. */
    OSStatus status = VTCompressionSessionEncodeFrame(compressionSession,
                                                      pixelBuffer,
                                                      presentationTimeStamp,
                                                      duration,
                                                      (__bridge CFDictionaryRef)properties,
                                                      frameRef,
                                                      &flags);
    if (status != noErr) {
        // The callback will never fire for this frame, so release the +1
        // retain taken above. The original code leaked the NSNumber here.
        CFRelease(frameRef);
        [self resetCompressionSession];
    }
}
/// Blocks until every frame already submitted has been pushed through the
/// encoder and delivered to the output callback.
- (void)stopEncoder {
    VTCompressionSessionCompleteFrames(compressionSession, kCMTimeIndefinite);
}
/// Public delegate setter; forwards onto the private storage property.
- (void)setDelegate:(id)delegate {
    self.h264Delegate = delegate;
}
#pragma mark -- Notification
/// App is resigning active: the hardware encoder cannot run in the
/// background, so flag encode/bitrate calls to become no-ops.
- (void)willEnterBackground:(NSNotification *)notification {
    self.isBackGround = YES;
}
/// App became active again: rebuild the compression session, then resume
/// accepting frames.
- (void)willEnterForeground:(NSNotification *)notification {
    [self resetCompressionSession];
    self.isBackGround = NO;
}
#pragma mark -- VideoCallBack
/*
 VideoToolbox compression output callback, invoked asynchronously once per
 frame submitted via VTCompressionSessionEncodeFrame.
 1. VTref        — outputCallbackRefCon: the LFHardwareVideoEncoder instance.
 2. VTFrameRef   — sourceFrameRefCon: the retained NSNumber capture timestamp.
 3. status       — noErr when encoding succeeded.
 4. infoFlags    — information about the encode operation.
 5. sampleBuffer — the encoded frame, or NULL on failure / dropped frame.
 */
static void VideoCompressonOutputCallback(void *VTref,
                                          void *VTFrameRef,
                                          OSStatus status,
                                          VTEncodeInfoFlags infoFlags,
                                          CMSampleBufferRef sampleBuffer){
    // Take ownership of the retained timestamp FIRST so that every early
    // return below releases it. The original code transferred ownership
    // only after the NULL checks, leaking one NSNumber per dropped or
    // failed frame.
    NSNumber *timeNumber = (__bridge_transfer NSNumber *)VTFrameRef;
    // Encoding failed or the frame was dropped.
    if (!sampleBuffer) return;
    // Per-sample attachment dictionaries; `true` asks for an array even
    // when no attachments exist.
    CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (!array) return;
    CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
    if (!dic) return;
    // Absence of the NotSync attachment marks a sync sample, i.e. a keyframe.
    BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
    uint64_t timeStamp = [timeNumber longLongValue];
    // Recover the encoder instance passed as the callback refcon.
    LFHardwareVideoEncoder *videoEncoder = (__bridge LFHardwareVideoEncoder *)VTref;
    if (status != noErr) {
        return;
    }
    // On the first keyframe, extract the SPS/PPS parameter sets from the
    // format description. They head every H.264 stream: SPS first, then PPS.
    if (keyframe && !videoEncoder->sps) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        /*
         Parameter set at index 0 = SPS (sequence parameter set: global
         encoding parameters for the video sequence).
         Args: format description, parameter-set index, out pointer to the
         bytes, out byte count, out set count, NAL-length-header size (unused).
         */
        OSStatus spsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format,
                                                                               0,
                                                                               &sparameterSet,
                                                                               &sparameterSetSize,
                                                                               &sparameterSetCount,
                                                                               0);
        if (spsStatus == noErr) {
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            // Parameter set at index 1 = PPS (picture parameter set).
            OSStatus ppsStatus = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format,
                                                                                   1,
                                                                                   &pparameterSet,
                                                                                   &pparameterSetSize,
                                                                                   &pparameterSetCount,
                                                                                   0);
            if (ppsStatus == noErr) {
                videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
                /*
                 When writing a local .h264 file, SPS/PPS go at the very front,
                 each preceded by a 4-byte Annex-B start code. When streaming,
                 SPS/PPS instead go into the FLV header/data area.
                 */
                if (videoEncoder->enabledWriteVideoFile) {
                    NSMutableData *data = [[NSMutableData alloc] init];
                    uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
                    [data appendBytes:header length:4];
                    [data appendData:videoEncoder->sps];
                    [data appendBytes:header length:4];
                    [data appendData:videoEncoder->pps];
                    fwrite(data.bytes, 1, data.length, videoEncoder->fp);
                }
            }
        }
    }
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    /*
     Get a pointer to the contiguous AVCC-formatted payload:
     [4-byte big-endian length][NALU][4-byte length][NALU]...
     Args: buffer, offset, out length-at-offset, out total length, out pointer.
     */
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer,
                                                         0,
                                                         &length,
                                                         &totalLength,
                                                         &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        static const size_t AVCCHeaderLength = 4;
        // Walk every NALU in the sample — these are the playable frame data.
        // The condition is written as an addition (not
        // `totalLength - AVCCHeaderLength`) so a payload shorter than 4
        // bytes cannot underflow the unsigned arithmetic.
        while (bufferOffset + AVCCHeaderLength < totalLength) {
            // Read the 4-byte NAL unit length prefix.
            uint32_t NALUnitLength = 0;
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
            // AVCC lengths are big-endian; convert to host byte order.
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
            // Defend against a corrupt length running past the buffer.
            if (bufferOffset + AVCCHeaderLength + NALUnitLength > totalLength) {
                break;
            }
            LFVideoFrame *videoFrame = [LFVideoFrame new];
            videoFrame.timestamp = timeStamp;
            videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
            videoFrame.isKeyFrame = keyframe;
            videoFrame.sps = videoEncoder->sps;
            videoFrame.pps = videoEncoder->pps;
            if (videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]) {
                [videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
            }
            if (videoEncoder->enabledWriteVideoFile) {
                // Annex-B start code: 4 bytes before a keyframe NALU, 3 otherwise.
                NSMutableData *data = [[NSMutableData alloc] init];
                if (keyframe) {
                    uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
                    [data appendBytes:header length:4];
                } else {
                    uint8_t header[] = {0x00, 0x00, 0x01};
                    [data appendBytes:header length:3];
                }
                [data appendData:videoFrame.data];
                fwrite(data.bytes, 1, data.length, videoEncoder->fp);
            }
            bufferOffset += AVCCHeaderLength + NALUnitLength;
        }
    }
}
/// Opens (truncating) the DEBUG H.264 dump file at Documents/IOSCamDemo.h264.
- (void)initForFilePath {
    NSString *path = [self GetFilePathByfileName:@"IOSCamDemo.h264"];
    NSLog(@"%@", path);
    // Close any previously opened dump file before reopening, so repeated
    // calls do not leak the old FILE* handle.
    if (self->fp) {
        fclose(self->fp);
    }
    self->fp = fopen([path cStringUsingEncoding:NSUTF8StringEncoding], "wb");
    // fopen can fail (e.g. sandbox/permissions); report instead of silently
    // carrying a NULL handle into later fwrite calls.
    if (!self->fp) {
        NSLog(@"LFHardwareVideoEncoder: failed to open dump file at %@", path);
    }
}
/// Resolves `<app sandbox>/Documents/<filename>`.
/// @param filename The last path component to append.
/// @return The writable path inside the app's Documents directory.
- (NSString *)GetFilePathByfileName:(NSString*)filename {
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [searchPaths firstObject];
    return [documentsDirectory stringByAppendingPathComponent:filename];
}