From fd7802b9532867c0ea7ad2c697786acb457ca05d Mon Sep 17 00:00:00 2001
From: february29
Date: Fri, 29 Sep 2017 12:22:47 +0800
Subject: [PATCH 1/8] Fix audio encoding bug
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 LFLiveKit/coder/LFHardwareAudioEncoder.m | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/LFLiveKit/coder/LFHardwareAudioEncoder.m b/LFLiveKit/coder/LFHardwareAudioEncoder.m
index 64f97225..207db3b0 100755
--- a/LFLiveKit/coder/LFHardwareAudioEncoder.m
+++ b/LFLiveKit/coder/LFHardwareAudioEncoder.m
@@ -60,30 +60,32 @@ - (void)encodeAudioData:(nullable NSData*)audioData timeStamp:(uint64_t)timeStam
         return;
     }
 
+
     if(leftLength + audioData.length >= self.configuration.bufferLength){
-        ///< Send
+        ///< Send: the send size has been reached
         NSInteger totalSize = leftLength + audioData.length;
-        NSInteger encodeCount = totalSize/self.configuration.bufferLength;
+        NSInteger encodeCount = totalSize/self.configuration.bufferLength; // audioData.length can be large, so totalSize may be several times bufferLength
         char *totalBuf = malloc(totalSize);
         char *p = totalBuf;
-        memset(totalBuf, (int)totalSize, 0);
-        memcpy(totalBuf, leftBuf, leftLength);
-        memcpy(totalBuf + leftLength, audioData.bytes, audioData.length);
+        memset(totalBuf, 0, (int)totalSize); // zero-initialize totalBuf
+        memcpy(totalBuf, leftBuf, leftLength); // copy what was left over from the previous call
+        memcpy(totalBuf + leftLength, audioData.bytes, audioData.length); // copy this call's data
 
         for(NSInteger index = 0;index < encodeCount;index++){
+            // encode one bufferLength-sized chunk starting at p
            [self encodeBuffer:p timeStamp:timeStamp];
            p += self.configuration.bufferLength;
        }
 
-        leftLength = totalSize%self.configuration.bufferLength;
-        memset(leftBuf, 0, self.configuration.bufferLength);
-        memcpy(leftBuf, totalBuf + (totalSize -leftLength), leftLength);
+        leftLength = totalSize%self.configuration.bufferLength; // update leftLength to the size of the data that is still unsent
+        memset(leftBuf, 0, self.configuration.bufferLength); // zero leftBuf from its start up to bufferLength
+        memcpy(leftBuf, totalBuf + (totalSize -leftLength), leftLength); // keep the tail of totalBuf in leftBuf so it is sent next time
 
         free(totalBuf);
     }else{
-        ///< Accumulate
+        ///< Accumulate: the send size has not been reached yet
         memcpy(leftBuf+leftLength, audioData.bytes, audioData.length);
         leftLength = leftLength + audioData.length;
     }
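Patch 1 above fixes the swapped memset() arguments in the accumulate-and-slice path. The sketch below restates that flow in isolation; the feedPCM harness and the fixed 2048-byte buffer are illustrative assumptions, and only leftBuf, leftLength and the idea of bufferLength-sized chunks come from the patch itself.

    #import <Foundation/Foundation.h>

    static const NSInteger kBufferLength = 2048;      // assumed stand-in for configuration.bufferLength
    static char leftBuf[2048];
    static NSInteger leftLength = 0;

    static void feedPCM(NSData *audioData, void (^encodeChunk)(const char *chunk)) {
        if (leftLength + (NSInteger)audioData.length < kBufferLength) {
            // Not enough for one encoder frame yet: just accumulate.
            memcpy(leftBuf + leftLength, audioData.bytes, audioData.length);
            leftLength += audioData.length;
            return;
        }
        NSInteger totalSize = leftLength + audioData.length;
        NSInteger encodeCount = totalSize / kBufferLength;   // may be > 1 when audioData is large
        char *totalBuf = malloc(totalSize);
        memset(totalBuf, 0, totalSize);                      // value first, then length: the bug the patch fixes
        memcpy(totalBuf, leftBuf, leftLength);               // leftover from the previous call
        memcpy(totalBuf + leftLength, audioData.bytes, audioData.length);
        for (NSInteger i = 0; i < encodeCount; i++) {
            encodeChunk(totalBuf + i * kBufferLength);       // one full chunk per encoder call
        }
        leftLength = totalSize % kBufferLength;
        memcpy(leftBuf, totalBuf + (totalSize - leftLength), leftLength); // carry the remainder forward
        free(totalBuf);
    }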
From 46801e24b9650dcfe80f28adcaf0bf6e75134f7e Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 13:42:06 +0800
Subject: [PATCH 2/8] Audio encoding comments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 LFLiveKit/coder/LFHardwareAudioEncoder.m | 47 +++++++++++++-----------
 1 file changed, 25 insertions(+), 22 deletions(-)

diff --git a/LFLiveKit/coder/LFHardwareAudioEncoder.m b/LFLiveKit/coder/LFHardwareAudioEncoder.m
index 207db3b0..34a746b8 100755
--- a/LFLiveKit/coder/LFHardwareAudioEncoder.m
+++ b/LFLiveKit/coder/LFHardwareAudioEncoder.m
@@ -98,35 +98,36 @@ - (void)encodeBuffer:(char*)buf timeStamp:(uint64_t)timeStamp{
     inBuffer.mData = buf;
     inBuffer.mDataByteSize = (UInt32)self.configuration.bufferLength;
 
+    // set up the input buffer list
     AudioBufferList buffers;
-    buffers.mNumberBuffers = 1;
+    buffers.mNumberBuffers = 1; // a single inBuffer
     buffers.mBuffers[0] = inBuffer;
 
-    //  set up the output buffer list
+    // set up the output buffer list
     AudioBufferList outBufferList;
-    outBufferList.mNumberBuffers = 1;
+    outBufferList.mNumberBuffers = 1; // a single outBuffer
     outBufferList.mBuffers[0].mNumberChannels = inBuffer.mNumberChannels;
     outBufferList.mBuffers[0].mDataByteSize = inBuffer.mDataByteSize;   // output buffer size
-    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer
+    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer, where the encoded data is written
     UInt32 outputDataPacketSize = 1;
     if (AudioConverterFillComplexBuffer(m_converter, inputDataProc, &buffers, &outputDataPacketSize, &outBufferList, NULL) != noErr) {
         return;
     }
 
-
+    // wrap the result in an LFAudioFrame so it can be pushed to the stream later
     LFAudioFrame *audioFrame = [LFAudioFrame new];
     audioFrame.timestamp = timeStamp;
     audioFrame.data = [NSData dataWithBytes:aacBuf length:outBufferList.mBuffers[0].mDataByteSize];
 
-    char exeData[2];
+    char exeData[2]; // FLV AAC sequence header; 0x12 0x10 for 44100 Hz
     exeData[0] = _configuration.asc[0];
     exeData[1] = _configuration.asc[1];
     audioFrame.audioInfo = [NSData dataWithBytes:exeData length:2];
     if (self.aacDeleage && [self.aacDeleage respondsToSelector:@selector(audioEncoder:audioFrame:)]) {
-        [self.aacDeleage audioEncoder:self audioFrame:audioFrame];
+        [self.aacDeleage audioEncoder:self audioFrame:audioFrame]; // notify the delegate that encoding finished
     }
 
-    if (self->enabledWriteVideoFile) {
+    if (self->enabledWriteVideoFile) { // debug only: also write the AAC to a local file
         NSData *adts = [self adtsData:_configuration.numberOfChannels rawDataLength:audioFrame.data.length];
         fwrite(adts.bytes, 1, adts.length, self->fp);
         fwrite(audioFrame.data.bytes, 1, audioFrame.data.length, self->fp);
@@ -143,45 +144,47 @@ - (BOOL)createAudioConvert { // initialize an encoder converter from the input samples
     if (m_converter != nil) {
         return TRUE;
     }
 
-
+    // input audio description
     AudioStreamBasicDescription inputFormat = {0};
-    inputFormat.mSampleRate = _configuration.audioSampleRate;
-    inputFormat.mFormatID = kAudioFormatLinearPCM;
-    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
-    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
-    inputFormat.mFramesPerPacket = 1;
-    inputFormat.mBitsPerChannel = 16;
-    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame;
-    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket;
+    inputFormat.mSampleRate = _configuration.audioSampleRate; // sample rate
+    inputFormat.mFormatID = kAudioFormatLinearPCM; // data format
+    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; // format flags
+    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels; // number of channels
+    inputFormat.mFramesPerPacket = 1; // frames per packet: 1 for uncompressed audio; compressed formats use a larger fixed value, e.g. 1024 for AAC
+    inputFormat.mBitsPerChannel = 16; // bits per channel, i.e. bits per audio sample
+    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame; // bytes per frame
+    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket; // bytes per packet; 0 if packets are variable-sized
 
+    // output audio description
     AudioStreamBasicDescription outputFormat; // the output audio format starts here
-    memset(&outputFormat, 0, sizeof(outputFormat));
+    memset(&outputFormat, 0, sizeof(outputFormat)); // zero-initialize
     outputFormat.mSampleRate = inputFormat.mSampleRate;       // keep the sample rate unchanged
     outputFormat.mFormatID = kAudioFormatMPEG4AAC;            // AAC encoding: kAudioFormatMPEG4AAC or kAudioFormatMPEG4AAC_HE_V2
     outputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;;
     outputFormat.mFramesPerPacket = 1024;                     // one AAC packet contains 1024 frames
 
     const OSType subtype = kAudioFormatMPEG4AAC;
+    // two codec choices: software and hardware
     AudioClassDescription requestedCodecs[2] = {
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleSoftwareAudioCodecManufacturer
+            kAppleSoftwareAudioCodecManufacturer // software codec
         },
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleHardwareAudioCodecManufacturer
+            kAppleHardwareAudioCodecManufacturer // hardware codec
         }
     };
 
-    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter);;
+    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter); // create the AudioConverter: input format, output format, number of codec descriptions, the codec descriptions, and the converter out-parameter
 
     UInt32 outputBitrate = _configuration.audioBitrate;
     UInt32 propSize = sizeof(outputBitrate);
 
     if(result == noErr) {
-        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate);
+        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate); // set the output bitrate
     }
 
     return YES;
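Patch 2 above documents the call to AudioConverterFillComplexBuffer but not the inputDataProc callback it passes, through which the converter pulls PCM on demand. The sketch below shows the typical shape of such a callback; it is an illustration, not the exact implementation in LFHardwareAudioEncoder.m.

    #import <AudioToolbox/AudioToolbox.h>

    // Hands the converter the PCM buffer that was passed as the user-data argument
    // of AudioConverterFillComplexBuffer (the `buffers` list in the hunk above).
    static OSStatus inputDataProcSketch(AudioConverterRef converter,
                                        UInt32 *ioNumberDataPackets,
                                        AudioBufferList *ioData,
                                        AudioStreamPacketDescription **outDataPacketDescription,
                                        void *inUserData) {
        AudioBufferList *source = (AudioBufferList *)inUserData;
        ioData->mBuffers[0].mNumberChannels = source->mBuffers[0].mNumberChannels;
        ioData->mBuffers[0].mData           = source->mBuffers[0].mData;
        ioData->mBuffers[0].mDataByteSize   = source->mBuffers[0].mDataByteSize;
        return noErr; // *ioNumberDataPackets is left as requested; noErr means the data is ready
    }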
From 1719505498c99f129a385cb138a57c617147271e Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 16:26:08 +0800
Subject: [PATCH 3/8] Update LFHardwareVideoEncoder.m

---
 LFLiveKit/coder/LFHardwareVideoEncoder.m | 35 +++++++++++++++++++-----
 1 file changed, 28 insertions(+), 7 deletions(-)

diff --git a/LFLiveKit/coder/LFHardwareVideoEncoder.m b/LFLiveKit/coder/LFHardwareVideoEncoder.m
index 6c3d20fe..9bd85df8 100755
--- a/LFLiveKit/coder/LFHardwareVideoEncoder.m
+++ b/LFLiveKit/coder/LFHardwareVideoEncoder.m
@@ -43,7 +43,7 @@ - (instancetype)initWithVideoStreamConfiguration:(LFLiveVideoConfiguration *)con
     return self;
 }
 
-- (void)resetCompressionSession {
+- (void)resetCompressionSession { // reset the VTCompressionSessionRef
     if (compressionSession) {
         VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
 
@@ -52,22 +52,34 @@ - (void)resetCompressionSession {
         compressionSession = NULL;
     }
 
+    // create the VTCompressionSessionRef used for H.264 encoding; VideoCompressonOutputCallback is invoked when a frame has been encoded
     OSStatus status = VTCompressionSessionCreate(NULL, _configuration.videoSize.width, _configuration.videoSize.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoCompressonOutputCallback, (__bridge void *)self, &compressionSession);
     if (status != noErr) {
         return;
     }
 
+    // configure the VTCompressionSessionRef
     _currentVideoBitRate = _configuration.videoBitRate;
+    // maximum keyframe interval, i.e. the GOP size
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
+    // VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration, (__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval/_configuration.videoFrameRate));
+    // expected frame rate; only used to initialize the session, it is not the actual FPS
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(_configuration.videoFrameRate));
+    // average bitrate; if this is not set, the encoder defaults to a very low bitrate and the output looks blurry
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(_configuration.videoBitRate));
-    NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8), @(1)];
+    // hard data-rate limit
+    NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8), @(1)]; // CFArray[CFNumber], [bytes, seconds, bytes, seconds...]
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);
+    // real-time encoding output, reduces encoding latency
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
+    // H.264 profile; live streaming usually uses baseline, which avoids the latency added by B-frames
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Main_AutoLevel);
+    // allow frame reordering
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanTrue);
+    // use CABAC entropy coding for H.264
     VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_H264EntropyMode, kVTH264EntropyMode_CABAC);
+    // get ready to encode
     VTCompressionSessionPrepareToEncodeFrames(compressionSession);
 }
 
@@ -99,6 +111,7 @@ - (void)dealloc {
 - (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
     if(_isBackGround) return;
     frameCount++;
+    // fps
     CMTime presentationTimeStamp = CMTimeMake(frameCount, (int32_t)_configuration.videoFrameRate);
     VTEncodeInfoFlags flags;
     CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
@@ -109,6 +122,7 @@ - (void)encodeVideoData:(CVPixelBufferRef)pixelBuffer timeStamp:(uint64_t)timeSt
     }
 
     NSNumber *timeNumber = @(timeStamp);
+    // start encoding
     OSStatus status = VTCompressionSessionEncodeFrame(compressionSession, pixelBuffer, presentationTimeStamp, duration, (__bridge CFDictionaryRef)properties, (__bridge_retained void *)timeNumber, &flags);
     if(status != noErr){
         [self resetCompressionSession];
@@ -141,6 +155,7 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
     CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
     if (!dic) return;
 
+    // check whether the current frame is a keyframe
     BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
     uint64_t timeStamp = [((__bridge_transfer NSNumber *)VTFrameRef) longLongValue];
 
@@ -149,21 +164,23 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
         return;
     }
 
-    if (keyframe && !videoEncoder->sps) {
+    if (keyframe && !videoEncoder->sps) { // a keyframe, and the SPS (sequence parameter set) has not been captured yet
         CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
         size_t sparameterSetSize, sparameterSetCount;
+        // get the SPS
         const uint8_t *sparameterSet;
         OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
         if (statusCode == noErr) {
             size_t pparameterSetSize, pparameterSetCount;
+            // get the PPS
             const uint8_t *pparameterSet;
             OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
             if (statusCode == noErr) {
-                videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
-                videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
+                videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize]; // store the SPS
+                videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize]; // store the PPS
 
-                if (videoEncoder->enabledWriteVideoFile) {
+                if (videoEncoder->enabledWriteVideoFile) { // debug
                     NSMutableData *data = [[NSMutableData alloc] init];
                     uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
                     [data appendBytes:header length:4];
@@ -178,9 +195,11 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
     }
 
+    // get the encoded video data
     CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
     size_t length, totalLength;
     char *dataPointer;
+    // get the data pointer, the length at that offset, and the total length
     OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
     if (statusCodeRet == noErr) {
         size_t bufferOffset = 0;
@@ -192,6 +211,7 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
 
             NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
 
+            // wrap the encoded data in an LFVideoFrame for later streaming
             LFVideoFrame *videoFrame = [LFVideoFrame new];
             videoFrame.timestamp = timeStamp;
             videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
@@ -199,11 +219,12 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
             videoFrame.sps = videoEncoder->sps;
             videoFrame.pps = videoEncoder->pps;
 
+            // notify the delegate that a video frame has been encoded
             if (videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]) {
                 [videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
             }
 
-            if (videoEncoder->enabledWriteVideoFile) {
+            if (videoEncoder->enabledWriteVideoFile) { // debug
                 NSMutableData *data = [[NSMutableData alloc] init];
                 if (keyframe) {
                     uint8_t header[] = {0x00, 0x00, 0x00, 0x01};
From cb028c68ed0997dc882f60236252b5105ae1c271 Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 16:43:26 +0800
Subject: [PATCH 4/8] Video encoding comments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 LFLiveKit/coder/LFHardwareVideoEncoder.m | 1 +
 1 file changed, 1 insertion(+)

diff --git a/LFLiveKit/coder/LFHardwareVideoEncoder.m b/LFLiveKit/coder/LFHardwareVideoEncoder.m
index 9bd85df8..6e4b4be3 100755
--- a/LFLiveKit/coder/LFHardwareVideoEncoder.m
+++ b/LFLiveKit/coder/LFHardwareVideoEncoder.m
@@ -204,6 +204,7 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
     if (statusCodeRet == noErr) {
         size_t bufferOffset = 0;
         static const int AVCCHeaderLength = 4;
+        // loop over the NAL units in the block buffer
         while (bufferOffset < totalLength - AVCCHeaderLength) {
             // Read the NAL unit length
             uint32_t NALUnitLength = 0;
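Patches 3 and 4 above annotate the loop that walks the encoder output, which VideoToolbox delivers in AVCC form: each NAL unit is prefixed by a 4-byte big-endian length instead of a start code. The standalone sketch below shows the same walk, rewriting the NAL units with Annex B start codes the way the debug .h264 dump does; the function name and return type are illustrative, not LFLiveKit API.

    #import <Foundation/Foundation.h>
    #import <CoreFoundation/CoreFoundation.h>

    // Walks an AVCC buffer (4-byte big-endian length + NAL unit, repeated) and
    // returns the same NAL units prefixed with Annex B start codes.
    static NSData *annexBFromAVCC(const uint8_t *dataPointer, size_t totalLength) {
        static const size_t AVCCHeaderLength = 4;
        NSMutableData *out = [NSMutableData data];
        size_t bufferOffset = 0;
        while (bufferOffset + AVCCHeaderLength <= totalLength) {
            uint32_t NALUnitLength = 0;
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);   // the length prefix is big-endian
            if (bufferOffset + AVCCHeaderLength + NALUnitLength > totalLength) break;
            static const uint8_t startCode[4] = {0x00, 0x00, 0x00, 0x01};
            [out appendBytes:startCode length:4];
            [out appendBytes:dataPointer + bufferOffset + AVCCHeaderLength length:NALUnitLength];
            bufferOffset += AVCCHeaderLength + NALUnitLength;      // advance to the next NAL unit
        }
        return out;
    }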
From 6241f40e597578fa93fb36314365ed3f9371cc59 Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 17:02:17 +0800
Subject: [PATCH 5/8] Video encoding comments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 LFLiveKit/coder/LFHardwareVideoEncoder.m | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/LFLiveKit/coder/LFHardwareVideoEncoder.m b/LFLiveKit/coder/LFHardwareVideoEncoder.m
index 6e4b4be3..403e391e 100755
--- a/LFLiveKit/coder/LFHardwareVideoEncoder.m
+++ b/LFLiveKit/coder/LFHardwareVideoEncoder.m
@@ -179,6 +179,9 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
             if (statusCode == noErr) {
                 videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize]; // store the SPS
                 videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize]; // store the PPS
+                // When handling the data, the SPS/PPS can be treated as an ordinary H.264 NAL unit and placed at the very front of the H.264 stream.
+                // When saving to a file, prefix it with the 4 bytes [0 0 0 1] and write it at the very beginning of the .h264 file.
+                // When streaming, put this data into the FLV data section.
 
                 if (videoEncoder->enabledWriteVideoFile) { // debug
                     NSMutableData *data = [[NSMutableData alloc] init];
@@ -209,7 +212,9 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
             // Read the NAL unit length
             uint32_t NALUnitLength = 0;
             memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
+
+            // for background on big-endian vs little-endian, see http://blog.csdn.net/sunjie886/article/details/54944810
             NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
 
             // wrap the encoded data in an LFVideoFrame for later streaming
From 7228318ed06cc4dbf7efb92884dfe601b74773b3 Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 17:11:59 +0800
Subject: [PATCH 6/8] Revert "Audio encoding comments"
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This reverts commit 46801e24b9650dcfe80f28adcaf0bf6e75134f7e.

---
 LFLiveKit/coder/LFHardwareAudioEncoder.m | 47 +++++++++++------------
 1 file changed, 22 insertions(+), 25 deletions(-)

diff --git a/LFLiveKit/coder/LFHardwareAudioEncoder.m b/LFLiveKit/coder/LFHardwareAudioEncoder.m
index 34a746b8..207db3b0 100755
--- a/LFLiveKit/coder/LFHardwareAudioEncoder.m
+++ b/LFLiveKit/coder/LFHardwareAudioEncoder.m
@@ -98,36 +98,35 @@ - (void)encodeBuffer:(char*)buf timeStamp:(uint64_t)timeStamp{
     inBuffer.mData = buf;
     inBuffer.mDataByteSize = (UInt32)self.configuration.bufferLength;
 
-    // set up the input buffer list
     AudioBufferList buffers;
-    buffers.mNumberBuffers = 1; // a single inBuffer
+    buffers.mNumberBuffers = 1;
     buffers.mBuffers[0] = inBuffer;
 
-    // set up the output buffer list
+    //  set up the output buffer list
     AudioBufferList outBufferList;
-    outBufferList.mNumberBuffers = 1; // a single outBuffer
+    outBufferList.mNumberBuffers = 1;
     outBufferList.mBuffers[0].mNumberChannels = inBuffer.mNumberChannels;
     outBufferList.mBuffers[0].mDataByteSize = inBuffer.mDataByteSize;   // output buffer size
-    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer, where the encoded data is written
+    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer
     UInt32 outputDataPacketSize = 1;
     if (AudioConverterFillComplexBuffer(m_converter, inputDataProc, &buffers, &outputDataPacketSize, &outBufferList, NULL) != noErr) {
         return;
     }
 
-    // wrap the result in an LFAudioFrame so it can be pushed to the stream later
+
     LFAudioFrame *audioFrame = [LFAudioFrame new];
     audioFrame.timestamp = timeStamp;
     audioFrame.data = [NSData dataWithBytes:aacBuf length:outBufferList.mBuffers[0].mDataByteSize];
 
-    char exeData[2]; // FLV AAC sequence header; 0x12 0x10 for 44100 Hz
+    char exeData[2];
     exeData[0] = _configuration.asc[0];
     exeData[1] = _configuration.asc[1];
     audioFrame.audioInfo = [NSData dataWithBytes:exeData length:2];
     if (self.aacDeleage && [self.aacDeleage respondsToSelector:@selector(audioEncoder:audioFrame:)]) {
-        [self.aacDeleage audioEncoder:self audioFrame:audioFrame]; // notify the delegate that encoding finished
+        [self.aacDeleage audioEncoder:self audioFrame:audioFrame];
     }
 
-    if (self->enabledWriteVideoFile) { // debug only: also write the AAC to a local file
+    if (self->enabledWriteVideoFile) {
         NSData *adts = [self adtsData:_configuration.numberOfChannels rawDataLength:audioFrame.data.length];
         fwrite(adts.bytes, 1, adts.length, self->fp);
         fwrite(audioFrame.data.bytes, 1, audioFrame.data.length, self->fp);
@@ -144,47 +143,45 @@ - (BOOL)createAudioConvert { // initialize an encoder converter from the input samples
     if (m_converter != nil) {
         return TRUE;
     }
 
-    // input audio description
+
     AudioStreamBasicDescription inputFormat = {0};
-    inputFormat.mSampleRate = _configuration.audioSampleRate; // sample rate
-    inputFormat.mFormatID = kAudioFormatLinearPCM; // data format
-    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; // format flags
-    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels; // number of channels
-    inputFormat.mFramesPerPacket = 1; // frames per packet: 1 for uncompressed audio; compressed formats use a larger fixed value, e.g. 1024 for AAC
-    inputFormat.mBitsPerChannel = 16; // bits per channel, i.e. bits per audio sample
-    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame; // bytes per frame
-    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket; // bytes per packet; 0 if packets are variable-sized
+    inputFormat.mSampleRate = _configuration.audioSampleRate;
+    inputFormat.mFormatID = kAudioFormatLinearPCM;
+    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
+    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
+    inputFormat.mFramesPerPacket = 1;
+    inputFormat.mBitsPerChannel = 16;
+    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame;
+    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket;
 
-    // output audio description
     AudioStreamBasicDescription outputFormat; // the output audio format starts here
-    memset(&outputFormat, 0, sizeof(outputFormat)); // zero-initialize
+    memset(&outputFormat, 0, sizeof(outputFormat));
     outputFormat.mSampleRate = inputFormat.mSampleRate;       // keep the sample rate unchanged
     outputFormat.mFormatID = kAudioFormatMPEG4AAC;            // AAC encoding: kAudioFormatMPEG4AAC or kAudioFormatMPEG4AAC_HE_V2
     outputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;;
     outputFormat.mFramesPerPacket = 1024;                     // one AAC packet contains 1024 frames
 
     const OSType subtype = kAudioFormatMPEG4AAC;
-    // two codec choices: software and hardware
     AudioClassDescription requestedCodecs[2] = {
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleSoftwareAudioCodecManufacturer // software codec
+            kAppleSoftwareAudioCodecManufacturer
         },
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleHardwareAudioCodecManufacturer // hardware codec
+            kAppleHardwareAudioCodecManufacturer
         }
     };
 
-    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter); // create the AudioConverter: input format, output format, number of codec descriptions, the codec descriptions, and the converter out-parameter
+    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter);;
 
     UInt32 outputBitrate = _configuration.audioBitrate;
     UInt32 propSize = sizeof(outputBitrate);
 
     if(result == noErr) {
-        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate); // set the output bitrate
+        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate);
     }
 
     return YES;
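The comment removed here and restored by patch 7 below notes that the two asc bytes are 0x12 0x10 at 44100 Hz. Those bytes are the MPEG-4 AudioSpecificConfig carried in the FLV audio sequence header; the helper below sketches how they are derived for AAC-LC and is illustrative, not part of LFLiveKit.

    #include <stdint.h>

    // AudioSpecificConfig for AAC-LC: 5 bits audio object type, 4 bits sample-rate index,
    // 4 bits channel configuration, then 3 zero bits.
    // 44100 Hz has index 4, so with 2 channels: 00010 0100 0010 000 -> 0x12 0x10.
    static void aacAudioSpecificConfig(int sampleRateIndex, int channels, uint8_t asc[2]) {
        const int objectType = 2;                                          // AAC-LC
        asc[0] = (uint8_t)((objectType << 3) | (sampleRateIndex >> 1));
        asc[1] = (uint8_t)(((sampleRateIndex & 0x1) << 7) | (channels << 3));
    }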
---
 LFLiveKit/coder/LFHardwareAudioEncoder.m | 47 +++++++++++++-----------
 1 file changed, 25 insertions(+), 22 deletions(-)

diff --git a/LFLiveKit/coder/LFHardwareAudioEncoder.m b/LFLiveKit/coder/LFHardwareAudioEncoder.m
index 207db3b0..34a746b8 100755
--- a/LFLiveKit/coder/LFHardwareAudioEncoder.m
+++ b/LFLiveKit/coder/LFHardwareAudioEncoder.m
@@ -98,35 +98,36 @@ - (void)encodeBuffer:(char*)buf timeStamp:(uint64_t)timeStamp{
     inBuffer.mData = buf;
     inBuffer.mDataByteSize = (UInt32)self.configuration.bufferLength;
 
+    // set up the input buffer list
     AudioBufferList buffers;
-    buffers.mNumberBuffers = 1;
+    buffers.mNumberBuffers = 1; // a single inBuffer
     buffers.mBuffers[0] = inBuffer;
 
-    //  set up the output buffer list
+    // set up the output buffer list
     AudioBufferList outBufferList;
-    outBufferList.mNumberBuffers = 1;
+    outBufferList.mNumberBuffers = 1; // a single outBuffer
     outBufferList.mBuffers[0].mNumberChannels = inBuffer.mNumberChannels;
     outBufferList.mBuffers[0].mDataByteSize = inBuffer.mDataByteSize;   // output buffer size
-    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer
+    outBufferList.mBuffers[0].mData = aacBuf;           // AAC buffer, where the encoded data is written
     UInt32 outputDataPacketSize = 1;
     if (AudioConverterFillComplexBuffer(m_converter, inputDataProc, &buffers, &outputDataPacketSize, &outBufferList, NULL) != noErr) {
         return;
     }
 
-
+    // wrap the result in an LFAudioFrame so it can be pushed to the stream later
     LFAudioFrame *audioFrame = [LFAudioFrame new];
     audioFrame.timestamp = timeStamp;
     audioFrame.data = [NSData dataWithBytes:aacBuf length:outBufferList.mBuffers[0].mDataByteSize];
 
-    char exeData[2];
+    char exeData[2]; // FLV AAC sequence header; 0x12 0x10 for 44100 Hz
     exeData[0] = _configuration.asc[0];
     exeData[1] = _configuration.asc[1];
     audioFrame.audioInfo = [NSData dataWithBytes:exeData length:2];
     if (self.aacDeleage && [self.aacDeleage respondsToSelector:@selector(audioEncoder:audioFrame:)]) {
-        [self.aacDeleage audioEncoder:self audioFrame:audioFrame];
+        [self.aacDeleage audioEncoder:self audioFrame:audioFrame]; // notify the delegate that encoding finished
     }
 
-    if (self->enabledWriteVideoFile) {
+    if (self->enabledWriteVideoFile) { // debug only: also write the AAC to a local file
         NSData *adts = [self adtsData:_configuration.numberOfChannels rawDataLength:audioFrame.data.length];
         fwrite(adts.bytes, 1, adts.length, self->fp);
         fwrite(audioFrame.data.bytes, 1, audioFrame.data.length, self->fp);
@@ -143,45 +144,47 @@ - (BOOL)createAudioConvert { // initialize an encoder converter from the input samples
     if (m_converter != nil) {
         return TRUE;
     }
 
-
+    // input audio description
     AudioStreamBasicDescription inputFormat = {0};
-    inputFormat.mSampleRate = _configuration.audioSampleRate;
-    inputFormat.mFormatID = kAudioFormatLinearPCM;
-    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
-    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
-    inputFormat.mFramesPerPacket = 1;
-    inputFormat.mBitsPerChannel = 16;
-    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame;
-    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket;
+    inputFormat.mSampleRate = _configuration.audioSampleRate; // sample rate
+    inputFormat.mFormatID = kAudioFormatLinearPCM; // data format
+    inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked; // format flags
+    inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels; // number of channels
+    inputFormat.mFramesPerPacket = 1; // frames per packet: 1 for uncompressed audio; compressed formats use a larger fixed value, e.g. 1024 for AAC
+    inputFormat.mBitsPerChannel = 16; // bits per channel, i.e. bits per audio sample
+    inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame; // bytes per frame
+    inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket; // bytes per packet; 0 if packets are variable-sized
 
+    // output audio description
     AudioStreamBasicDescription outputFormat; // the output audio format starts here
-    memset(&outputFormat, 0, sizeof(outputFormat));
+    memset(&outputFormat, 0, sizeof(outputFormat)); // zero-initialize
     outputFormat.mSampleRate = inputFormat.mSampleRate;       // keep the sample rate unchanged
     outputFormat.mFormatID = kAudioFormatMPEG4AAC;            // AAC encoding: kAudioFormatMPEG4AAC or kAudioFormatMPEG4AAC_HE_V2
     outputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;;
     outputFormat.mFramesPerPacket = 1024;                     // one AAC packet contains 1024 frames
 
     const OSType subtype = kAudioFormatMPEG4AAC;
+    // two codec choices: software and hardware
     AudioClassDescription requestedCodecs[2] = {
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleSoftwareAudioCodecManufacturer
+            kAppleSoftwareAudioCodecManufacturer // software codec
         },
         {
             kAudioEncoderComponentType,
             subtype,
-            kAppleHardwareAudioCodecManufacturer
+            kAppleHardwareAudioCodecManufacturer // hardware codec
         }
     };
 
-    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter);;
+    OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter); // create the AudioConverter: input format, output format, number of codec descriptions, the codec descriptions, and the converter out-parameter
 
     UInt32 outputBitrate = _configuration.audioBitrate;
     UInt32 propSize = sizeof(outputBitrate);
 
     if(result == noErr) {
-        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate);
+        result = AudioConverterSetProperty(m_converter, kAudioConverterEncodeBitRate, propSize, &outputBitrate); // set the output bitrate
     }
 
     return YES;

From e12868cbc79186f1bd8732db38a1477ece5def8a Mon Sep 17 00:00:00 2001
From: february29
Date: Mon, 9 Oct 2017 17:17:25 +0800
Subject: [PATCH 8/8] Improve video encoding comments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 LFLiveKit/coder/LFHardwareVideoEncoder.m | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/LFLiveKit/coder/LFHardwareVideoEncoder.m b/LFLiveKit/coder/LFHardwareVideoEncoder.m
index 403e391e..a90fd97a 100755
--- a/LFLiveKit/coder/LFHardwareVideoEncoder.m
+++ b/LFLiveKit/coder/LFHardwareVideoEncoder.m
@@ -214,7 +214,7 @@ static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatu
             memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
 
-            // for background on big-endian vs little-endian, see http://blog.csdn.net/sunjie886/article/details/54944810
+            // convert from big-endian to host byte order; for background on endianness, see http://blog.csdn.net/sunjie886/article/details/54944810
             NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
 
             // wrap the encoded data in an LFVideoFrame for later streaming
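For reference, the debug branch in the audio hunks writes an ADTS header before each AAC frame via adtsData:rawDataLength:. The sketch below shows what a 7-byte ADTS header packs; the AAC-LC profile and the 44100 Hz frequency index are assumptions for illustration, not LFLiveKit's exact implementation.

    #import <Foundation/Foundation.h>

    // Builds the 7-byte ADTS header that precedes each raw AAC frame in a .aac file.
    static NSData *adtsHeaderSketch(NSUInteger channels, NSUInteger rawDataLength) {
        const int adtsLength = 7;
        const int profile = 2;                     // assumed: AAC-LC audio object type
        const int freqIdx = 4;                     // assumed: 44100 Hz
        const NSUInteger fullLength = adtsLength + rawDataLength;
        uint8_t packet[7];
        packet[0] = 0xFF;                                                          // syncword (high 8 bits)
        packet[1] = 0xF9;                                                          // syncword low bits, ID, layer, no CRC
        packet[2] = (uint8_t)(((profile - 1) << 6) + (freqIdx << 2) + ((channels >> 2) & 0x1));
        packet[3] = (uint8_t)(((channels & 3) << 6) + (fullLength >> 11));         // channel config + frame length (high bits)
        packet[4] = (uint8_t)((fullLength & 0x7FF) >> 3);                          // frame length (middle bits)
        packet[5] = (uint8_t)(((fullLength & 7) << 5) + 0x1F);                     // frame length (low bits) + buffer fullness
        packet[6] = 0xFC;                                                          // buffer fullness + one raw data block
        return [NSData dataWithBytes:packet length:adtsLength];
    }

Writing this header followed by audioFrame.data for every frame, as that debug branch does, yields a playable .aac file.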