@@ -20,9 +20,12 @@ public class AudioEncoder {
// Selected output codec; defaults to AAC.
2020 private var codec = AudioCodec . AAC
// Source PCM format, captured from the first dequeued frame in start().
2121 private var inputFormat : AVAudioFormat ? = nil
// Target encoder bitrate in bits per second (128 kbps default).
2222 private var bitrate = 128 * 1000
// NOTE(review): ring buffer decoupling capture-sized PCM chunks from the fixed
// 1024-frame AAC encoder input; created lazily once the input format is known.
23+ private var ringBuffer : AudioRingBuffer ? = nil
// NOTE(review): shared audio clock used to derive output timestamps — the
// semantics of AudioTime (anchor/advanced/at) are not visible in this chunk.
24+ private let audioTime = AudioTime ( )
2325
/// Creates an encoder; encoded frames are delivered through `callback`.
public init(callback: GetAacData) {
    self.callback = callback
}
2730
2831 public func setCodec( codec: AudioCodec ) {
@@ -57,16 +60,17 @@ public class AudioEncoder {
5760 }
5861
// Starts the encode loop: drains PCM frames from syncQueue on a background
// thread and hands them to the codec-specific conversion path.
5962 public func start( ) {
// Timestamping no longer uses wall-clock initTs here; it is derived from
// audioTime inside convert() instead (anchored below on the first frame).
60- self . initTs = UInt64 ( Date ( ) . millisecondsSince1970 * 1000 )
6163 running = true
6264 syncQueue. clear ( )
6365 thread. async {
6466 while ( self . running) {
6567 let pcmFrame = self . syncQueue. dequeue ( )
6668 if let pcmFrame = pcmFrame {
67- let ts = UInt64 ( pcmFrame. ts * 1000 )
// First frame: latch the input format, build the ring buffer for it, and
// reset the audio clock so the anchor below starts fresh.
6869 if self . inputFormat == nil {
69- self . inputFormat = pcmFrame. buffer. format
70+ let format = pcmFrame. buffer. format
71+ self . inputFormat = format
72+ self . ringBuffer = AudioRingBuffer ( format)
73+ self . audioTime. reset ( )
7074 }
// Lazily create the AVAudioConverter once both formats are known.
// NOTE(review): the converter-creation body (new-file lines 77-81) is omitted
// by the hunk boundary below — not visible in this view.
7175 if self . converter == nil {
7276 if let inputFormat = self . inputFormat, let outputFormat = self . outputFormat {
@@ -78,28 +82,15 @@ public class AudioEncoder {
7882 }
7983 }
8084 var error : NSError ? = nil
8485+ guard let outputFormat = self . outputFormat else { continue }
// Anchor the audio clock to the first frame's capture time so timestamps
// are derived from sample counts rather than wall-clock time.
86+ if !self . audioTime. hasAnchor {
87+ self . audioTime. anchor ( pcmFrame. time, sampleRate: outputFormat. sampleRate)
88+ }
8189 if self . codec == AudioCodec . AAC {
// AAC path: accumulate PCM in the ring buffer; convertAAC() drains it in
// fixed 1024-frame chunks and emits frames via the callback itself.
82- guard let aacBuffer = self . convertAAC ( inputBuffer: pcmFrame. buffer, error: & error) else {
83- continue
84- }
85- if error != nil {
86- print ( " Encode error: \( error. debugDescription) " )
87- } else {
88- let data = Array < UInt8 > ( UnsafeBufferPointer < UInt8 > ( start: aacBuffer. data. assumingMemoryBound ( to: UInt8 . self) , count: Int ( aacBuffer. byteLength) ) )
89- let elapsedMicroSeconds = ts - self . initTs
90- self . callback? . getAacData ( frame: Frame ( buffer: data, length: UInt32 ( data. count) , timeStamp: elapsedMicroSeconds) )
91- }
90+ self . ringBuffer? . append ( pcmFrame. buffer)
91+ self . convertAAC ( error: & error)
9292 } else if self . codec == AudioCodec . G711 {
// G.711 path: converts the frame directly (force mode), bypassing the ring buffer.
93- guard let g711Buffer = self . convertG711 ( inputBuffer: pcmFrame. buffer, error: & error) else {
94- continue
95- }
96- if error != nil {
97- print ( " Encode error: \( error. debugDescription) " )
98- } else {
99- let data = g711Buffer. audioBufferToBytes ( )
100- let elapsedMicroSeconds = ts - self . initTs
101- self . callback? . getAacData ( frame: Frame ( buffer: data, length: UInt32 ( data. count) , timeStamp: elapsedMicroSeconds) )
102- }
93+ self . convertG711 ( inputBuffer: pcmFrame. buffer, error: & error)
10394 }
10495 }
10596 }
@@ -112,43 +103,77 @@ public class AudioEncoder {
112103 outputFormat = nil
113104 initTs = 0
114105 syncQueue. clear ( )
106+ audioTime. reset ( )
115107 }
116108
/// Drains the ring buffer through the AAC converter, emitting encoded frames
/// via the shared `convert` helper.
/// - Parameter error: Error out-pointer supplied by the caller (not filled by
///   the converter call itself — see `convert`).
private func convertAAC(error: NSErrorPointer) {
    guard running else { return }
    guard let inputFormat = inputFormat, let outputFormat = outputFormat else { return }

    // Scratch PCM buffer that the converter's input closure fills from the ring buffer.
    guard let scratch = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: 1024 * 4) else { return }

    // One AAC packet per pass; capacity scales with channel count.
    let maxPacketSize = 1024 * Int(outputFormat.channelCount)
    let compressed = AVAudioCompressedBuffer(format: outputFormat, packetCapacity: 1, maximumPacketSize: maxPacketSize)

    convert(inputBuffer: scratch, outputBuffer: compressed, extraTime: 1024)
}
133119
/// Converts a single PCM buffer to G.711 and emits it via the shared `convert`
/// helper (force mode: the buffer is fed directly, bypassing the ring buffer).
/// - Parameters:
///   - inputBuffer: Source PCM frame from the capture queue.
///   - error: Error out-pointer supplied by the caller (not filled by the
///     converter call itself — see `convert`).
private func convertG711(inputBuffer: AVAudioPCMBuffer, error: NSErrorPointer) {
    if (running) {
        guard let outputFormat = outputFormat else { return }
        // Output frame count after resampling from the input rate to the output rate.
        let outFrames = AVAudioFrameCount(outputFormat.sampleRate) * inputBuffer.frameLength / AVAudioFrameCount(inputBuffer.format.sampleRate)
        // Fix: guard instead of force-unwrap — AVAudioPCMBuffer(pcmFormat:frameCapacity:)
        // returns nil for a zero/invalid frame capacity (e.g. an empty input buffer),
        // which previously crashed via `!`.
        guard outFrames > 0, let outputBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: outFrames) else { return }
        outputBuffer.frameLength = outputBuffer.frameCapacity

        convert(inputBuffer: inputBuffer, outputBuffer: outputBuffer, extraTime: AVAudioFramePosition(outFrames), force: true)
    }
}
151130
/// Runs the converter until it stops producing data, emitting each encoded
/// buffer through the callback and advancing the audio clock.
/// - Parameters:
///   - inputBuffer: Scratch buffer (AAC path) filled from the ring buffer, or
///     the source buffer itself (G.711 force path).
///   - outputBuffer: Destination — `AVAudioCompressedBuffer` for AAC,
///     `AVAudioPCMBuffer` for G.711.
///   - extraTime: Frames to advance `audioTime` by per emitted buffer.
///   - force: When true, feed `inputBuffer` directly exactly once instead of
///     pulling from the ring buffer (used by G.711).
private func convert(inputBuffer: AVAudioPCMBuffer, outputBuffer: AVAudioBuffer, extraTime: AVAudioFramePosition, force: Bool = false) {
    guard let ringBuffer = ringBuffer else { return }
    // Fix: start from nil rather than a misleading `.endOfStream` seed.
    var status: AVAudioConverterOutputStatus? = nil

    repeat {
        // Fix: capture the converter's error instead of passing nil and
        // discarding it.
        var convertError: NSError? = nil
        status = converter?.convert(to: outputBuffer, error: &convertError) { inNumberFrames, outStatus in
            if force {
                outStatus.pointee = .haveData
                return inputBuffer
            } else if inNumberFrames <= ringBuffer.counts {
                // Enough buffered samples: render the requested frames into the scratch buffer.
                _ = ringBuffer.render(inNumberFrames, ioData: inputBuffer.mutableAudioBufferList)
                inputBuffer.frameLength = inNumberFrames
                outStatus.pointee = .haveData
                return inputBuffer
            } else {
                // Not enough buffered samples yet; tell the converter to retry later.
                outStatus.pointee = .noDataNow
                return nil
            }
        }
        switch status {
        case .haveData:
            let data: Array<UInt8>
            switch outputBuffer {
            case let outputBuffer as AVAudioCompressedBuffer:
                data = Array<UInt8>(UnsafeBufferPointer<UInt8>(start: outputBuffer.data.assumingMemoryBound(to: UInt8.self), count: Int(outputBuffer.byteLength)))
            case let outputBuffer as AVAudioPCMBuffer:
                data = outputBuffer.audioBufferToBytes()
            default:
                // Unknown buffer subtype: skip this pass of the loop.
                continue
            }

            // Timestamp in microseconds from the sample-count-driven audio clock.
            let ts = UInt64(self.audioTime.at.makeTime().seconds * 1000000)
            if self.initTs == 0 {
                self.initTs = ts
            }
            // Fix: clamp to avoid UInt64 underflow if the clock ever reports a
            // value below the recorded anchor.
            let elapsedMicroSeconds = ts >= self.initTs ? ts - self.initTs : 0
            self.callback?.getAacData(frame: Frame(buffer: data, length: UInt32(data.count), timeStamp: elapsedMicroSeconds))
            self.audioTime.advanced(extraTime)
        case .error:
            // Fix: surface the actual conversion failure instead of a bare "error".
            print("Encode error: \(convertError?.localizedDescription ?? "unknown")")
        default:
            break
        }
    } while (status == .haveData && !force)
}
176+
152177 private func getAACFormat( sampleRate: Double , channels: UInt32 ) -> AVAudioFormat ? {
153178 var description = AudioStreamBasicDescription ( mSampleRate: sampleRate,
154179 mFormatID: kAudioFormatMPEG4AAC,
0 commit comments