Skip to content

Commit a3286c2

Browse files
committed
Fix desync
1 parent 413fa5e commit a3286c2

File tree

1 file changed

+39
-90
lines changed

1 file changed

+39
-90
lines changed

RecoreonBroadcastUploadExtension/SampleHandler.swift

Lines changed: 39 additions & 90 deletions
Original file line number | Diff line number | Diff line change
@@ -34,6 +34,8 @@ class SampleHandler: RPBroadcastSampleHandler {
3434
var micAudioResampler: AudioResampler?
3535
var micAudioWriter: FragmentedAudioWriter?
3636

37+
var videoFirstTime: CMTime = .invalid
38+
3739
override func broadcastStarted(withSetupInfo setupInfo: [String: NSObject]?) {
3840
let width = 888
3941
let height = 1920
@@ -153,23 +155,31 @@ class SampleHandler: RPBroadcastSampleHandler {
153155
sampleBuffer: sampleBuffer
154156
)
155157
case RPSampleBufferType.audioApp:
158+
let devicePTS = sampleBuffer.presentationTimeStamp
159+
guard videoFirstTime != .invalid else { return }
160+
let outputPTS = devicePTS - videoFirstTime
161+
156162
do {
157163
try write(
158164
audioWriter: appAudioWriter,
159165
audioResampler: appAudioResampler,
160166
sampleBuffer: sampleBuffer,
161-
pts: sampleBuffer.presentationTimeStamp
167+
pts: outputPTS
162168
)
163169
} catch {
164170
print(error)
165171
}
166172
case RPSampleBufferType.audioMic:
173+
let devicePTS = sampleBuffer.presentationTimeStamp
174+
guard videoFirstTime != .invalid else { return }
175+
let outputPTS = devicePTS - videoFirstTime
176+
167177
do {
168178
try write(
169179
audioWriter: micAudioWriter,
170180
audioResampler: micAudioResampler,
171181
sampleBuffer: sampleBuffer,
172-
pts: sampleBuffer.presentationTimeStamp
182+
pts: outputPTS
173183
)
174184
} catch {
175185
print(error)
@@ -180,13 +190,15 @@ class SampleHandler: RPBroadcastSampleHandler {
180190
}
181191

182192
override func broadcastFinished() {
193+
appGroupsUserDefaults?.set(0, forKey: AppGroupsPreferenceService.ongoingRecordingTimestampKey)
194+
videoTranscoder?.close()
195+
183196
let semaphore = DispatchSemaphore(value: 0)
184197
Task { [weak self] in
185198
guard let self = self else {
186199
print("Clean up failed!")
187200
return
188201
}
189-
self.videoTranscoder?.close()
190202
try await self.videoWriter?.close()
191203
try await self.appAudioWriter?.close()
192204
try await self.micAudioWriter?.close()
@@ -200,7 +212,7 @@ class SampleHandler: RPBroadcastSampleHandler {
200212
videoTranscoder: RealtimeVideoTranscoder,
201213
sampleBuffer: CMSampleBuffer
202214
) {
203-
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
215+
guard let pixelBuffer = sampleBuffer.imageBuffer else {
204216
logger.warning("Video sample buffer is not available!")
205217
return
206218
}
@@ -210,15 +222,18 @@ class SampleHandler: RPBroadcastSampleHandler {
210222
firstVideoFrameArrived = true
211223
}
212224

213-
let pts = CMTimeConvertScale(
214-
sampleBuffer.presentationTimeStamp,
215-
timescale: 60,
216-
method: .roundTowardPositiveInfinity
217-
)
225+
let devicePTS = sampleBuffer.presentationTimeStamp
226+
227+
if videoFirstTime == .invalid {
228+
videoFirstTime = devicePTS
229+
}
218230

219-
videoTranscoder.send(imageBuffer: pixelBuffer, pts: pts) { (status, infoFlags, sbuf) in
231+
let elapsedTime = devicePTS - videoFirstTime
232+
let outputPTS = CMTimeConvertScale(elapsedTime, timescale: 60, method: .roundTowardPositiveInfinity)
233+
234+
videoTranscoder.send(imageBuffer: pixelBuffer, pts: outputPTS) { (status, infoFlags, sbuf) in
220235
if let sampleBuffer = sbuf {
221-
try? sampleBuffer.setOutputPresentationTimeStamp(pts)
236+
try? sampleBuffer.setOutputPresentationTimeStamp(outputPTS)
222237
try? videoWriter.send(sampleBuffer: sampleBuffer)
223238
}
224239
}
@@ -230,86 +245,20 @@ class SampleHandler: RPBroadcastSampleHandler {
230245
sampleBuffer: CMSampleBuffer,
231246
pts: CMTime
232247
) throws {
233-
var blockBufferOut: CMBlockBuffer?
234-
var audioBufferList = AudioBufferList()
235-
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
236-
sampleBuffer,
237-
bufferListSizeNeededOut: nil,
238-
bufferListOut: &audioBufferList,
239-
bufferListSize: MemoryLayout<AudioBufferList>.size,
240-
blockBufferAllocator: kCFAllocatorDefault,
241-
blockBufferMemoryAllocator: kCFAllocatorDefault,
242-
flags: 0,
243-
blockBufferOut: &blockBufferOut
244-
)
245-
246-
guard
247-
let format = CMSampleBufferGetFormatDescription(sampleBuffer),
248-
let audioStreamBasicDesc = CMAudioFormatDescriptionGetStreamBasicDescription(format)?.pointee,
249-
audioStreamBasicDesc.mFormatID == kAudioFormatLinearPCM,
250-
let data = audioBufferList.mBuffers.mData
251-
else {
252-
logger.error("Audio input sample could not be gotten!")
253-
return
254-
}
255-
256-
let isSignedInteger = audioStreamBasicDesc.mFormatFlags & kAudioFormatFlagIsSignedInteger != 0
257-
let isMono = audioStreamBasicDesc.mChannelsPerFrame == 1
258-
let isStereo = audioStreamBasicDesc.mChannelsPerFrame == 2
259-
let isBigEndian = audioStreamBasicDesc.mFormatFlags & kAudioFormatFlagIsBigEndian != 0
260-
let bytesPerSample = Int(audioStreamBasicDesc.mBytesPerFrame) / (isStereo ? 2 : 1)
261-
let inputSampleRate = Int(audioStreamBasicDesc.mSampleRate)
262-
if isStereo && isSignedInteger && bytesPerSample == 2 && !isBigEndian {
263-
try audioResampler.append(
264-
stereoInt16Buffer: data.assumingMemoryBound(to: Int16.self),
265-
numInputSamples: Int(audioBufferList.mBuffers.mDataByteSize) / 4,
266-
inputSampleRate: inputSampleRate,
267-
pts: pts
268-
)
269-
} else if isMono && isSignedInteger && bytesPerSample == 2 && !isBigEndian {
270-
try audioResampler.append(
271-
monoInt16Buffer: data.assumingMemoryBound(to: Int16.self),
272-
numInputSamples: Int(audioBufferList.mBuffers.mDataByteSize) / 2,
273-
inputSampleRate: inputSampleRate,
274-
pts: pts
275-
)
276-
} else if isStereo && isSignedInteger && bytesPerSample == 2 && isBigEndian {
277-
try audioResampler.append(
278-
stereoInt16BufferWithSwap: data.assumingMemoryBound(to: Int16.self),
279-
numInputSamples: Int(audioBufferList.mBuffers.mDataByteSize) / 4,
280-
inputSampleRate: inputSampleRate,
281-
pts: pts
248+
try sampleBuffer.withAudioBufferList { (_, blockBuffer) in
249+
var sampleTiming = try sampleBuffer.sampleTimingInfo(at: 0)
250+
sampleTiming.presentationTimeStamp = pts
251+
sampleTiming.decodeTimeStamp = .invalid
252+
253+
let outputSampleBuffer = try CMSampleBuffer(
254+
dataBuffer: blockBuffer,
255+
formatDescription: sampleBuffer.formatDescription,
256+
numSamples: sampleBuffer.numSamples,
257+
sampleTimings: [sampleTiming],
258+
sampleSizes: []
282259
)
283-
} else if isMono && isSignedInteger && bytesPerSample == 2 && isBigEndian {
284-
try audioResampler.append(
285-
monoInt16BufferWithSwap: data.assumingMemoryBound(to: Int16.self),
286-
numInputSamples: Int(audioBufferList.mBuffers.mDataByteSize) / 2,
287-
inputSampleRate: inputSampleRate,
288-
pts: pts
289-
)
290-
} else {
291-
logger.warning("Audio sample format is not supported!")
292-
}
293-
294-
let audioResamplerFrame = audioResampler.getCurrentFrame()
295260

296-
let buffer = UnsafeMutableRawBufferPointer(audioResamplerFrame.data)
297-
let blockBuffer = try CMBlockBuffer(buffer: buffer, allocator: kCFAllocatorNull)
298-
299-
let sampleTiming = CMSampleTimingInfo(
300-
duration: audioResampler.duration,
301-
presentationTimeStamp: pts,
302-
decodeTimeStamp: .invalid
303-
)
304-
305-
let samplerBuffer = try CMSampleBuffer(
306-
dataBuffer: blockBuffer,
307-
formatDescription: audioResampler.outputFormatDesc,
308-
numSamples: audioResamplerFrame.numSamples,
309-
sampleTimings: [sampleTiming],
310-
sampleSizes: []
311-
)
312-
313-
try audioWriter.send(sampleBuffer: sampleBuffer)
261+
try audioWriter.send(sampleBuffer: outputSampleBuffer)
262+
}
314263
}
315264
}

0 commit comments

Comments (0)