
iOS – Take audio sample to push real-time audio to server



I use LFLiveKit to stream video only from the device and it works well. Now I want to push an audio file to play along with it. We use a WOWZA server with an RTMP connection to stream and play. With the code below, the song plays for a random 10-15 seconds, and at one point it also hangs the video stream. I push the audio after the session has started. Any help in fixing this will be greatly appreciated.

lazy var session: LFLiveSession = {
        // Mono audio at the default medium-quality settings
        let audioConfiguration = LFLiveAudioConfiguration.defaultConfiguration(for: .medium)
        audioConfiguration?.numberOfChannels = 1
        let videoConfiguration = LFLiveVideoConfiguration.defaultConfiguration(for: .high3)
        // captureMaskVideoInputAudio: LFLiveKit captures video itself, while audio is pushed in manually via pushAudio(_:)
        let session = LFLiveSession(audioConfiguration: audioConfiguration, videoConfiguration: videoConfiguration, captureType: .captureMaskVideoInputAudio)
        session?.captureDevicePosition = .back
        session?.delegate = self
        session?.preView = self.videView
        session?.showDebugInfo = true
        return session!
    }()
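
For completeness, the stream itself is started in the standard LFLiveKit way before any audio is pushed (a minimal sketch; the RTMP URL is a placeholder for our WOWZA endpoint):

let streamInfo = LFLiveStreamInfo()
streamInfo.url = "rtmp://example.com:1935/live/streamName" // placeholder WOWZA URL
session.startLive(streamInfo)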

func documentPicker(_ controller: UIDocumentPickerViewController, didPickDocumentsAt urls: [URL]) {
        if controller.documentPickerMode == .import {
            let firstURL = urls[0] // path of the picked song
            loopAmplitudes(audioFileUrl: firstURL)
        }
    }
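
The picker is presented with the stock UIDocumentPickerViewController (a minimal sketch, using the pre-iOS-14 documentTypes initializer to match the documentPickerMode check above):

import MobileCoreServices

let picker = UIDocumentPickerViewController(documentTypes: [kUTTypeAudio as String], in: .import)
picker.delegate = self
present(picker, animated: true)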

func loopAmplitudes(audioFileUrl: URL) {
        let asset = AVAsset(url: audioFileUrl)
        let reader = try! AVAssetReader(asset: asset)
        let track = asset.tracks(withMediaType: AVMediaType.audio)[0]

        // Decode to 16-bit interleaved linear PCM; for an audio track,
        // naturalTimeScale matches the sample rate
        let settings = [
            AVFormatIDKey : kAudioFormatLinearPCM,
            AVNumberOfChannelsKey: 1,
            AVLinearPCMBitDepthKey: 16,
            AVSampleRateKey: track.naturalTimeScale,
            AVLinearPCMIsNonInterleaved: false,
            AVLinearPCMIsFloatKey: false,
            AVLinearPCMIsBigEndianKey: false,
        ] as [String : Any]

        let readerOutput = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
        reader.add(readerOutput)
        reader.startReading()

        while let sampleBuffer = readerOutput.copyNextSampleBuffer() {

            var audioBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
            var blockBuffer: CMBlockBuffer?

            CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, bufferListSizeNeededOut: nil, bufferListOut: &audioBufferList, bufferListSize: MemoryLayout<AudioBufferList>.size, blockBufferAllocator: nil, blockBufferMemoryAllocator: nil, flags: kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, blockBufferOut: &blockBuffer)

            let buffers = UnsafeBufferPointer(start: &audioBufferList.mBuffers, count: Int(audioBufferList.mNumberBuffers))

            for audioBuffer in buffers {
                // Copy the raw PCM bytes out of the buffer and push them to the session
                let audio = audioBuffer.mData!.assumingMemoryBound(to: UInt8.self)   //WORKING PARTIALLY
                let newdata = Data(bytes: audio, count: Int(audioBuffer.mDataByteSize))
                session.pushAudio(newdata)
            }
        }
    }
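
I suspect part of the problem is that the while loop above decodes and pushes the whole file as fast as copyNextSampleBuffer can return it, while pushAudio presumably expects data at roughly the real-time rate of captured microphone audio. Below is a pacing sketch I am experimenting with; pushPacedAudio and its push closure are my own names, and sleep-based pacing is an assumption on my part, not documented LFLiveKit behavior:

import AVFoundation

func pushPacedAudio(from readerOutput: AVAssetReaderTrackOutput, push: @escaping (CMSampleBuffer) -> Void) {
    // Run off the main thread so the pacing sleeps don't block the UI
    DispatchQueue.global(qos: .userInitiated).async {
        while let sampleBuffer = readerOutput.copyNextSampleBuffer() {
            push(sampleBuffer) // extract PCM and call session.pushAudio(_:) as in the loop above
            let duration = CMSampleBufferGetDuration(sampleBuffer)
            if duration.isValid, duration.seconds > 0 {
                // Crude real-time pacing: sleep for the stretch of audio just pushed
                Thread.sleep(forTimeInterval: duration.seconds)
            }
        }
    }
}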

