简体   繁体   中英

How to get Audio recording buffer data live?

I am working on getting audio data from the iPhone mic and sending it to a socket. I have already tried AVAudioEngine to get the audio buffer, but somehow it's not working. Can you please suggest a better way to get recording buffer data live?

  override func viewDidLoad() {
        super.viewDidLoad()

        // Initialize the capture engine and bail out early if the device
        // exposes no input node (e.g. no microphone permission/hardware).
        engine = AVAudioEngine()
        guard nil != engine?.inputNode else {
            // @TODO: error out
            return
        }

        // Play back audio data pushed by the server on the "listen" event.
        // Capture self weakly: the socket manager is a long-lived singleton
        // and must not retain this view controller.
        SocketIOManager.sharedInstance.socket.on("listen") { [weak self] data, ack in
            guard self != nil else { return }

            // Validate the payload instead of force-casting: a malformed
            // packet would otherwise crash the app.
            guard let bufferData = data.first as? Data else {
                print("listen: expected Data payload, got \(data)")
                return
            }

            do {
                // NOTE(review): AVAudioPlayer expects a complete audio *file*
                // (WAV/M4A/…), not raw PCM frames — confirm the server sends a
                // container format, otherwise this initializer throws.
                // NOTE(review): `player` is a local; it may be deallocated
                // before playback finishes — keep a strong reference if audio
                // cuts off.
                let player = try AVAudioPlayer(data: bufferData)
                player.play()
            } catch {
                print(error.localizedDescription)
            }

            print("socket connected \(data)")
        }
    }

 func installTap() {

        // Recreate the engine so repeated calls start from a clean graph,
        // and verify an input node is available before tapping it.
        engine = AVAudioEngine()
        guard let engine = engine, let input = engine.inputNode else {
            // @TODO: error out
            return
        }

        // Tap the microphone with its native hardware format, ~4096 frames
        // per callback.
        let format = input.inputFormat(forBus: 0)
        input.installTap(onBus: 0, bufferSize: 4096, format: format, block: { [weak self] buffer, when in

            guard let this = self else {
                return
            }

            // Writing to file: for testing purposes only. Optional-chain
            // instead of force-unwrapping `file` so a missing file does not
            // crash the audio thread; surface write failures instead of
            // silently swallowing them.
            do {
                try this.file?.write(from: buffer)
            } catch {
                print("file write failed: \(error.localizedDescription)")
            }

            // floatChannelData is non-nil only when the buffer holds 32-bit
            // float samples; each channel pointer exposes `frameLength`
            // samples spaced by `stride` (see AVAudioPCMBuffer docs).
            if buffer.floatChannelData != nil {
                // Serialize the PCM frames and stream them to the server.
                // @TODO: better to hand off to a separate queue for processing.
                let stream = this.toNSData(PCMBuffer: buffer)
                SocketIOManager.sharedInstance.socket.emit("talk", stream)
            }
        })

        engine.prepare()

        do {
            try engine.start()
        } catch {
            // @TODO: error out
            print("engine start failed: \(error.localizedDescription)")
        }
    }

Try this code:

var audioPlayerQueue = DispatchQueue(label: "audioPlayerQueue", qos: DispatchQoS.userInteractive)

var peerAudioPlayer: AVAudioPlayerNode = AVAudioPlayerNode()
var peerInputFormat: AVAudioFormat?

override func viewDidLoad() {
    super.viewDidLoad()
    // Initialize the capture engine; bail out if no input node exists.
    engine = AVAudioEngine()
    guard nil != engine?.inputNode else {
        // @TODO: error out
        return
    }
    // Build the playback graph on the peer engine consistently (the original
    // mixed `engine.attach` with `peerAudioEngine.connect`).
    self.peerAudioEngine.attach(self.peerAudioPlayer)
    // Incoming stream format: 32-bit float, 44.1 kHz, mono, deinterleaved —
    // must match what the sender's tap produces.
    self.peerInputFormat = AVAudioFormat.init(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    self.peerAudioEngine.connect(self.peerAudioPlayer, to: self.peerAudioEngine.mainMixerNode, format: self.peerInputFormat)
    do {
        peerAudioEngine.prepare()
        try peerAudioEngine.start()
    } catch let error {
        print(error.localizedDescription)
    }

    // Schedule each received buffer on a dedicated serial queue so the socket
    // callback never blocks on audio work. Weak capture avoids a retain cycle
    // through the long-lived socket singleton.
    SocketIOManager.sharedInstance.socket.on("listen") { [weak self] data, ack in
        guard let self = self else { return }
        print("socket received \(data)")
        // The payload arrives as [Any]; extract the raw bytes instead of
        // passing the whole array to toPCMBuffer (which takes NSData).
        guard let raw = data.first as? Data else { return }
        let pcmBuffer = self.toPCMBuffer(data: raw as NSData)
        self.audioPlayerQueue.async {
            self.peerAudioPlayer.scheduleBuffer(pcmBuffer, completionHandler: nil)
            if self.peerAudioEngine.isRunning {
                self.peerAudioPlayer.play()
            } else {
                // The engine can be stopped by an interruption; restart lazily.
                do {
                    try self.peerAudioEngine.start()
                } catch {
                    print(error.localizedDescription)
                }
            }
        }
    }
}

/// Reinterprets raw bytes received over the socket as a deinterleaved
/// 32-bit-float, 44.1 kHz, mono `AVAudioPCMBuffer`.
/// - Parameter data: Raw PCM sample bytes. Must actually be Float32 mono at
///   44.1 kHz — the sender's tap format — or playback will be garbled.
///   TODO(review): confirm the sender emits exactly this format.
func toPCMBuffer(data: NSData) -> AVAudioPCMBuffer {
    let audioFormat = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)  // given NSData audio format
    // Frame count = byte count / bytes-per-frame (4 for Float32 mono).
    // NOTE(review): both AVAudioFormat and AVAudioPCMBuffer initializers are
    // failable in modern Swift; this snippet assumes a Swift version where
    // they return non-optionals, and does not guard against zero-length data.
    let PCMBuffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(data.length) / audioFormat.streamDescription.pointee.mBytesPerFrame)
    PCMBuffer.frameLength = PCMBuffer.frameCapacity
    // Copy the incoming bytes straight into channel 0's sample memory
    // (deinterleaved mono => a single contiguous chunk of floats).
    let channels = UnsafeBufferPointer(start: PCMBuffer.floatChannelData, count: Int(PCMBuffer.format.channelCount))
    data.getBytes(UnsafeMutableRawPointer(channels[0]) , length: data.length)
    return PCMBuffer
}

func installTap() {
    // Recreate the engine so repeated calls start from a clean graph, and
    // verify an input node exists before installing the tap.
    engine = AVAudioEngine()
    guard let engine = engine, let input = engine.inputNode else {
        // @TODO: error out
        return
    }

    // ~0.1 s of audio per callback at 44.1 kHz (4410 frames).
    let format = input.inputFormat(forBus: 0)
    // `[weak self]` is required: without it `self` is non-optional inside the
    // closure and `guard let this = self` does not compile; it also avoids
    // retaining the controller from the engine's tap.
    input.installTap(onBus: 0, bufferSize: 4410, format: format, block: { [weak self] buffer, when in
        guard let this = self else {
            return
        }
        // Serialize the captured PCM frames and stream them to the server.
        let stream = this.toNSData(PCMBuffer: buffer)
        SocketIOManager.sharedInstance.socket.emit("talk", stream)
    })

    do {
        engine.prepare()
        try engine.start()
    } catch {
        // @TODO: error out
        print("engine start failed: \(error.localizedDescription)")
    }
}

// **Edit: For Enable Lound Speaker**

/// Routes play-and-record audio to the loudspeaker (or back to the receiver).
/// - Parameter enabled: `true` to insert `.defaultToSpeaker`, `false` to remove it.
/// - Returns: `true` if the session category was updated, `false` on failure.
func speakerEnabled(_ enabled: Bool) -> Bool {
    let session = AVAudioSession.sharedInstance()
    var options = session.categoryOptions

    if enabled {
        options.insert(.defaultToSpeaker)
    } else {
        options.remove(.defaultToSpeaker)
    }

    // Avoid `try!`: setCategory can throw (e.g. while another app holds the
    // audio hardware), and crashing the app here is not recoverable. Report
    // failure through the return value instead of always returning true.
    do {
        try session.setCategory(AVAudioSessionCategoryPlayAndRecord, with: options)
        return true
    } catch {
        print(error.localizedDescription)
        return false
    }
}

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address. For any questions, please contact: yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM