
Can't stream audio from a Bluetooth device

JLT Carson  ·  asked 6 years ago

    The code below is what I use to stream the audio data.

    func prepareStreamRecording() throws -> OSStatus {
        try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(10)
    
        // Describe the RemoteIO unit
        var audioComponentDescription = AudioComponentDescription()
        audioComponentDescription.componentType = kAudioUnitType_Output
        audioComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO
        audioComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple
        audioComponentDescription.componentFlags = 0
        audioComponentDescription.componentFlagsMask = 0
    
        // Get the RemoteIO unit
        let remoteIOComponent = AudioComponentFindNext(nil, &audioComponentDescription)
        var status = AudioComponentInstanceNew(remoteIOComponent!, &remoteIOUnit)
    
        if (status != noErr) {
            return status
        }
    
        let bus1 : AudioUnitElement = 1
        var oneFlag : UInt32 = 1
    
        // Configure the RemoteIO unit for input
        status = AudioUnitSetProperty(remoteIOUnit!,
                                      kAudioOutputUnitProperty_EnableIO,
                                      kAudioUnitScope_Input,
                                      bus1,
                                      &oneFlag,
                                      UInt32(MemoryLayout<UInt32>.size))
        if (status != noErr) {
            return status
        }
    
        // Set format for mic input (bus 1) on RemoteIO's output scope
        var asbd = AudioStreamBasicDescription()
        asbd.mSampleRate = 16000
        asbd.mFormatID = kAudioFormatLinearPCM
        asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
        asbd.mBytesPerPacket = 2
        asbd.mFramesPerPacket = 1
        asbd.mBytesPerFrame = 2
        asbd.mChannelsPerFrame = 1
        asbd.mBitsPerChannel = 16
        status = AudioUnitSetProperty(remoteIOUnit!,
                                      kAudioUnitProperty_StreamFormat,
                                      kAudioUnitScope_Output,
                                      bus1,
                                      &asbd,
                                      UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
    
        if (status != noErr) {
            return status
        }
    
        // Set the recording callback
        var callbackStruct = AURenderCallbackStruct()
        callbackStruct.inputProc = recordingCallback
        callbackStruct.inputProcRefCon = nil
        status = AudioUnitSetProperty(remoteIOUnit!,
                                      kAudioOutputUnitProperty_SetInputCallback,
                                      kAudioUnitScope_Global,
                                      bus1,
                                      &callbackStruct,
                                      UInt32(MemoryLayout<AURenderCallbackStruct>.size))
        if (status != noErr) {
            return status
        }
        // Initialize the RemoteIO unit
        return AudioUnitInitialize(remoteIOUnit!)
    }
    
    func startStreamRecording(handler: ((_ data: Data) -> Void)?) -> OSStatus {
        streamHandler = handler
        if remoteIOUnit == nil {
            return -1
        }
        return AudioOutputUnitStart(remoteIOUnit!)
    }
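
    For reference, here is roughly how these functions get wired together. The AudioManager class in this sketch is my reconstruction from the AudioManager.shared.remoteIOUnit and AudioManager.shared.streamHandler references in the callback further down; the original sample presumably declares something similar, with the two functions above as its methods.

    import AVFoundation
    import AudioToolbox

    // Hypothetical wrapper, reconstructed from the AudioManager.shared
    // references in the recording callback below.
    class AudioManager {
        static let shared = AudioManager()
        var remoteIOUnit: AudioComponentInstance?
        var streamHandler: ((_ data: Data) -> Void)?

        // prepareStreamRecording() and startStreamRecording(handler:)
        // from above are assumed to live here.
    }

    // Usage sketch: prepare the RemoteIO unit, then start pulling mic data.
    do {
        let manager = AudioManager.shared
        var status = try manager.prepareStreamRecording()
        if status == noErr {
            status = manager.startStreamRecording { data in
                // Chunks of 16 kHz mono 16-bit PCM arrive here.
                NSLog("received \(data.count) bytes")
            }
        }
    } catch {
        NSLog("Audio session setup failed: \(error)")
    }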
    

    The audio data is received through this callback:

    func recordingCallback(inRefCon: UnsafeMutableRawPointer,
                           ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                           inTimeStamp: UnsafePointer<AudioTimeStamp>,
                           inBusNumber: UInt32,
                           inNumberFrames: UInt32,
                           ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
        var status = noErr
        let channelCount : UInt32 = 1
        var bufferList = AudioBufferList()
        bufferList.mNumberBuffers = channelCount
        let buffers = UnsafeMutableBufferPointer<AudioBuffer>(start: &bufferList.mBuffers,
                                                              count: Int(bufferList.mNumberBuffers))
        buffers[0].mNumberChannels = 1
        buffers[0].mDataByteSize = inNumberFrames * 2
        buffers[0].mData = nil
    
        // get the recorded samples
        status = AudioUnitRender(AudioManager.shared.remoteIOUnit!,
                                 ioActionFlags,
                                 inTimeStamp,
                                 inBusNumber,
                                 inNumberFrames,
                                 &bufferList)
        if (status != noErr) {
            return status
        }
    
        let data = Data(bytes:  buffers[0].mData!, count: Int(buffers[0].mDataByteSize))
    
        NSLog("recorded data length is \(data.count)")
        NSLog("Recorded data part is \(data.subdata(in: 0..<50).hexadecimal())")
    
        AudioManager.shared.streamHandler?(data)
        return noErr
    }
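
    Note that hexadecimal() is not a standard Data method, so the sample presumably ships an extension for it. A minimal version (my reconstruction, not the original) could be:

    import Foundation

    extension Data {
        // Render the bytes as a lowercase hex string, e.g. "00ff3a".
        func hexadecimal() -> String {
            return map { String(format: "%02x", $0) }.joined()
        }
    }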
    

    The code works when recording with the phone's microphone. However, when a Bluetooth microphone is connected, the recorded data is always 00000000000000000000000….

    Note that I didn't write this code; I took it from a sample app Google made to demonstrate their Cloud Speech API.

    1 Answer  |  6 years ago
JLT Carson  ·  answered 6 years ago

    I solved this by changing the value of the audio session's preferred IO buffer duration. I currently have it set to 0.01:

    try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(0.01)
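
    For completeness, a fuller session setup for a Bluetooth mic route might look like the sketch below. The .playAndRecord category and the .allowBluetooth option are my assumptions about what a Bluetooth (HFP) input route needs; the original sample only sets the buffer duration.

    import AVFoundation

    func configureSessionForBluetoothMic() throws {
        let session = AVAudioSession.sharedInstance()
        // .allowBluetooth lets the session take input from HFP headsets
        // in addition to the built-in microphone.
        try session.setCategory(.playAndRecord,
                                mode: .default,
                                options: [.allowBluetooth])
        try session.setPreferredIOBufferDuration(0.01)
        try session.setActive(true)
    }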