import UIKit
import PushToTalk
import AVFoundation
import Kingfisher
import Accelerate

@available(iOS 16.0, *)
class PTTManager: NSObject, PTChannelRestorationDelegate {

    static let shared = PTTManager()

    private var channelManager: PTChannelManager?
    private var channelDescriptor: PTChannelDescriptor!
    private var channelUUID: UUID!
    var audioFilename: URL?

    /// Whether the user is currently talking
    private var cs_isTalking = false
    // private var currentChannel: PTChannel?

    override init() {
        super.init()
        channelDescriptor = PTChannelDescriptor(name: "触说", image: UIImage(named: "logo"))
    }

    func setupChannelManager(channelUUID: UUID) {
        Task {
            self.channelUUID = channelUUID
            do {
                // Create the shared channel manager once, then join the channel.
                if channelManager == nil {
                    channelManager = try await PTChannelManager.channelManager(delegate: self,
                                                                               restorationDelegate: self)
                    try await channelManager?.setTransmissionMode(.fullDuplex, channelUUID: channelUUID)
                }
                self.requestJoinChannel()
            } catch {
                print("PTT initialization failed")
            }
        }
    }

    private func requestJoinChannel() {
        self.channelManager!.requestJoinChannel(channelUUID: self.channelUUID, descriptor: self.channelDescriptor)
        self.channelManager!.setChannelDescriptor(channelDescriptor, channelUUID: self.channelUUID) { err in }
        if #available(iOS 17.0, *) {
            self.channelManager?.setAccessoryButtonEventsEnabled(true, channelUUID: channelUUID)
        } else {
            // Fallback on earlier versions
        }
        self.setServiceStatus(status: .ready)
    }

    func leaveChannel() {
        if let channelManager = self.channelManager {
            stopTransmitting()
            channelManager.leaveChannel(channelUUID: channelUUID)
            self.setServiceStatus(status: .unavailable)
        }
    }

    /// Begin transmitting
    func requestBeginTransmitting() {
        if let channelManager = self.channelManager {
            channelManager.setActiveRemoteParticipant(nil, channelUUID: channelUUID) { err in }
            channelManager.requestBeginTransmitting(channelUUID: channelUUID)
        } else {
            print("PTT is not initialized yet")
        }
    }

    /// Stop transmitting
    func stopTransmitting() {
        if let channelManager = self.channelManager {
            channelManager.stopTransmitting(channelUUID: channelUUID)
        } else {
            print("PTT is not initialized yet")
        }
    }

    /// Restore the active channel
    func channelDescriptor(restoredChannelUUID channelUUID: UUID) -> PTChannelDescriptor {
        return channelDescriptor
    }

    /// Report the service status to the system
    func setServiceStatus(status: PTServiceStatus = .connecting) {
        Task {
            if let channelManager = self.channelManager {
                do {
                    try await channelManager.setServiceStatus(status, channelUUID: channelUUID)
                } catch {
                    print("Failed to set PTT service status")
                }
            }
        }
    }

    func getAudioFileURL(convert: String = ".wav") -> URL {
        var documentsPath = FileManager.default.urls(for: .libraryDirectory, in: .userDomainMask)[0]
        documentsPath = documentsPath.appendingPathComponent("Caches")
        let fileName = "Voice_\(Int(Date().timeIntervalSince1970 * 1000))\(convert)"
        return documentsPath.appendingPathComponent(fileName)
    }

    var audioPlayer: AVAudioPlayer?

    func playRecording(url: URL) {
        do {
            let player = try AVAudioPlayer(contentsOf: url)
            // Keep a strong reference; otherwise the player is deallocated before playback starts.
            audioPlayer = player
            player.prepareToPlay()
            player.play()
            print("🔊 Playing recording: \(url.lastPathComponent)")
        } catch {
            print("❌ Playback failed:", error)
        }
    }

    private let engine = AVAudioEngine()
    private lazy var inputNode = engine.inputNode
    private let bus = 0
    private var audioFile: AVAudioFile?
    private var converter: AVAudioConverter?
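
    // MARK: - Audio session (illustrative sketch, not part of the original manager)
    // Push to Talk activates and deactivates the AVAudioSession on the app's behalf,
    // but the app is still expected to configure the session category itself
    // (typically .playAndRecord). This helper is an assumption added for illustration;
    // call it once during setup if the session is not configured elsewhere.
    private func configureAudioSessionIfNeeded() {
        do {
            try AVAudioSession.sharedInstance().setCategory(.playAndRecord,
                                                            mode: .voiceChat,
                                                            options: [.allowBluetooth])
        } catch {
            print("❌ Failed to configure AVAudioSession: \(error.localizedDescription)")
        }
    }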

    /// Start recording and capture the audio stream
    func startEngineRecording() {
        let targetSampleRate: Double = 16000.0
        let inputFormat = inputNode.inputFormat(forBus: bus)
        print("🎧 Hardware input format: \(inputFormat)")

        // Target format: 16-bit PCM, mono
        guard let targetFormat = AVAudioFormat(commonFormat: .pcmFormatInt16,
                                               sampleRate: targetSampleRate,
                                               channels: 1,
                                               interleaved: true) else {
            print("❌ Failed to create target format")
            return
        }

        // Set up the converter from the hardware format to the target format
        converter = AVAudioConverter(from: inputFormat, to: targetFormat)

        // Frames in 10 ms of audio at the target sample rate (160 at 16 kHz)
        let frameCountPer10ms = AVAudioFrameCount(targetSampleRate * 0.01)

        // Create a file for saving the recording
        do {
            let url = getAudioFileURL()
            audioFilename = url
            audioFile = try AVAudioFile(forWriting: url, settings: inputFormat.settings)
            print("✅ Recording file path: \(url.path)")
        } catch {
            print("❌ Failed to create audio file: \(error.localizedDescription)")
            return
        }

        inputNode.removeTap(onBus: bus) // Avoid installing a duplicate tap

        // Install a tap to capture the audio stream
        inputNode.installTap(onBus: bus, bufferSize: 1024, format: inputFormat) { [weak self] buffer, time in
            guard let self = self, let file = self.audioFile else { return }

            // Write the raw buffer to the file in real time
            do {
                try file.write(from: buffer)
            } catch {
                print("❌ Failed to write to file: \(error.localizedDescription)")
            }

            guard let converter = self.converter else { return }

            // Output buffer (Int16)
            guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: targetFormat,
                                                   frameCapacity: frameCountPer10ms) else { return }

            var error: NSError?
            let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
                outStatus.pointee = .haveData
                return buffer
            }

            converter.convert(to: pcmBuffer, error: &error, withInputFrom: inputBlock)

            if let error = error {
                print("⚠️ Conversion error: \(error.localizedDescription)")
                return
            }

            // `data` now holds 10 ms of 16 kHz mono 16-bit PCM; this is where the chunk
            // would be handed off (for example, sent over the network).
            let audioBuffer = pcmBuffer.audioBufferList.pointee.mBuffers
            let data = Data(bytes: audioBuffer.mData!, count: Int(audioBuffer.mDataByteSize))
            _ = data
        }

        do {
            try engine.start()
            print("🎤 Recording started, capturing the audio stream...")
        } catch {
            print("❌ Failed to start AVAudioEngine: \(error.localizedDescription)")
        }

        print("🎧 Target format:", targetFormat)
        print("🎧 File format:", audioFile?.fileFormat ?? "unknown")
        print("🎧 Write settings:", targetFormat.settings)
    }

    /// Stop recording
    func stopEngineRecording() {
        cleanupEngineRecording()
    }

    func cleanupEngineRecording() {
        inputNode.removeTap(onBus: bus)
        engine.stop()
        stopTransmitting()
        if let audioFilename = self.audioFilename {
            playRecording(url: audioFilename)
        }
        print("Recording stopped")
    }
}

@available(iOS 16.0, *)
extension PTTManager: PTChannelManagerDelegate {

    func channelManager(_ channelManager: PTChannelManager, didJoinChannel channelUUID: UUID, reason: PTChannelJoinReason) {
        print("Joined channel")
    }

    func channelManager(_ channelManager: PTChannelManager, didLeaveChannel channelUUID: UUID, reason: PTChannelLeaveReason) {
        print("Left channel")
    }

    func channelManager(_ channelManager: PTChannelManager, channelUUID: UUID, didBeginTransmittingFrom source: PTChannelTransmitRequestSource) {
    }

    func channelManager(_ channelManager: PTChannelManager, channelUUID: UUID, didEndTransmittingFrom source: PTChannelTransmitRequestSource) {
        print("Transmission ended")
    }

    func channelManager(_ channelManager: PTChannelManager, receivedEphemeralPushToken pushToken: Data) {
        let deviceTokenStr = pushToken.map { String(format: "%02.2hhx", $0) }.joined()
        print("Received ephemeral push token: \(deviceTokenStr)")
    }

    func channelManager(_ channelManager: PTChannelManager, didActivate audioSession: AVAudioSession) {
        print("Audio session activated")
        DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
            self.startEngineRecording()
        }
    }

    func channelManager(_ channelManager: PTChannelManager, didDeactivate audioSession: AVAudioSession) {
        print("Audio session deactivated")
        stopEngineRecording()
    }

    /// Called for every incoming push. You must instantiate and return a non-nil PTPushResult
    /// for each push; the system automatically performs the action the PTPushResult specifies.
    func incomingPushResult(channelManager: PTChannelManager, channelUUID: UUID, pushPayload: [String: Any]) -> PTPushResult {
        guard let activeSpeaker = pushPayload["触说"] as? String else {
            // Report that there's no active speaker, so leave the channel.
            return .leaveChannel
        }

        // Get the cached image for the active speaker.
        let activeSpeakerImage = UIImage(named: "logo")
        let participant = PTParticipant(name: activeSpeaker, image: activeSpeakerImage)

        // Report the active participant information to the system.
        return .activeRemoteParticipant(participant)
    }
}