当AVAudioEngine和AVAudioPlayerNodes具有不同的采样率时,如何配置它们才能以正确的音高播放音频文件?
我读到混频器可以处理采样率转换,但我还没有实现这个行为。我正在使用扩展来播放循环的音频片段,播放器应该可以处理压缩和PCM文件;我不知道这是否会决定解决方案。
我试图在AVAudioPlayerNode上的installTap(onBus:bufferSize:format:block:)块中使用AVAudioConverter,但是遇到了各种各样的崩溃,并且不确定这是否是正确的解决方案。我是在正确的轨道上,还是有一个更简单的解决方案?
import AVFoundation
import SwiftUI
@main
struct SampleRateMixerApp: App {
    private let audioEngine = AVAudioEngine()
    private let playerA = AVAudioPlayerNode()
    private let playerB = AVAudioPlayerNode()

    var body: some Scene {
        WindowGroup {
            Button("Play") { try? playFiles() }
            Button("Stop") { playerA.stop(); playerB.stop() }
        }
    }

    /// Starts the engine and schedules both bundled sample files for playback.
    /// - Throws: Any error from starting the engine or reading the audio files.
    func playFiles() throws {
        _ = audioEngine.mainMixerNode // Touch the mixer so the implicit mainMixerNode graph exists before start().
        audioEngine.prepare()         // prepare() must run BEFORE start(); calling it afterwards does nothing.
        try audioEngine.start()
        let pathA = Bundle.main.path(forResource: "32khz_sample_rate", ofType: "aif")!
        try setupPlayerNode(player: playerA, withAudioEngine: audioEngine, atPath: pathA)
        let pathB = Bundle.main.path(forResource: "48khz_sample_rate", ofType: "mp3")!
        try setupPlayerNode(player: playerB, withAudioEngine: audioEngine, atPath: pathB)
    }

    /// Attaches `player` to `engine`, connects it using the file's own processing
    /// format, and schedules a looping segment of the file at `path`.
    /// - Throws: Any error from opening or reading the audio file.
    func setupPlayerNode(player: AVAudioPlayerNode, withAudioEngine engine: AVAudioEngine, atPath path: String) throws {
        // Build a proper file URL. URL(string:) yields a scheme-less URL and
        // returns nil for paths containing spaces, which would crash the force unwrap.
        let url = URL(fileURLWithPath: path)
        let file = try AVAudioFile(forReading: url)
        engine.attach(player)
        // Connect with the file's processingFormat so the player node outputs at the
        // file's sample rate and the mixer performs the rate conversion. Connecting
        // with format nil adopts the mixer's format and plays at the wrong pitch.
        engine.connect(player, to: engine.mainMixerNode, format: file.processingFormat)
        let frameCount = AVAudioFrameCount(file.length)
        let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: frameCount)!
        try file.read(into: buffer)
        player.scheduleBufferSegment(buffer, range: 0.25...0.75, looping: true)
        player.play()
    }
}
extension AVAudioPlayerNode {
    /// Schedules a sub-range of `buffer` for playback, where `range` gives the
    /// start and end as fractions (0...1) of the buffer's frame length.
    /// When `looping` is false, the node stops itself once the segment completes.
    func scheduleBufferSegment(_ buffer: AVAudioPCMBuffer, range: ClosedRange<Double>, looping: Bool) {
        let totalFrames = Double(buffer.frameLength)
        let firstFrame = AVAudioFramePosition(range.lowerBound * totalFrames)
        let lastFrame = AVAudioFramePosition(range.upperBound * totalFrames)
        guard let slice = buffer.segment(from: firstFrame, to: lastFrame) else { return }

        if looping {
            // A looping segment plays until the node is stopped externally.
            scheduleBuffer(slice, at: nil, options: [.loops, .interrupts])
            return
        }
        // One-shot: hop to the main queue on completion and stop the node.
        scheduleBuffer(slice, at: nil, options: [.interrupts]) {
            DispatchQueue.main.async { [weak self] in
                self?.stop()
            }
        }
    }
}
extension AVAudioPCMBuffer {
    /// Returns a new buffer containing the frames in [startFrame, endFrame)
    /// copied from this buffer, or nil when the range is invalid or the
    /// destination buffer cannot be allocated.
    func segment(from startFrame: AVAudioFramePosition, to endFrame: AVAudioFramePosition) -> AVAudioPCMBuffer? {
        // Validate the range up front: without this, memcpy would read past the end
        // of the source buffer, and a reversed range would trap when the negative
        // frame count is converted to AVAudioFrameCount (UInt32).
        guard startFrame >= 0,
              endFrame >= startFrame,
              endFrame <= AVAudioFramePosition(frameLength) else { return nil }
        let framesToCopy = AVAudioFrameCount(endFrame - startFrame)
        guard let segment = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: framesToCopy) else { return nil }
        let sourcePointer = UnsafeMutableAudioBufferListPointer(mutableAudioBufferList)
        let destinationPointer = UnsafeMutableAudioBufferListPointer(segment.mutableAudioBufferList)
        // Frame stride within each AudioBuffer of this format (per channel when
        // deinterleaved, all channels when interleaved).
        let bytesPerFrame = Int(format.streamDescription.pointee.mBytesPerFrame)
        // Copy the requested frame window channel-buffer by channel-buffer.
        for (source, destination) in zip(sourcePointer, destinationPointer) {
            memcpy(destination.mData,
                   source.mData?.advanced(by: Int(startFrame) * bytesPerFrame),
                   Int(framesToCopy) * bytesPerFrame)
        }
        segment.frameLength = framesToCopy
        return segment
    }
}
发布于 2021-07-08 03:23:26
在加载每个文件后调用AVAudioEngine的connect(_:to:format:),传入AVAudioFile的processingFormat。这样可以确保AVAudioPlayerNode的输出设置为与它将要播放的文件相同的采样率。
下面的代码演示了如何做到这一点。该问题与播放缓冲区段无关,因此未将其包含在解决方案中。虽然没有先断开播放器就能正确播放音频,但我怀疑它可能会泄漏内存,所以为了安全起见,首先断开了节点。
import AVFoundation
// Engine and player are declared at file scope so they stay alive for the
// duration of playback instead of being deallocated when playFiles() returns.
let audioEngine = AVAudioEngine()
let player = AVAudioPlayerNode()
/// Plays the bundled 32 kHz AIFF at correct pitch by (re)connecting the player
/// to the mixer with the file's own processingFormat, letting the mixer perform
/// the sample-rate conversion.
/// - Throws: Any error from starting the engine or reading the audio file.
func playFiles() throws {
    _ = audioEngine.mainMixerNode // Called to ensure mainMixerNode singleton is instantiated
    audioEngine.prepare()         // prepare() must precede start(); calling it after start() is a no-op
    try audioEngine.start()
    audioEngine.attach(player)
    let path = Bundle.main.path(forResource: "32khz", ofType: "aif")!
    // Build a file URL. URL(string:) produces a scheme-less URL and returns nil
    // for paths containing spaces, which would crash the force unwrap.
    let url = URL(fileURLWithPath: path)
    let file = try AVAudioFile(forReading: url)
    let frameCount = AVAudioFrameCount(file.length)
    let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity: frameCount)!
    try file.read(into: buffer)
    audioEngine.disconnectNodeOutput(player) // May be unnecessary
    // Key step: connect with the file's processingFormat so the player outputs at
    // the file's sample rate and the mixer resamples to the output rate.
    audioEngine.connect(player, to: audioEngine.mainMixerNode, format: file.processingFormat)
    player.scheduleBuffer(buffer)
    player.play()
}
https://stackoverflow.com/questions/68278238
复制相似问题