首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >AudioKit播放破音

AudioKit播放破音(爆裂声)
EN

Stack Overflow用户
提问于 2020-06-06 23:20:56
回答 1查看 46关注 0票数 0

我想要分析麦克风的输入频率,然后播放接近所确定的频率的正确音符。我用AudioKit这样做了。

这是现在的工作,但由于我实现了AudioKit来获得频率特征,在频率检测后播放的声音,在播放过程中有时会出现裂缝。这发生在我实现AudioKit之后。在那之前一切都很好。

代码语言:swift
复制
var mic: AKMicrophone!
var tracker: AKFrequencyTracker!
var silence: AKBooster!

func initFrequencyTracker() {
        AKSettings.channelCount = 2
        AKSettings.audioInputEnabled = true
        AKSettings.defaultToSpeaker = true
        AKSettings.allowAirPlay = true
        AKSettings.useBluetooth = true
        AKSettings.allowHapticsAndSystemSoundsDuringRecording = true
        mic = AKMicrophone()
        tracker = AKFrequencyTracker(mic)
        silence = AKBooster(tracker, gain: 0)
    }

    func deinitFrequencyTracker() {
        AKSettings.audioInputEnabled = false
        plotTimer.invalidate()
        do {
            try AudioKit.stop()
            AudioKit.output = nil
        } catch {
            print(error)
        }
    }

    func initPlotTimer() {
        AudioKit.output = silence
        do {
            try AKSettings.setSession(category: .playAndRecord, with: [.defaultToSpeaker, .allowBluetooth, .allowAirPlay, .allowBluetoothA2DP])
            try AudioKit.start()
        } catch {
            AKLog("AudioKit did not start!")
        }
        setupPlot()
        plotTimer = Timer.scheduledTimer(timeInterval: 0.1, target: self, selector: #selector(updatePlotUI), userInfo: nil, repeats: true)
    }

    func setupPlot() {
        let plot = AKNodeOutputPlot(mic, frame: audioInputPlot.bounds)
        plot.translatesAutoresizingMaskIntoConstraints = false
        plot.alpha = 0.3
        plot.plotType = .rolling
        plot.shouldFill = true
        plot.shouldCenterYAxis = false
        plot.shouldMirror = true
        plot.color = UIColor(named: uiFarbe)
        audioInputPlot.addSubview(plot)

        // Pin the AKNodeOutputPlot to the audioInputPlot
        var constraints = [plot.leadingAnchor.constraint(equalTo: audioInputPlot.leadingAnchor)]
        constraints.append(plot.trailingAnchor.constraint(equalTo: audioInputPlot.trailingAnchor))
        constraints.append(plot.topAnchor.constraint(equalTo: audioInputPlot.topAnchor))
        constraints.append(plot.bottomAnchor.constraint(equalTo: audioInputPlot.bottomAnchor))
        constraints.forEach { $0.isActive = true }
    }

    @objc func updatePlotUI() {
        if tracker.amplitude > 0.3 {
            let trackerFrequency = Float(tracker.frequency)

            guard trackerFrequency < 7_000 else {
                // This is a bit of hack because of modern Macbooks giving super high frequencies
                return
            }



            var frequency = trackerFrequency
            while frequency > Float(noteFrequencies[noteFrequencies.count - 1]) {
                frequency /= 2.0
            }
            while frequency < Float(noteFrequencies[0]) {
                frequency *= 2.0
            }

            var minDistance: Float = 10_000.0
            var index = 0

            for i in 0..<noteFrequencies.count {
                let distance = fabsf(Float(noteFrequencies[i]) - frequency)
                if distance < minDistance {
                    index = i
                    minDistance = distance
                }
                print(minDistance, distance)
            }
            //                let octave = Int(log2f(trackerFrequency / frequency))

            frequencyLabel.text = String(format: "%0.1f", tracker.frequency)

            if frequencyTranspose(note: notesToTanspose[index]) != droneLabel.text {
                momentaneNote = frequencyTranspose(note: notesToTanspose[index])
                droneLabel.text = momentaneNote
                stopSinglePlayer()
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.03, execute: {
                    self.prepareSinglePlayerFirstForStart(note: self.momentaneNote)
                    self.startSinglePlayer()
                })
            }

        }
    }

    func frequencyTranspose(note: String) -> String {
        var indexNote = notesToTanspose.firstIndex(of: note)!
        let chosenInstrument = UserDefaults.standard.object(forKey: "whichInstrument") as! String
        if chosenInstrument == "Bb" {
            if indexNote + 2 >= notesToTanspose.count {
                indexNote -= 12
            }
            return notesToTanspose[indexNote + 2]
        } else if chosenInstrument == "Eb" {
            if indexNote - 3 < 0 {
                indexNote += 12
            }
            return notesToTanspose[indexNote - 3]
        } else {
            return note
        }
    }
EN

回答 1

Stack Overflow用户

发布于 2020-06-07 00:07:44

似乎可以通过将iOS的多线程原理付诸实践来略微改进您的实现。现在,我不是这方面的专家,但如果我们看一看这句话:“在频率检测后播放的声音在回放过程中有时会破裂”。

我想指出的是,“裂缝”的“频率”是随机的或不可预测的,这发生在计算过程中。

因此,将不需要在主线程中执行的计算代码移到后台线程(https://developer.apple.com/documentation/dispatch)

在重构时,您可以通过调整Timer回调的调用间隔来测试您的实现:例如把间隔减小到0.05(提高调用频率),爆裂声应该会更频繁;反之,把间隔增大到0.2(降低调用频率),您应该会听到更少的随机爆裂声。

现在,在考虑并发性时,这说起来容易做起来难,但这正是您需要改进的地方。

票数 0
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/62233803

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档