How to monitor audio input on iOS using Swift - example?

disbfnqx · posted 2023-10-15 in Swift
Follow (0) | Answers (3) | Views (165)

I'd like to write a simple app that "does something" when the sound level at the microphone reaches a certain level, and shows the audio input level for extra credit.
I can't seem to find any examples of this in Swift - I don't want to record the audio, just monitor it.
I've been looking at the documentation for the AVFoundation classes but can't get started.
Thanks

j8ag8udp 1#

You can use the code below:
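
The functions below refer to a few properties that are assumed to be declared on the view controller. A matching set of declarations (Swift 2-era types; the class name mirrors the Swift 5.9 version further down) would look like this:

import UIKit
import AVFoundation

class ViewController: UIViewController, AVAudioRecorderDelegate {
    var recordingSession: AVAudioSession!
    var audioRecorder: AVAudioRecorder?
    var levelTimer = NSTimer()
    var lowPassResults: Double = 0.0

    // ... the functions below go inside this class ...
}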

func initalizeRecorder ()
 {
    do {

        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord)
        try AVAudioSession.sharedInstance().setActive(true)

    }catch{
        print(error);
    }

    let stringDir:NSString = self.getDocumentsDirectory();
    let audioFilename = stringDir.stringByAppendingPathComponent("recording.m4a")
    let audioURL = NSURL(fileURLWithPath: audioFilename)
    print("File Path : \(audioFilename)");

    // make a dictionary to hold the recording settings so we can instantiate our AVAudioRecorder

    let settings = [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 12000.0,
        AVNumberOfChannelsKey: 1 as NSNumber,
        AVEncoderBitRateKey:12800 as NSNumber,
        AVLinearPCMBitDepthKey:16 as NSNumber,
        AVEncoderAudioQualityKey: AVAudioQuality.High.rawValue
    ]


    do {
        if audioRecorder == nil {
            audioRecorder = try AVAudioRecorder(URL: audioURL, settings: settings)
            audioRecorder!.delegate = self
            audioRecorder!.prepareToRecord();
            audioRecorder!.meteringEnabled = true;
        }
        audioRecorder!.recordForDuration(NSTimeInterval(5.0));
    } catch {
        print("Error")
    }
}

//GET DOCUMENT DIR PATH
 func getDocumentsDirectory() -> String {
    let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
    let documentsDirectory = paths[0]
    return documentsDirectory
}
////START RECORDING
@IBAction func btnStartPress(sender: AnyObject) {

    recordingSession = AVAudioSession.sharedInstance()
    do {

        recordingSession.requestRecordPermission() { [unowned self] (allowed: Bool) -> Void in
            dispatch_async(dispatch_get_main_queue()) {
                if allowed {
                 print("Allowd Permission Record!!")
                    self.initalizeRecorder ()
                    self.audioRecorder!.record()

                    //instantiate a timer to be called with whatever frequency we want to grab metering values
                    self.levelTimer = NSTimer.scheduledTimerWithTimeInterval(0.02, target: self, selector: Selector("levelTimerCallback"), userInfo: nil, repeats: true)

                } else {
                    // failed to record!
                    self.showPermissionAlert();
                    print("Failed Permission Record!!")
                }
            }
        }
    } catch {
        // failed to record!
        print("Failed Permission Record!!")
    }

}

//This selector/function is called every time our timer (levelTimer) fires
func levelTimerCallback() {
    //we have to update meters before we can get the metering values
    if audioRecorder != nil {
        audioRecorder!.updateMeters()

        let ALPHA : Double = 0.05;
        let peakPowerForChannel : Double = pow(Double(10.0), (0.05) * Double(audioRecorder!.peakPowerForChannel(0)));
        lowPassResults = ALPHA * peakPowerForChannel + Double((1.0) - ALPHA) * lowPassResults;
        print("low pass res = \(lowPassResults)");
        if (lowPassResults > 0.7) {
            print("Mic blow detected");
        }
    }
}

//STOP RECORDING
@IBAction func btnStopPress(sender: AnyObject) {

    if audioRecorder != nil {
        audioRecorder!.stop()
        self.levelTimer.invalidate()
    }
}
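
For reference, peakPowerForChannel(_:) reports decibels relative to full scale (0 dB is the loudest the input can register, more negative is quieter), and pow(10, 0.05 * dB) converts that reading into a linear amplitude between 0 and 1. A quick calculation shows what the 0.7 threshold above corresponds to:

import Foundation

// 20 * log10(amplitude) converts a linear amplitude back to decibels full scale.
let thresholdLinear = 0.7
let thresholdDB = 20 * log10(thresholdLinear)
print(thresholdDB)   // ≈ -3.1 dBFS, only about 3 dB below the loudest possible input
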
uidvcgyl 2#

With AVAudioRecorder you can "record audio" (you don't have to save it) and set meteringEnabled so that you can use the function peakPowerForChannel(_:), which returns the peak power, in decibels, for a given channel of the sound being recorded.
This link may provide a sample code.
Let me know if it helps.
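
A minimal sketch of that approach, assuming the record permission has already been granted (the class name, file name, polling interval and -10 dB threshold are placeholders, not something from the original answer). The recorder writes into a throwaway file in the temporary directory purely so that metering has something to measure:

import AVFoundation

final class MicLevelMonitor {
    private var recorder: AVAudioRecorder?
    private var timer: Timer?

    func start() throws {
        let session = AVAudioSession.sharedInstance()
        try session.setCategory(.playAndRecord)
        try session.setActive(true)

        // Record into a throwaway file; only the meters are ever read, never the file.
        let url = FileManager.default.temporaryDirectory.appendingPathComponent("metering.m4a")
        let settings: [String: Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 44100.0,
            AVNumberOfChannelsKey: 1
        ]
        let recorder = try AVAudioRecorder(url: url, settings: settings)
        recorder.isMeteringEnabled = true
        recorder.record()
        self.recorder = recorder

        // Poll the meters a few times per second.
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let recorder = self?.recorder else { return }
            recorder.updateMeters()
            let dB = recorder.peakPower(forChannel: 0)   // 0 dBFS = loudest, more negative = quieter
            if dB > -10 {
                print("loud sound detected (\(dB) dBFS)")
            }
        }
    }

    func stop() {
        timer?.invalidate()
        timer = nil
        recorder?.stop()
        recorder = nil
    }
}

Create a MicLevelMonitor, call start() once the microphone permission callback returns true, and call stop() when you no longer need the levels.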

lymnna71 3#

@JayMehta's code, updated to Swift 5.9 (iOS 13 to 16).

import UIKit
import AVFoundation
import DGCharts

struct Constants {
    static let sampleTime = 0.02  // time between peak power calculations - seconds
    static let filterTimeConstant = 0.4  // time to reach 63% of input - seconds
    static let ALPHA = 1 - exp(-sampleTime / filterTimeConstant)  // zero-order hold low-pass filter coefficient
}
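
// The levelTimerCallback() below applies a first-order (exponential) low-pass filter:
//     lowPassResults = ALPHA * peakPower + (1 - ALPHA) * lowPassResults
// Choosing ALPHA = 1 - exp(-sampleTime / filterTimeConstant) makes the discrete filter
// behave like an analog RC low-pass with time constant filterTimeConstant: after a step
// change in level the output covers about 63% of that change within one time constant
// (0.4 s here), so short claps are smoothed out instead of toggling the threshold
// on every sample.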

class ViewController: UIViewController, AVAudioRecorderDelegate {
    
    var audioRecorder: AVAudioRecorder?
    var levelTimer = Timer()
    var lowPassResults = 0.0
    
    func initializeRecorder() {
        do {
            try AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            print(error)
        }
        
        let stringDir = NSString(string: self.getDocumentsDirectory())
        let audioFilename = stringDir.appendingPathComponent("recording.m4a")
        let audioURL = URL(fileURLWithPath: audioFilename)
        print("File Path: \(audioFilename)")
        
        // make a dictionary to hold the recording settings so we can instantiate our AVAudioRecorder
        let settings: [String : Any] = [
            AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
            AVSampleRateKey: 12000.0,
            AVNumberOfChannelsKey: 1 as NSNumber,
            AVEncoderBitRateKey: 12800 as NSNumber,
            AVLinearPCMBitDepthKey: 16 as NSNumber,
            AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
        ]
        
        do {
            if audioRecorder == nil {
                audioRecorder = try AVAudioRecorder(url: audioURL, settings: settings)
                audioRecorder!.delegate = self
                audioRecorder!.isMeteringEnabled = true
                audioRecorder!.prepareToRecord()
            }
            audioRecorder!.record(forDuration: TimeInterval(5.0))
        } catch {
            print("Error")
        }
    }
    
    // GET DOCUMENT DIR PATH
    func getDocumentsDirectory() -> String {
        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }
    
    // START RECORDING
    @IBAction func btnStartPress(sender: AnyObject) {
        
//        AVAudioApplication.requestRecordPermission() { [unowned self] allowed in // iOS 17+
        AVAudioSession.sharedInstance().requestRecordPermission() { [unowned self] allowed in
            // the permission callback is not guaranteed to arrive on the main queue,
            // so hop over to it before touching the recorder and scheduling the timer
            DispatchQueue.main.async {
                if allowed {
                    print("Allowed permission to record!")
                    self.initializeRecorder()
                    self.audioRecorder!.record()
                    
                    // instantiate a timer to be called with whatever frequency we want to grab metering values
                    self.levelTimer = Timer.scheduledTimer(timeInterval: Constants.sampleTime, target: self, selector: #selector(self.levelTimerCallback), userInfo: nil, repeats: true)
                    
                } else {
                    // failed to record!
//                    showPermissionAlert()
                    print("Failed permission to record!")
                }
            }
        }
    }
    
    // This selector/function is called every time our timer (levelTimer) fires
    @objc func levelTimerCallback() {
        // we have to update meters before we can get the metering values
        audioRecorder!.updateMeters()
        
        let peakPowerForChannel = pow(10, 0.05 * Double(audioRecorder!.peakPower(forChannel: 0)))
        lowPassResults = Constants.ALPHA * peakPowerForChannel + (1 - Constants.ALPHA) * lowPassResults
        print("low pass results = \(lowPassResults)")
        if lowPassResults > 0.7 {
            print("threshold exceeded")
        }
    }
    
    // STOP RECORDING
    @IBAction func btnStopPress(sender: AnyObject) {
        audioRecorder?.stop()   // optional chaining so tapping Stop before Start doesn't crash
        levelTimer.invalidate()
    }
}
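
On iOS 17 and later, the commented-out AVAudioApplication line near the top of btnStartPress is the preferred way to ask for the record permission. A sketch of how both paths could be supported at once (the closure body is the same handling shown in btnStartPress above):

if #available(iOS 17.0, *) {
    AVAudioApplication.requestRecordPermission { allowed in
        // same handling as in btnStartPress above
    }
} else {
    AVAudioSession.sharedInstance().requestRecordPermission { allowed in
        // same handling as in btnStartPress above
    }
}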

You must add this key to Info.plist so that the app can ask the user for permission to use the microphone (the raw key name is NSMicrophoneUsageDescription):

| Key | Type | Value |
| -- | -- | -- |
| Privacy - Microphone Usage Description | String | This app needs to use the microphone. |

Power chart

I created a chart showing the peak power and the low-pass filtered value to better understand what is going on. I clapped my hands three times during the recording. Notice how it holds near the peak power for about a second after the sound dies away, then decays back to the background noise level.
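
The charting code itself is not included in the answer. A rough sketch of how the two series could be plotted with DGCharts (the helper name, the sample arrays and the LineChartView outlet are assumptions, not part of the original answer):

import DGCharts

// Hypothetical helper: call it after recording stops, with samples collected in levelTimerCallback().
func showPowerChart(times: [Double], peakPower: [Double], lowPassed: [Double], in chartView: LineChartView) {
    let peakEntries = zip(times, peakPower).map { ChartDataEntry(x: $0.0, y: $0.1) }
    let filteredEntries = zip(times, lowPassed).map { ChartDataEntry(x: $0.0, y: $0.1) }

    let peakSet = LineChartDataSet(entries: peakEntries, label: "Peak power (linear)")
    let filteredSet = LineChartDataSet(entries: filteredEntries, label: "Low-pass filtered")
    peakSet.drawCirclesEnabled = false
    filteredSet.drawCirclesEnabled = false

    chartView.data = LineChartData(dataSets: [peakSet, filteredSet])
}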
