iPhone microphone输入事件捕获

本文介绍如何使用AudioSession和AudioQueue实现麦克风输入的声音捕捉,并获取声音峰值数据进行实时监测。


目的:
利用麦克风作为一种事件的输入方式
核心:
通过AudioSession与AudioQueue实现麦克风输入的数据捕捉.

开启AudioSession:
1.    AudioSessionInitialize
2.    AudioSessionSetProperty(kAudioSessionProperty_AudioCategory)
3.    AudioSessionSetActive

建立声音格式:
1.    声音格式的数据结构AudioStreamBasicDescription
2.    使用kAudioFormatLinearPCM来作为声音格式

建立AudioQueue:
1.    AudioQueueNewInput
2.    AudioQueueStart
3.    AudioQueueSetProperty(kAudioQueueProperty_EnableLevelMetering)

获取声音峰值数据:
1.    记录峰值的数据结构AudioQueueLevelMeterState
2.    AudioQueueGetProperty(kAudioQueueProperty_CurrentLevelMeterDB)

关闭AudioQueue:
1.    AudioQueueStop
2.    AudioQueueDispose

代码:


#import <UIKit/UIKit.h>
#include <AudioToolbox/AudioToolbox.h>

// Demo view controller: captures microphone input through an AudioQueue and
// shows per-channel average/peak power so the levels can be used as an
// input-event trigger. Manual reference counting (pre-ARC) code.
@interface MicrophoneTestViewController : UIViewController {

    // Labels updated ~30x/sec with the current average and peak power values.
    IBOutlet UILabel*    _averagePower;
    IBOutlet UILabel*    _peakPower;

    AudioQueueRef                mQueue;            // input queue; NULL while not capturing
    AudioStreamBasicDescription    mFormat;         // capture format (linear PCM)
    AudioQueueLevelMeterState    *_chan_lvls;       // malloc'ed buffer, one entry per metered channel
    NSArray                        *_channelNumbers; // NSNumber channel indices being metered
}

// Replaces the metered-channel list (retain-new/release-old setter).
-(void)setChannelNumbers:(NSArray *)v;
// Initializes and activates the audio session for play-and-record.
-(void)initAudioSession;

// Toggles microphone capture on or off.
- (IBAction)startstop: (id) sender;

@end
[/code]

[code]
#import "MicrophoneTestViewController.h"

// AudioQueue input callback, invoked by the queue as capture buffers fill.
static void MyInputBufferHandler(void *                                    inUserData,
                                 AudioQueueRef                            inAQ,
                                 AudioQueueBufferRef                    inBuffer,
                                 const AudioTimeStamp *                    inStartTime,
                                 UInt32                                    inNumPackets,
                                 const AudioStreamPacketDescription*    inPacketDesc)
{
    // To record the captured audio, write inBuffer's data out here.
    // To analyze the raw audio data, process inBuffer here as well.
    // Intentionally empty: this sample only reads the queue's level meters.
}

// Audio session interruption callback.
static void interruptionListener(void *    inClientData,
                                 UInt32    inInterruptionState)
{
    // Interruption notification (begin/end states); no handling in this sample.
}

@implementation MicrophoneTestViewController

#pragma mark - View lifecycle

// Set up default label text, allocate the level-meter buffer for a single
// channel, activate the audio session, and start a 30 Hz timer that
// refreshes the meter labels.
- (void)viewDidLoad {
    [super viewDidLoad];

    _averagePower.text = @"0";
    _peakPower.text = @"0";
    mQueue = NULL;
    _channelNumbers = [[NSArray alloc] initWithObjects:[NSNumber numberWithInt:0], nil];
    _chan_lvls = (AudioQueueLevelMeterState *)malloc(sizeof(AudioQueueLevelMeterState) * [_channelNumbers count]);

    [self initAudioSession];

    // NOTE(review): a repeating NSTimer retains its target, so this controller
    // stays alive until the timer is invalidated. If this controller can ever
    // be dismissed, store the timer in an ivar and invalidate it on teardown.
    [NSTimer scheduledTimerWithTimeInterval:1.f / 30.f
                                     target:self
                                   selector:@selector(_refresh)
                                   userInfo:nil
                                    repeats:YES];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // No cached data to release.
}

// Release view-related resources.
// Fixed: call super, and nil/NULL the freed pointers so the refresh timer and
// dealloc cannot touch stale memory (the original left both dangling, making
// the still-armed 30 Hz timer read freed memory).
- (void)viewDidUnload {
    [super viewDidUnload];

    [_channelNumbers release];
    _channelNumbers = nil;
    free(_chan_lvls);
    _chan_lvls = NULL;
}

// Fixed: the original dealloc released nothing, leaking the channel list, the
// meter buffer, and a still-running AudioQueue. Releasing nil and free(NULL)
// are safe no-ops, so this is correct whether or not viewDidUnload ran first.
- (void)dealloc {
    [self stopMicrophone];
    [_channelNumbers release];
    free(_chan_lvls);
    [super dealloc];
}

#pragma mark - Audio session

// Initialize the audio session with an interruption listener and activate it
// in the play-and-record category so microphone input is available.
-(void)initAudioSession
{
    OSStatus error = AudioSessionInitialize(NULL, NULL, interruptionListener, self);
    if (error) printf("ERROR INITIALIZING AUDIO SESSION! %d\n", (int)error);
    else
    {
        UInt32 category = kAudioSessionCategory_PlayAndRecord;
        error = AudioSessionSetProperty(kAudioSessionProperty_AudioCategory, sizeof(category), &category);
        if (error) printf("couldn't set audio category!");

        error = AudioSessionSetActive(true);
        if (error) printf("AudioSessionSetActive (true) failed");
    }
}

// Fill mFormat with the current hardware sample rate / input channel count and
// the requested format ID. For linear PCM, default to packed signed 16-bit.
// Fixed: the original silently discarded both AudioSessionGetProperty results.
-(void)setupAudioFormat:(UInt32)inFormatID
{
    memset(&mFormat, 0, sizeof(mFormat));

    UInt32 size = sizeof(mFormat.mSampleRate);
    OSStatus result = AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareSampleRate,
                                              &size,
                                              &mFormat.mSampleRate);
    if (result) printf("couldn't get hardware sample rate (%d)\n", (int)result);

    size = sizeof(mFormat.mChannelsPerFrame);
    result = AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareInputNumberChannels,
                                     &size,
                                     &mFormat.mChannelsPerFrame);
    if (result) printf("couldn't get hardware input channel count (%d)\n", (int)result);

    mFormat.mFormatID = inFormatID;
    if (inFormatID == kAudioFormatLinearPCM)
    {
        // if we want pcm, default to signed 16-bit little-endian
        mFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        mFormat.mBitsPerChannel = 16;
        mFormat.mBytesPerPacket = mFormat.mBytesPerFrame = (mFormat.mBitsPerChannel / 8) * mFormat.mChannelsPerFrame;
        mFormat.mFramesPerPacket = 1;
    }
}

#pragma mark - Capture control

// Create and start an input AudioQueue with level metering enabled, and grow
// the meter buffer / channel list if the hardware reports more channels.
// Fixed: dispose the queue when AudioQueueStart fails (the original leaked
// it), and guard the realloc result before overwriting _chan_lvls.
-(void)startMicrophone
{
    [self setupAudioFormat:kAudioFormatLinearPCM];

    OSStatus result = AudioQueueNewInput(&mFormat, MyInputBufferHandler, NULL, NULL, NULL, 0, &mQueue);
    if (result != noErr) {
        mQueue = NULL;
        NSLog(@"startMicrophone:失败.");
        return;
    }

    result = AudioQueueStart(mQueue, NULL);
    if (result != noErr) {
        AudioQueueDispose(mQueue, true);   // original leaked the queue on this path
        mQueue = NULL;
        NSLog(@"startMicrophone:失败.");
        return;
    }

    UInt32 val = 1;
    AudioQueueSetProperty(mQueue, kAudioQueueProperty_EnableLevelMetering, &val, sizeof(UInt32));

    if (mFormat.mChannelsPerFrame != [_channelNumbers count])
    {
        NSArray *chan_array;
        if (mFormat.mChannelsPerFrame < 2)
            chan_array = [[NSArray alloc] initWithObjects:[NSNumber numberWithInt:0], nil];
        else
            chan_array = [[NSArray alloc] initWithObjects:[NSNumber numberWithInt:0], [NSNumber numberWithInt:1], nil];

        [self setChannelNumbers:chan_array];
        [chan_array release];

        AudioQueueLevelMeterState *resized =
            (AudioQueueLevelMeterState *)realloc(_chan_lvls, mFormat.mChannelsPerFrame * sizeof(AudioQueueLevelMeterState));
        if (resized == NULL) {
            // Keep the old (too small) buffer out of play by shutting down;
            // otherwise _refresh would overflow it.
            NSLog(@"startMicrophone:失败.");
            [self stopMicrophone];
            return;
        }
        _chan_lvls = resized;
    }
}

// Synchronously stop and dispose the queue; safe to call when not capturing.
-(void)stopMicrophone
{
    if (mQueue) {
        AudioQueueStop(mQueue, true);
        AudioQueueDispose(mQueue, true);
        mQueue = NULL;
    }
}

#pragma mark - Metering

// Timer callback: poll the queue's current level meters and display them.
// The labels end up showing the last listed channel's values; read _chan_lvls
// directly wherever the levels are needed as an event trigger.
// Fixed: use OSStatus (AudioQueueGetProperty's return type) instead of OSErr,
// guard against a freed meter buffer, and reject negative channel indices
// before indexing into _chan_lvls.
-(void)_refresh
{
    if (mQueue == NULL || _chan_lvls == NULL) return;

    UInt32 data_sz = (UInt32)(sizeof(AudioQueueLevelMeterState) * [_channelNumbers count]);
    OSStatus status = AudioQueueGetProperty(mQueue, kAudioQueueProperty_CurrentLevelMeterDB, _chan_lvls, &data_sz);
    if (status == noErr)
    {
        for (NSUInteger i = 0; i < [_channelNumbers count]; i++)
        {
            NSInteger channelIdx = [(NSNumber *)[_channelNumbers objectAtIndex:i] intValue];
            if (channelIdx >= 0 && (NSUInteger)channelIdx < [_channelNumbers count] && channelIdx <= 127)
            {
                _averagePower.text = [NSString stringWithFormat:@"%f", _chan_lvls[channelIdx].mAveragePower];
                _peakPower.text = [NSString stringWithFormat:@"%f", _chan_lvls[channelIdx].mPeakPower];
            }
        }
    }
}

// MRC setter: retain the new value before releasing the old one so assigning
// the currently-held array is safe.
-(void)setChannelNumbers:(NSArray *)v
{
    [v retain];
    [_channelNumbers release];
    _channelNumbers = v;
}

#pragma mark - Actions

// Toggle capture: stop the queue when running, otherwise start it.
- (IBAction)startstop: (id) sender
{
    if (mQueue) {
        [self stopMicrophone];
    } else {
        [self startMicrophone];
    }
}

@end

static async fetchDevices(onSuccess, onError) { try { /** * 火狐浏览器不支持这种获取mic和camera的方式,改为调用声网原生方法 */ // var userAgent = navigator.userAgent.toLowerCase(); // 火狐或者H5直接获取设备列表 if (userAgent.indexOf("firefox") > -1 || userAgent.indexOf("android") > -1 || userAgent.indexOf("iphone") > -1) { _js_AgoraRTC_N_production__WEBPACK_IMPORTED_MODULE_2___default.a.getDevices().then(devices => { _logger_helper__WEBPACK_IMPORTED_MODULE_0__["default"].uploadLog( _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_INFO, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Result_Success + '|0|0', '0', JSON.stringify(devices), '获取设备信息', _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_sourceType, '015' ); if (devices.length !== 0) { const mic = []; const camera = []; const outMic = []; devices.forEach(device => { if (device.kind === 'audioinput') { mic.push(device); } else if (device.kind === 'videoinput') { camera.push(device); } else if (device.kind === 'audiooutput') { outMic.push(device); } }); onSuccess({ mic, camera, outMic }); } else { onSuccess({ mic, camera, outMic }); } }).catch(err => { _logger_helper__WEBPACK_IMPORTED_MODULE_0__["default"].uploadErr( _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_ERROR, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Result_failed + '|fetch device|0', '0', err, '获取火狐设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_sourceType, '015' ); onError(err); }); } else { const result1 = await navigator.permissions.query({ name: 'microphone' }); const result2 = await navigator.permissions.query({ name: 'camera' }); // if (result1.state != 'denied' && result2.state !='denied') { const devices = await Devices.enumerateDevices(); let hasMic = devices.mic.length !== 0; let hasCamera = devices.camera.length !== 0; if (result1.state != 'granted' || result2.state != 'granted') { // 如果有一个权限不是允许,重新调用获取授权 if 
(result1.state == 'denied') { hasMic = false; } if (result2.state == 'denied') { hasCamera = false; } if (hasCamera || hasMic) { const stream = await navigator.mediaDevices.getUserMedia({ audio: hasMic, video: hasCamera }); } } await _js_AgoraRTC_N_production__WEBPACK_IMPORTED_MODULE_2___default.a.getDevices().then(devicesInfo => { _logger_helper__WEBPACK_IMPORTED_MODULE_0__["default"].uploadLog( _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_INFO, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Result_Success + '|0|0', '0', JSON.stringify(devicesInfo), '获取设备信息', _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_sourceType, '015' ); if (devicesInfo.length !== 0) { const mic = []; const camera = []; const outMic = []; devicesInfo.forEach(device => { if (device.kind === 'audioinput') { mic.push(device); } else if (device.kind === 'videoinput') { camera.push(device); } else if (device.kind === 'audiooutput') { outMic.push(device); } }); onSuccess({ mic, camera, outMic }); } else { onSuccess({ mic, camera, outMic }); } }).catch(err => { _logger_helper__WEBPACK_IMPORTED_MODULE_0__["default"].uploadErr( _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_ERROR, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Result_failed + '|fetch device|0', '0', err, '获取设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_sourceType, '015' ); onError(err); }); } } catch (err) { _logger_helper__WEBPACK_IMPORTED_MODULE_0__["default"].uploadErr( _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_ERROR, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Result_failed + '|fetch device|0', '0', err, '获取设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__["default"].Log_sourceType, '015' ); onError(err); } }这个代码会在js的子线程中运行吗? 会导致app闪退吗?
07-24
static async fetchDevices(onSuccess, onError) { try { /**\rn * 火狐浏览器不支持这种获取mic和camera的方式,改为直接调用声网原生方法rn */ // var userAgent = navigator.userAgent.toLowerCase();\rnrn // 火狐或者H5直接获取设列表rn if (userAgent.indexOf(\"firefox\") > -1 || userAgent.indexOf(\"android\") > -1 || userAgent.indexOf(\"iphone\") > -1) {\rn _js_AgoraRTC_N_production__WEBPACK_IMPORTED_MODULE_2___default.a.getDevices().then(devices => {\rn _logger_helper__WEBPACK_IMPORTED_MODULE_0__[\"default\"].uploadLog(_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_INFO, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Result_Success + '|0|0', '0', JSON.stringify(devices), '获取设备信息', _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_sourceType, '015');\rn if (devices.length !== 0) {\rn const mic = [];\rn const camera = [];\rn const outMic = [];\rn devices.forEach(device => {\rn if (device.kind === 'audioinput') {\rn mic.push(device);\rn } else if (device.kind === 'videoinput') {\rn camera.push(device);\rn } else if (device.kind === 'audiooutput') {\rn outMic.push(device);\rn }\rn });\rn onSuccess({\rn mic,\rn camera,\rn outMicrn });\rn } else {\rn onSuccess({\rn mic,\rn camera,\rn outMicrn });\rn }\rn }).catch(err => {\rn _logger_helper__WEBPACK_IMPORTED_MODULE_0__[\"default\"].uploadErr(_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_ERROR, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Result_failed + '|fetch device|0', '0', err, '获取火狐设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_sourceType, '015');\rn onError(err);\rn });\rn } else {\rn const result1 = await navigator.permissions.query({\rn name: 'microphone'\rn });\rn const result2 = await navigator.permissions.query({\rn name: 'camera'\rn });\rn // if (result1.state != 'denied' && result2.state !='denied') {\rn const devices = await Devices.enumerateDevices();\rn let hasMic = 
devices.mic.length !== 0;\rn let hasCamera = devices.camera.length !== 0;\rn if (result1.state != 'granted' || result2.state != 'granted') {\rn // 如果有一个权限不是允许,重新调用获取授权rn if (result1.state == 'denied') {\rn hasMic = false;\rn }\rn if (result2.state == 'denied') {\rn hasCamera = false;\rn }\rn if (hasCamera || hasMic) {\rn const stream = await navigator.mediaDevices.getUserMedia({\rn audio: hasMic,\rn video: hasCamerarn });\rn }\rn }\rn await _js_AgoraRTC_N_production__WEBPACK_IMPORTED_MODULE_2___default.a.getDevices().then(devicesInfo => {\rn _logger_helper__WEBPACK_IMPORTED_MODULE_0__[\"default\"].uploadLog(_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_INFO, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Result_Success + '|0|0', '0', JSON.stringify(devicesInfo), '获取设备信息', _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_sourceType, '015');\rn if (devicesInfo.length !== 0) {\rn const mic = [];\rn const camera = [];\rn const outMic = [];\rn devicesInfo.forEach(device => {\rn if (device.kind === 'audioinput') {\rn mic.push(device);\rn } else if (device.kind === 'videoinput') {\rn camera.push(device);\rn } else if (device.kind === 'audiooutput') {\rn outMic.push(device);\rn }\rn });\rn onSuccess({\rn mic,\rn camera,\rn outMicrn });\rn } else {\rn onSuccess({\rn mic,\rn camera,\rn outMicrn });\rn }\rn }).catch(err => {\rn _logger_helper__WEBPACK_IMPORTED_MODULE_0__[\"default\"].uploadErr(_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_ERROR, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Result_failed + '|fetch device|0', '0', err, '获取设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_sourceType, '015');\rn onError(err);\rn });\rn }\rn } catch (err) {\rn _logger_helper__WEBPACK_IMPORTED_MODULE_0__[\"default\"].uploadErr(_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_ERROR, 
_constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Normal_Type, _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Result_failed + '|fetch device|0', '0', err, '获取设备异常', _constant__WEBPACK_IMPORTED_MODULE_1__[\"default\"].Log_sourceType, '015');\rn onError(err);\rn }\rn }\rnrn static async enumerateDevices() {\rn const devices = await navigator.mediaDevices.enumerateDevices();\rn const mic = [];\rn const camera = [];\rn devices.forEach(device => {\rn if (device.kind === 'audioinput') {\rn mic.push(device);\rn } else if (device.kind === 'videoinput') {\rn camera.push(device);\rn }\rn });\rn return {\rn mic,\rn camerarn };\rn }\rnrn /**\rn * 获取系统版本信息rn */\rn static async getUserAgent() {\rn // var userAgent = navigator.userAgent.toLowerCase();\rn // console.log('userAgent:', userAgent);\rn let name;\rn // var version = \"Unknown\";\rn if (userAgent.indexOf('win') > -1) {\rn if (userAgent.indexOf('windows nt 5.0') > -1) {\rn name = 'Windows 2000';\rn } else if (userAgent.indexOf('windows nt 5.1') > -1 || userAgent.indexOf('windows nt 5.2') > -1) {\rn name = 'Windows XP';\rn } else if (userAgent.indexOf('windows nt 6.0') > -1) {\rn name = 'Windows Vista';\rn } else if (userAgent.indexOf('windows nt 6.1') > -1 || (userAgent.indexOf('windows 7') > -1)) {\rn name = 'Windows 7';\rn } else if (userAgent.indexOf('windows nt 6.2') > -1 || (userAgent.indexOf('windows 8') > -1)) {\rn name = 'Windows 8';\rn } else if (userAgent.indexOf('windows nt 6.3') > -1) {\rn name = 'Windows 8.1';\rn } else if (userAgent.indexOf('windows nt 6.2') > -1 || (userAgent.indexOf('windows nt 10.0') > -1)) {\rn name = 'Windows 10';\rn } else {\rn name = 'Unknown';\rn }\rn } else if (userAgent.indexOf('iphone') > -1) {\rn name = \"Iphone\";\rn } else if (userAgent.indexOf('mac') > -1) {\rn name = \"Mac\";\rn } else if (userAgent.indexOf('x11') > -1 || userAgent.indexOf('unix') > -1 || userAgent.indexOf('sunname') > -1 || userAgent.indexOf('bsd') > -1) {\rn name = \"Unix\";\rn } else if 
(userAgent.indexOf('linux') > -1) {\rn name = \"Linux\";\rn } else {\rn name = \"Unknown\";\rn }\rn return {\rn namern };\rn }\rnrn /**\rn * 获取浏览器版本信息rn */\rnrn static async getBrowser() {\rn const sys = {};\rn const ua = userAgent;\rn let s;\rn (s = ua.match(/edge\\/([\\d.]+)/)) ?\rn (sys.edge = s[1]) :\rn (s = ua.match(/rv:([\\d.]+)\\) like gecko/)) ?\rn (sys.ie = s[1]) :\rn (s = ua.match(/msie ([\\d.]+)/)) ?\rn (sys.ie = s[1]) :\rn (s = ua.match(/firefox\\/([\\d.]+)/)) ?\rn (sys.firefox = s[1]) :\rn (s = ua.match(/tbs\\/([\\d]+)/)) ?\rn (sys.tbs = s[1]) :\rn (s = ua.match(/xweb\\/([\\d]+)/)) ?\rn (sys.xweb = s[1]) :\rn (s = ua.match(/chrome\\/([\\d.]+)/)) ?\rn (sys.chrome = s[1]) :\rn (s = ua.match(/opera.([\\d.]+)/)) ?\rn (sys.opera = s[1]) :\rn (s = ua.match(/version\\/([\\d.]+).*safari/)) ?\rn (sys.safari = s[1]) :\rn 0;\rnrn if (sys.xweb) {\rn return {\rn browser: 'webView XWEB',\rn version: ''\rn };\rn }\rn if (sys.tbs) {\rn return {\rn browser: 'webView TBS',\rn version: ''\rn };\rn }\rn if (sys.edge) {\rn return {\rn browser: 'Edge',\rn version: sys.edgern };\rn }\rn if (sys.ie) {\rn return {\rn browser: 'IE',\rn version: sys.iern };\rn }\rn if (sys.firefox) {\rn return {\rn browser: 'Firefox',\rn version: sys.firefoxrn };\rn }\rn if (sys.chrome) {\rn return {\rn browser: 'Chrome',\rn version: sys.chromern };\rn }\rn if (sys.opera) {\rn return {\rn browser: 'Opera',\rn version: sys.operarn };\rn }\rn if (sys.safari) {\rn return {\rn browser: 'Safari',\rn version: sys.safarirn };\rn }\rnrn return {\rn browser: '',\rn version: '0'\rn };\rn }\rnrn static getOsInfo() {\rn return userAgent;\rn }\rn} 这个代码帮我格式化一下
07-24
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值