iOS Audio Capture
To do speech recognition on iOS, you first need to capture the audio. The view controller below records with AVAudioRecorder, meters the input level to drive a simple animated wave (CAShapeLayer plus CADisplayLink), and plays the result back with AVAudioPlayer.
#import "GetAudioViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <QuartzCore/QuartzCore.h>
@interface GetAudioViewController ()
{
    AVAudioPlayer *_player;
    AVAudioRecorder *_audiorecord;
    NSTimer *_timerForPitch;
    CAShapeLayer *_shapeLayer;
    CADisplayLink *_displayLink;
    __weak IBOutlet UIProgressView *_audioPower;
    __weak IBOutlet UIButton *_record;
    __weak IBOutlet UIButton *_pause;
    __weak IBOutlet UIButton *_resume;
    __weak IBOutlet UIButton *_stop;
    __weak IBOutlet UIView *_viewForWave;
    float Pitch;
    NSInteger _recordEncoding;
    CFTimeInterval _firstTimestamp;
    NSInteger _loopCount;
}
@end
@implementation GetAudioViewController
- (void)viewDidLoad {
    [super viewDidLoad];
}
// Create the directory at the given path if it does not exist yet.
- (void)createPath:(NSString *)path
{
    NSFileManager *filemanager = [NSFileManager defaultManager];
    if (![filemanager fileExistsAtPath:path])
        [filemanager createDirectoryAtPath:path
               withIntermediateDirectories:YES
                                attributes:nil
                                     error:nil];
}
// Build the wave path: a Bezier curve whose bend is driven by the elapsed time and the
// current metered level (Pitch); when Pitch is 0 it collapses to a flat line.
- (UIBezierPath *)pathAtInterval:(NSTimeInterval)interval
{
    UIBezierPath *path = [UIBezierPath bezierPath];
    [path moveToPoint:CGPointMake(0, _viewForWave.bounds.size.height / 2.0)];
    CGFloat fractionOfSecond = interval - floor(interval);
    CGFloat yOffset = _viewForWave.bounds.size.height * sin(fractionOfSecond * M_PI * Pitch * 8);
    [path addCurveToPoint:CGPointMake(_viewForWave.bounds.size.width, _viewForWave.bounds.size.height / 2.0)
            controlPoint1:CGPointMake(_viewForWave.bounds.size.width / 2.0, _viewForWave.bounds.size.height / 2.0 - yOffset)
            controlPoint2:CGPointMake(_viewForWave.bounds.size.width / 2.0, _viewForWave.bounds.size.height / 2.0 + yOffset)];
    return path;
}
- (void)addShapeLayer
{
    _shapeLayer = [CAShapeLayer layer];
    _shapeLayer.path = [[self pathAtInterval:2.0] CGPath];
    _shapeLayer.fillColor = [[UIColor redColor] CGColor];
    _shapeLayer.lineWidth = 1.0;
    _shapeLayer.strokeColor = [[UIColor whiteColor] CGColor];
    [_viewForWave.layer addSublayer:_shapeLayer];
}
// Redraw the wave on every screen refresh while recording.
- (void)handleDisplayLink:(CADisplayLink *)displayLink
{
    if (!_firstTimestamp)
        _firstTimestamp = displayLink.timestamp;
    _loopCount++;
    NSTimeInterval elapsed = (displayLink.timestamp - _firstTimestamp);
    _shapeLayer.path = [[self pathAtInterval:elapsed] CGPath];
}
- (void)startDisplayLink
{
    _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(handleDisplayLink:)];
    [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
// "Record" button: configure the audio session, build the recorder settings, and start recording.
- (IBAction)recordClick:(id)sender {
    _viewForWave.hidden = NO;
    [self addShapeLayer];
    [self startDisplayLink];
    NSLog(@"startRecording");
    _audiorecord = nil;
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryRecord error:nil];
    NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] initWithCapacity:10];
    // _recordEncoding selects the codec; it is never set here, so the default of 0 falls through to IMA4 below.
    if (_recordEncoding == 6)
    {
        // Uncompressed 16-bit stereo linear PCM at 44.1 kHz.
        [recordSettings setObject:[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
        [recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
        [recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
        [recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
        [recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];
    }
    else
    {
        NSNumber *formatObject;
        switch (_recordEncoding) {
            case 1:
                formatObject = [NSNumber numberWithInt:kAudioFormatMPEG4AAC];
                break;
            case 2:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleLossless];
                break;
            case 3:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleIMA4];
                break;
            case 4:
                formatObject = [NSNumber numberWithInt:kAudioFormatiLBC];
                break;
            case 5:
                formatObject = [NSNumber numberWithInt:kAudioFormatULaw];
                break;
            default:
                formatObject = [NSNumber numberWithInt:kAudioFormatAppleIMA4];
        }
        [recordSettings setObject:formatObject forKey:AVFormatIDKey];
        [recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
        // 12800 bps as in the original; 128000 is a more typical encoder bit rate.
        [recordSettings setObject:[NSNumber numberWithInt:12800] forKey:AVEncoderBitRateKey];
        [recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
        [recordSettings setObject:[NSNumber numberWithInt:AVAudioQualityHigh] forKey:AVEncoderAudioQualityKey];
    }
    // Record to Documents/recordTest.caf.
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
    NSURL *url = [NSURL fileURLWithPath:soundFilePath];
    NSError *error = nil;
    _audiorecord = [[AVAudioRecorder alloc] initWithURL:url settings:recordSettings error:&error];
    _audiorecord.meteringEnabled = YES;
    if ([_audiorecord prepareToRecord] == YES) {
        [_audiorecord record];
        // Poll the input level every 10 ms to update the progress bar and wave amplitude.
        _timerForPitch = [NSTimer scheduledTimerWithTimeInterval:0.01 target:self selector:@selector(levelTimerCallback:) userInfo:nil repeats:YES];
    } else {
        NSLog(@"prepareToRecord failed: %@", [error localizedDescription]);
    }
}
// Timer callback: convert the average power (in dB) to a linear 0..1 value and feed the UI.
- (void)levelTimerCallback:(NSTimer *)timer {
    [_audiorecord updateMeters];
    float linear1 = pow(10, [_audiorecord averagePowerForChannel:0] / 20);
    if (linear1 > 0.03) {
        Pitch = linear1 + 0.20;
    } else {
        Pitch = 0.0;
    }
    [_audioPower setProgress:Pitch];
}
// "Pause" button: despite the name, this stops the recording and tears down the wave animation.
- (IBAction)pauseClick:(id)sender {
    NSLog(@"stopRecording");
    _viewForWave.hidden = YES;
    [_audiorecord stop];
    [self stopDisplayLink];
    _shapeLayer.path = [[self pathAtInterval:0] CGPath];
    [_timerForPitch invalidate];
    _timerForPitch = nil;
}
- (void)stopDisplayLink
{
    [_displayLink invalidate];
    _displayLink = nil;
}
// "Resume" button: despite the name, this plays the recorded file back with AVAudioPlayer.
- (IBAction)resumeClick:(id)sender {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
    NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docsDir = [dirPaths objectAtIndex:0];
    NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
    NSURL *url = [NSURL fileURLWithPath:soundFilePath];
    NSError *error;
    _player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
    _player.numberOfLoops = 0;
    [_player play];
}
// "Stop" button: stop playback.
- (IBAction)stopClick:(id)sender {
    [_player stop];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}
@end
That is all of the code.
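One thing the listing does not do is ask the user for microphone access. On current iOS versions the app must declare NSMicrophoneUsageDescription in Info.plist and request record permission before recording, or the capture will silently produce no audio. Below is a minimal sketch, not part of the original code: the wrapper method name requestMicrophoneThenRecord: is made up for illustration, and it simply reuses the existing recordClick: action.

// Sketch only: ask for microphone permission, then start the existing record action.
// Assumes NSMicrophoneUsageDescription is present in Info.plist (required since iOS 10).
- (void)requestMicrophoneThenRecord:(id)sender
{
    [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (granted) {
                [self recordClick:sender]; // reuse the record action from the listing above
            } else {
                NSLog(@"Microphone permission denied");
            }
        });
    }];
}

requestRecordPermission: has been part of AVAudioSession since iOS 7; gating the record button on its callback avoids recording an empty file when the user has denied access.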