1.

AQRecorder

    // Configure the recording format (AudioStreamBasicDescription).
    // NOTE(review): this excerpt splices two AQRecorder methods together —
    // format setup and file creation; the enclosing signatures were lost in scraping.
    mRecordFormat.mFormatID = inFormatID;
    if (inFormatID == kAudioFormatLinearPCM)
    {
        // For PCM, default to packed, signed 16-bit little-endian samples
        // (values restored from Apple's SpeakHere sample, which this code follows).
        mRecordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        mRecordFormat.mBitsPerChannel = 16;
        // Interleaved PCM: bytes per frame = (bits per channel / 8) * channels,
        // and exactly one frame per packet.
        mRecordFormat.mBytesPerPacket = mRecordFormat.mBytesPerFrame =
            (mRecordFormat.mBitsPerChannel / 8) * mRecordFormat.mChannelsPerFrame;
        mRecordFormat.mFramesPerPacket = 1;
    }
    // else {
    //     // Alternative: AAC recording. Kept for reference.
    //     mRecordFormat.mSampleRate = 44100; // 8000.0 / 44100.0
    //     mRecordFormat.mFormatID = kAudioFormatMPEG4AAC; // kAudioFormatMPEG4AAC_HE does not work:
    //                                                     // no AudioClassDescription found, mFormatFlags stays 0.
    //     mRecordFormat.mFormatFlags = kMPEG4Object_AAC_LC; // format-specific flags; 0 means none
    //     mRecordFormat.mBytesPerPacket = 0;   // 0 = variable packet size (per-packet AudioStreamPacketDescription)
    //     mRecordFormat.mFramesPerPacket = 0;  // 1024 for AAC; 0 for variable frames-per-packet formats
    //     mRecordFormat.mBytesPerFrame = 0;    // 0 for compressed formats
    //     mRecordFormat.mChannelsPerFrame = 1; // must be nonzero
    //     mRecordFormat.mBitsPerChannel = 0;   // 0 for compressed formats
    //     mRecordFormat.mReserved = 0;         // pads to 8-byte alignment; must be 0
    // }

    // Build a CFURL from the caller-supplied path and create (or overwrite)
    // the output CAF file with the format configured above.
    url = CFURLCreateWithString(kCFAllocatorDefault, (CFStringRef)inRecordFile, NULL);
    OSStatus status = AudioFileCreateWithURL(url, kAudioFileCAFType, &mRecordFormat,
                                             kAudioFileFlags_EraseFile, &mRecordFile);
    // NOTE(review): `status` is never checked — consider XThrowIfError(status, ...).
    CFRelease(url);
// ____________________________________________________________________________________
// AudioQueue input callback, invoked each time the queue has filled an input buffer.
// Writes the captured packets to the record file, then re-enqueues the buffer so the
// queue can keep filling it while recording is running.
// (Stripped literals restored from Apple's SpeakHere sample: 0 / 256 / 0.)
void AQRecorder::MyInputBufferHandler(void *inUserData,
                                      AudioQueueRef inAQ,
                                      AudioQueueBufferRef inBuffer,
                                      const AudioTimeStamp *inStartTime,
                                      UInt32 inNumPackets,
                                      const AudioStreamPacketDescription *inPacketDesc)
{
    AQRecorder *aqr = (AQRecorder *)inUserData;
    try {
        if (inNumPackets > 0) {
            // Write the captured packets at the current file position, then
            // advance mRecordPacket by however many packets were written.
            XThrowIfError(AudioFileWritePackets(aqr->mRecordFile, FALSE, inBuffer->mAudioDataByteSize,
                                                inPacketDesc, aqr->mRecordPacket, &inNumPackets, inBuffer->mAudioData),
                          "AudioFileWritePackets failed");
            aqr->mRecordPacket += inNumPackets;
        }
        // If we're not stopping, re-enqueue the buffer so that it gets filled again.
        if (aqr->IsRunning())
            XThrowIfError(AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL), "AudioQueueEnqueueBuffer failed");
    } catch (CAXException &e) {  // catch by reference: avoids copying the exception
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
    }
}

AQPlayer

void AQPlayer::SetupNewQueue()
{
XThrowIfError(AudioQueueNewOutput(&mDataFormat, AQPlayer::AQBufferCallback, this,
CFRunLoopGetCurrent(), kCFRunLoopCommonModes, , &mQueue), "AudioQueueNew failed"); ... void AQPlayer::AQBufferCallback(void * inUserData,
AudioQueueRef inAQ,
AudioQueueBufferRef inCompleteAQBuffer)
{
AQPlayer *THIS = (AQPlayer *)inUserData; if (THIS->mIsDone) return; UInt32 numBytes;
UInt32 nPackets = THIS->GetNumPacketsToRead();
OSStatus result = AudioFileReadPackets(THIS->GetAudioFileID(), false, &numBytes, inCompleteAQBuffer->mPacketDescriptions, THIS->GetCurrentPacket(), &nPackets,
inCompleteAQBuffer->mAudioData);
if (result)
printf("AudioFileReadPackets failed: %d", (int)result);
if (nPackets > ) {
inCompleteAQBuffer->mAudioDataByteSize = numBytes;
inCompleteAQBuffer->mPacketDescriptionCount = nPackets;
AudioQueueEnqueueBuffer(inAQ, inCompleteAQBuffer, , NULL);
THIS->mCurrentPacket = (THIS->GetCurrentPacket() + nPackets);
}

2.levelmeter

// Timer callback that used to poll the queue's level meter and forward the
// peak power to the delegate.
// NOTE(review): the entire body is commented out — this is a disabled legacy
// path, replaced in this post by MLAudioMeterObserver.
- (void)updateLevelMeter:(id)sender {
/*
if (self.delegate) {
UInt32 dataSize = sizeof(AudioQueueLevelMeterState);
AudioQueueGetProperty([self.decapsulator Queue], kAudioQueueProperty_CurrentLevelMeter, levelMeterStates, &dataSize);
if ([self.delegate respondsToSelector:@selector(levelMeterChanged:)]) {
[self.delegate levelMeterChanged:levelMeterStates[0].mPeakPower];
} }*/
}
    // (disabled) old approach: poll updateLevelMeter: on a 0.2 s repeating NSTimer.
    //self.timerLevelMeter = [NSTimer scheduledTimerWithTimeInterval:0.2 target:self selector:@selector(updateLevelMeter:) userInfo:nil repeats:YES];
// Current approach: observe the queue's level meter with MLAudioMeterObserver.
// weakSelf breaks the retain cycle: self retains meterObserver, which retains its blocks.
__weak __typeof(self)weakSelf = self;
MLAudioMeterObserver *meterObserver = [[MLAudioMeterObserver alloc]init];
meterObserver.actionBlock = ^(NSArray *levelMeterStates,MLAudioMeterObserver *meterObserver){
// Forward the computed volume to the delegate if it implements levelMeterChanged:.
NSLog(@"volume:%f",[MLAudioMeterObserver volumeForLevelMeterStates:levelMeterStates]); if ([weakSelf.delegate respondsToSelector:@selector(levelMeterChanged:)]) {
[weakSelf.delegate levelMeterChanged:[MLAudioMeterObserver volumeForLevelMeterStates:levelMeterStates]];
}
};
meterObserver.errorBlock = ^(NSError *error,MLAudioMeterObserver *meterObserver){
// (disabled) alert on error; Chinese strings: "错误" = "Error", "知道了" = "OK".
//[[[UIAlertView alloc]initWithTitle:@"错误" message:error.userInfo[NSLocalizedDescriptionKey] delegate:nil cancelButtonTitle:nil otherButtonTitles:@"知道了", nil]show];
};
self.meterObserver = meterObserver;
// Attach the observer to the player's AudioQueue.
self.meterObserver.audioQueue = player->Queue();

3.

linesview

// Delegate callback (linesview variant): push the new meter level into both
// line views, hopping to the main queue for the UI work.
- (void)levelMeterChanged:(float)levelMeter {
dispatch_async(dispatch_get_main_queue(), ^{
//self.levelMeter.progress = levelMeter;
// NOTE(review): the numeric factors after `levelMeter*` were lost when this
// post was scraped — the original multiplied by some gain constant. As-is
// these three expressions do not compile; recover the constants from the
// original post before reuse.
NSLog(@"%.2f",levelMeter*); [self.levelMeterLineView1 addMeter:levelMeter*];
[self.levelMeterLineView2 addMeter:levelMeter*];
}); }
// Append one meter sample to the ring of values and request a redraw.
// Alternating samples get a slightly different offset (0.4 vs 0.35) so the
// drawn waveform visibly oscillates even for a steady input level.
-(void)addMeter:(float)meter
{
if (high) {
meter = meter*0.6 + 0.4;
} else {
meter = meter*0.6 + 0.35;
}
// NOTE(review): the capacity literal after `count >` and the index after
// `removeObjectAtIndex:` were lost in scraping — presumably a fixed ring
// size and index 0 (drop the oldest sample); confirm against the original.
high = !high; [_meters addObject:@(meter)]; if (_meters.count > ) {
[_meters removeObjectAtIndex:];
}
[self setNeedsDisplay];
}

DXRecordView

// Delegate callback (DXRecordView variant): map the raw level into
// [0.35, 0.95] and update the record view's voice image on the main queue.
- (void)levelMeterChanged:(float)levelMeter {
dispatch_async(dispatch_get_main_queue(), ^{
//self.levelMeter.progress = levelMeter;
// NOTE(review): the multiplier after `levelMeter*` in the NSLog was lost in
// scraping; the line does not compile as-is.
NSLog(@"%.2f",levelMeter*); float showMeter = levelMeter*0.6 + 0.35; [_recordView setVoiceImageWithLowPassResults:showMeter]; }); }
// Resize the meter image so its height tracks the low-pass level while the
// bottom edge stays anchored (origin.y grows as the height shrinks).
// NOTE(review): the max-height constants before `*lowPassResults` and in the
// y-offset expression, plus this method's closing brace, were lost when the
// post was scraped — the code does not compile as-is.
-(void)setVoiceImageWithLowPassResults:(double)lowPassResults
{
CGRect frame = _meterImageView.frame;
frame.size.height = *lowPassResults;
frame.origin.y = +5.5+*(-lowPassResults);
_meterImageView.frame = frame;

4.recordButton 按住说话

    // Recording ("hold to talk") button setup.
    // NOTE(review): the x-origin and horizontal-padding literals in CGRectMake
    // were lost when this post was scraped; the line does not compile as-is.
self.recordButton = [[UIButton alloc] initWithFrame:CGRectMake(, kVerticalPadding, CGRectGetWidth(self.bounds)-( * ), kInputTextViewMinHeight)];
self.recordButton.titleLabel.font = [UIFont systemFontOfSize:15.0];
[self.recordButton setTitleColor:[UIColor darkGrayColor] forState:UIControlStateNormal];
[self.recordButton setBackgroundImage:[UIImage imageNamed:@"btn_long_round"] forState:UIControlStateNormal];
[self.recordButton setBackgroundImage:[UIImage imageNamed:@"btn_long_round_hl"] forState:UIControlStateHighlighted];
// Titles: "按住说话" = "hold to talk" (normal), "松开结束" = "release to finish" (highlighted).
[self.recordButton setTitle:LOCALIZATION(@"按住说话") forState:UIControlStateNormal];
[self.recordButton setTitle:LOCALIZATION(@"松开结束") forState:UIControlStateHighlighted];
[self.recordButton setTitleColor:[UIColor whiteColor] forState:UIControlStateHighlighted];
//self.recordButton.hidden = YES;
// Touch lifecycle: down starts recording; up-inside finishes; up-outside
// cancels; drag exit/enter toggle the "slide to cancel" UI state.
[self.recordButton addTarget:self action:@selector(recordButtonTouchDown) forControlEvents:UIControlEventTouchDown];
[self.recordButton addTarget:self action:@selector(recordButtonTouchUpOutside) forControlEvents:UIControlEventTouchUpOutside];
[self.recordButton addTarget:self action:@selector(recordButtonTouchUpInside) forControlEvents:UIControlEventTouchUpInside];
[self.recordButton addTarget:self action:@selector(recordDragOutside) forControlEvents:UIControlEventTouchDragExit];
[self.recordButton addTarget:self action:@selector(recordDragInside) forControlEvents:UIControlEventTouchDragEnter];

5.EMChatAudioBubbleView

// Configure the audio bubble for a message: duration label, read-indicator
// visibility, the correct animation frame set, and the play/stop state.
- (void)setModel:(MessageModel *)model
{
    [super setModel:model];

    // Clip duration in seconds, rendered as e.g. 12'
    _timeLabel.text = [NSString stringWithFormat:@"%d'",self.model.time];

    if (self.model.isSender) {
        // Outgoing message: no unread indicator; sender-side animation frames.
        _isReadView.hidden = YES;
        _animationImageView.image = [UIImage imageNamed:SENDER_ANIMATION_IMAGEVIEW_IMAGE_DEFAULT];
        _animationImageView.animationImages = _senderAnimationImages;
    } else {
        // Incoming message: show the unread dot until the clip has been played.
        _isReadView.hidden = model.isPlayed;
        _animationImageView.image = [UIImage imageNamed:RECEIVER_ANIMATION_IMAGEVIEW_IMAGE_DEFAULT];
        _animationImageView.animationImages = _recevierAnimationImages;
    }

    // Keep the speaker animation in sync with the current playback state.
    if (self.model.isPlaying) {
        [self startAudioAnimation];
    } else {
        [self stopAudioAnimation];
    }
}
// Cell showing a mini-course comment thread with voice playback.
// Fixes vs. original: the delegate is `weak` (a `strong` delegate back-reference
// retains its owner and creates a retain cycle) and the NSString property is
// `copy` (guards against a mutable string being mutated behind the cell's back).
@interface MiniCourseViewTableViewCell : UITableViewCell <UITableViewDataSource, UITableViewDelegate, CommentVoiceDelegate>

// NOTE(review): should be lowerCamelCase (`miniContent`), but renaming would
// break existing callers/KVC bindings, so the name is kept.
@property (nonatomic, copy) NSString *MiniContent;
@property (nonatomic, strong) UILabel *contentLabel, *numberLabel, *nameLabel, *timeLabel;
@property (nonatomic, strong) NSMutableArray *commentModelArray;
@property (nonatomic, strong) UIImageView *headImageView;
@property (nonatomic, strong) UIButton *laudButton;
@property (nonatomic, strong) UITableView *tableView;
@property (nonatomic, strong) PlayVoiceButton *playVoiceButton;
// weak: delegates must never be retained by the object they observe.
@property (nonatomic, weak) id<ReplyVoiceDelegate> delegate;
@property (nonatomic, strong) CourseReplay *replyModel;

// Returns the rendered height of `string` at font `size` constrained to `width`.
+ (float)getHeightWithString:(NSString *)string fontSize:(int)size contenViewWidth:(CGFloat)width;

@end
 
// Cell for a "follow talk" comment: user avatar, name/time, zan (like) button,
// answer/reply labels, score, and a voice-playback button.
// Fix vs. original: the delegate property is `weak` instead of `strong`, which
// otherwise creates a retain cycle with the delegate's owner.
@interface FollowTalkThingCommentTableViewCell : UITableViewCell

@property (nonatomic, strong) UIImageView *userImageView;
@property (nonatomic, strong) UILabel *nameLabel;
@property (nonatomic, strong) UILabel *timeLabel;
@property (nonatomic, strong) UIButton *zanButton;
@property (nonatomic, strong) UILabel *answerLabel, *replyLabel;
@property (nonatomic, strong) PlayVoiceButton *voiceButton;
@property (nonatomic, strong) UIView *answerView;
@property (nonatomic, strong) UILabel *scoreLabel;
// weak: delegates must never be retained. NOTE(review): the protocol name
// `playVoiceButton` should be UpperCamelCase, but renaming it would break the
// declaration site and all conformers, so it is kept.
@property (nonatomic, weak) id<playVoiceButton> delegate;
// Loosely-typed backing model; consider a concrete model class.
@property (nonatomic, strong) id model;

// Size of the chat bubble required to render `content`.
+ (CGSize)cellBubbleSizeWithContent:(NSString *)content;

@end

第九天 iOS音频技术的更多相关文章

  1. iOS 开发技术体系

    iOS 开发技术体系图: - 层级 | 主要框架 - ---------------------|--------------------------------------------------- ...

  2. iOS音频AAC视频H264编码 推流最佳方案

    iOS音频AAC视频H264编码 推流最佳方案 项目都是个人的调研与实验,可能很多不好或者不对的地方请多包涵. 1    功能概况 *  实现音视频的数据的采集 *  实现音视频数据的编码,视频编码成 ...

  3. IOS 音频开发文件大小计算

    音频基础知识 音频文件计算大小 音频转码 标签(空格分隔): 调查 IOS音频 https://developer.apple.com/library/ios/documentation/MusicA ...

  4. iOS音频处理

    ios音频处理 1. iOS底层音频处理技术(带源代码) http://www.cocoachina.com/ios/20111122/3563.html 2.ios 音频入门 http://blog ...

  5. 了解iOS消息推送一文就够:史上最全iOS Push技术详解

    本文作者:陈裕发, 腾讯系统测试工程师,由腾讯WeTest整理发表. 1.引言 开发iOS系统中的Push推送,通常有以下3种情况: 1)在线Push:比如QQ.微信等IM界面处于前台时,聊天消息和指 ...

  6. IOS音频1:之采用四种方式播放音频文件(一)AudioToolbox AVFoundation OpenAL AUDIO QUEUE

    本文转载至 http://blog.csdn.net/u014011807/article/details/40187737 在本卷你可以学到什么? 采用四种方法设计应用于各种场合的音频播放器: 基于 ...

  7. 内行看门道:看似“佛系”的《QQ炫舞手游》,背后的音频技术一点都不简单

    欢迎大家前往腾讯云+社区,获取更多腾讯海量技术实践干货哦~ 本文由腾讯游戏云发表于云+社区专栏 3月14日,腾讯旗下知名手游《QQ炫舞》正式上线各大应用商店,并迅速登上App Store免 ...

  8. 转: HTTP Live Streaming直播(iOS直播)技术分析与实现

    http://www.cnblogs.com/haibindev/archive/2013/01/30/2880764.html HTTP Live Streaming直播(iOS直播)技术分析与实现 ...

  9. iOS 音频开发

      音频基础知识 组成 音频文件的组成:文件格式(或者音频容器) + 数据格式(或者音频编码). 文件格式(或音频容器)是用于形容文件本身的格式. 我们可以通过多种不同的方法为真正的音频数据编码.例如 ...

随机推荐

  1. 【Beta】Scrum01

    Info 时间:2016.11.26 21:30 时长:10min 地点:大运村1号公寓5楼楼道 类型:日常Scrum会议 NXT:2016.11.28 21:30 Task Report Name ...

  2. HTML5系列三(多媒体播放、本地存储、本地数据库、离线应用)

    各浏览器对编码格式的支持情况 audio和video元素的属性介绍 1.src:媒体数据的URL地址 <video src="pr6.mp4"></video> ...

  3. 深入JVM-有关String的内存泄漏

    什么是内存泄漏?所谓内存泄漏,就是由于疏忽或错误造成程序未能释放已经不再使用的内存的情况,他并不是说物理内存消失了,而是指由于不再使用的对象占据了内存不被释放,而导致可用内存不断减小,最终有可能导致内 ...

  4. 深入JVM-性能监控工具

    一.Linux下的性能监控工具 1.1 显式系统整体资源使用情况-top命令 top命令的输出可以分为两个部分:前半部分是系统统计信息,后半部分是进程信息. 在统计信息中,第一行是任务队列信息,他的结 ...

  5. 开发板ping不通主机和虚拟机的看过来(转载)!

    前几天在做uboot下用tftp下载文件到开发板的实验时,为了能解决开发板ping不通主机和虚拟机的问题,可谓绞尽脑汁,正所谓久病成医,虽然为了这一小问题废了我那么长时间,但我在解决问题的同时也学到了 ...

  6. 在Linux下安装和使用MySQL

    [简 介] 想使用Linux已经很长时间了,由于没有硬性任务一直也没有系统学习,近日由于工作需要必须使用Linux下的MySQL.本以为有Windows下使用SQL Server的经验,觉得在Linu ...

  7. Java数据结构的特点

  8. ajax传输 基础一

    一个简单页面的传输 index.php <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" " ...

  9. CLR

    [精]步步为营 C# 技术漫谈 三.公共语言运行库(CLR) http://www.cnblogs.com/springyangwc/archive/2011/06/12/2079250.html [ ...

  10. 常用JS效果 不断进步贴 不停更新~ 纪念用~

    常用效果 JS  都是Jquery  没有特殊说明 1.选项卡  用的JQuery  以后学好点再来对比 看下 /* * @parent 最外层父级元素 * @EventElement 触发事件元素 ...