大约一年来,我一直在努力解决这个问题,试图找出我的问题,并把它代表给别人看。
我在开发一个类似“GarageBand”、依赖录音循环的应用程序。也就是说,我想让用户精确地录制 8 拍,然后能够循环播放这段录音。我同时会为用户播放节拍器(用户戴着耳机听节拍器,同时用设备上的麦克风录音)。
我可以设法打开录音大约4.8秒(.6*8拍),计时器说它运行了4.8秒,但是我的录音总是比4.8短一点。它就像4.78,或4.71,它会导致循环失控。
我试验了AVAudioRecorder、AudioQueue和AudioUnits,认为后一种方法可能会解决我的问题。
我使用一个每 0.6 秒触发一次的 NSTimer 为节拍器播放短促的哔声。数完 4 拍后,节拍器计时器的回调函数打开录音机,并在 4.8 秒后停止录音。
我用时间间隔来测量节拍器计时器实际运行的时长(看起来很准,约 4.800xxx 秒),并将其与音频文件的时长进行比较,而两者总是不同。
我希望我可以附加我的项目,但我想我将不得不满足附加我的头和实现。要进行测试,您必须创建一个具有以下IB特性的项目:
录制,播放,停止按钮歌曲/跟踪持续时间标签计时器持续时间标签调试标签
如果你启动应用程序,然后点击录音,会先给你数 4 拍作为前奏,然后录音机开始录音。用手指敲桌子,直到录音停止——再过 8 拍(总共 12 拍)之后,录音就会停止。
您可以在显示中看到,录制的音轨略短于4.8秒,而在某些情况下,则要短得多,从而导致音频不能正常循环。
有人知道我能做些什么来加强这一点吗?感谢您的阅读。
这是我的代码:
//
// ViewController.h
// speakagain
//
// Created by NOTHING on 2014-03-18.
//
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import "CoreAudio/CoreAudioTypes.h"
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <AVFoundation/AVFoundation.h>

// FIX: kNumberBuffers is used by AQRecorderState.mBuffers below but was never
// defined anywhere in the listing (the header could not compile). Apple's
// Audio Queue recording sample uses 3 buffers.
#define kNumberBuffers 3

/// Records 8 metronome beats (0.6 s x 8 = 4.8 s) of microphone audio via an
/// Audio Queue after a 4-beat count-in, then loops the resulting file with
/// AVAudioPlayer. See ViewController.m for the implementation.
@interface ViewController : UIViewController
{
    IBOutlet UIButton *btnRecord;
    IBOutlet UIButton *btnPlay;
    IBOutlet UIButton *btnStop;
    IBOutlet UILabel *debugLabel;
    IBOutlet UILabel *timerDuration;
    IBOutlet UILabel *songDuration;

    // Recorder state shared with the Audio Queue C callback
    // (Apple's AQRecorderState boilerplate).
    struct AQRecorderState {
        AudioStreamBasicDescription mDataFormat;       // capture format (linear PCM)
        AudioQueueRef mQueue;                          // the recording queue
        AudioQueueBufferRef mBuffers[kNumberBuffers];  // reusable capture buffers
        AudioFileID mAudioFile;                        // destination .caf file
        UInt32 bufferByteSize;                         // size of each buffer
        SInt64 mCurrentPacket;                         // next packet index to write
        bool mIsRunning;                               // true while recording
    };
    struct AQRecorderState aqData;

    AVAudioPlayer *audioPlayer;   // loops the finished recording
    NSString *songName;           // file name inside the Documents directory
    NSTimer *recordTimer;         // one-shot timer that stops recording after 4.8 s
    NSTimer *metroTimer;          // repeating 0.6 s metronome tick
    NSTimeInterval startTime, endTime, elapsedTime;
    int inputBuffer;
    int beatNumber;               // 1-12; recording starts on beat 5
}

@property (nonatomic, retain) IBOutlet UIButton *btnRecord;
@property (nonatomic, retain) IBOutlet UIButton *btnPlay;
@property (nonatomic, retain) IBOutlet UIButton *btnStop;
@property (nonatomic, retain) IBOutlet UILabel *debugLabel;
@property (nonatomic, retain) IBOutlet UILabel *timerDuration;
@property (nonatomic, retain) IBOutlet UILabel *songDuration;

- (IBAction) record;
- (IBAction) stop;
- (IBAction) play;

// Audio Queue input callback: writes captured packets to the audio file.
static void HandleInputBuffer (void *aqData,AudioQueueRef inAQ,AudioQueueBufferRef inBuffer,const AudioTimeStamp *inStartTime, UInt32 inNumPackets,const AudioStreamPacketDescription *inPacketDesc);

@end
// 执行情况 (implementation follows in ViewController.m):
//
// ViewController.m
// speakagain
//
// Created by NOTHING on 2014-03-18.
//
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
@synthesize btnPlay, btnRecord,btnStop,songDuration, timerDuration, debugLabel;
- (void)viewDidLoad
{
    // FIX: call super first (the original called it last), per UIKit
    // lifecycle convention.
    [super viewDidLoad];
    debugLabel.text = @"";
    // FIX: the original allocated an empty NSString and immediately
    // overwrote it with the literal — the alloc/init was dead code.
    // Only the file name is stored; the full Documents path is built
    // where it is needed.
    songName = @"TestingQueue.caf";
}
- (void)prepareAudioQueue
{
    // Describe the capture format: 16-bit mono linear PCM at 44.1 kHz.
    inputBuffer = 0;
    aqData.mDataFormat.mFormatID = kAudioFormatLinearPCM;
    aqData.mDataFormat.mSampleRate = 44100.0;
    aqData.mDataFormat.mChannelsPerFrame = 1;
    aqData.mDataFormat.mBitsPerChannel = 16;
    aqData.mDataFormat.mBytesPerPacket =
        aqData.mDataFormat.mBytesPerFrame = aqData.mDataFormat.mChannelsPerFrame * sizeof (SInt16);
    aqData.mDataFormat.mFramesPerPacket = 1;
    AudioFileTypeID fileType = kAudioFileCAFType;
    aqData.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsBigEndian
                                    | kLinearPCMFormatFlagIsSignedInteger
                                    | kLinearPCMFormatFlagIsPacked;

    // Create the input queue; HandleInputBuffer receives filled buffers.
    AudioQueueNewInput (&aqData.mDataFormat, HandleInputBuffer, &aqData,
                        NULL, kCFRunLoopCommonModes, 0, &aqData.mQueue);

    // Read back the fully specified format the queue actually uses.
    UInt32 dataFormatSize = sizeof (aqData.mDataFormat);
    AudioQueueGetProperty (aqData.mQueue, kAudioQueueProperty_StreamDescription,
                           &aqData.mDataFormat, &dataFormatSize);

    // Remove any previous take so each recording starts from an empty file.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *txtPath = [documentsDirectory stringByAppendingPathComponent:songName];
    NSLog(@"INITIALIZING FILE");
    if ([fileManager fileExistsAtPath:txtPath]) {
        NSLog(@"PREVIOUS FILE REMOVED");
        [fileManager removeItemAtPath:txtPath error:nil];
    }

    const char *filePath = [txtPath UTF8String];
    CFURLRef audioFileURL = CFURLCreateFromFileSystemRepresentation ( NULL,(const UInt8 *) filePath,strlen (filePath),false );
    AudioFileCreateWithURL (audioFileURL, fileType, &aqData.mDataFormat,
                            kAudioFileFlags_EraseFile, &aqData.mAudioFile );
    // FIX: CFURLCreateFromFileSystemRepresentation follows the CF "Create"
    // rule; the original leaked this CFURLRef on every recording.
    CFRelease(audioFileURL);

    // 0.5 s per buffer; allocate and prime all capture buffers.
    DeriveBufferSize (aqData.mQueue, aqData.mDataFormat, 0.5, &aqData.bufferByteSize);
    for (int i = 0; i < kNumberBuffers; ++i)
    {
        AudioQueueAllocateBuffer (aqData.mQueue, aqData.bufferByteSize, &aqData.mBuffers[i]);
        AudioQueueEnqueueBuffer (aqData.mQueue, aqData.mBuffers[i], 0, NULL);
    }
}
- (void) metronomeFire
{
    // Beats 1-4: count-in. Beat 5: start recording. Beats 6-11: metronome
    // keeps ticking during the take. Beat 12: last tick, stop the metronome
    // (recordTimer stops the queue independently after 4.8 s).
    if (beatNumber < 5)
    {
        // Count-in: beep only; recording has not started yet.
        // FIX: in the original, beats 1-4 also fell into the *separate*
        // "else if (beatNumber < 12)" branch below and beeped TWICE per
        // beat. The chain is now mutually exclusive.
        debugLabel.text = @"count in (1,2,3,4)";
        [self playSound];
    }
    else if (beatNumber == 5)
    {
        // Arm the queue, schedule the one-shot stop timer, begin recording.
        aqData.mCurrentPacket = 0;
        aqData.mIsRunning = true;
        startTime = [NSDate timeIntervalSinceReferenceDate];
        recordTimer = [NSTimer scheduledTimerWithTimeInterval:4.8 target:self selector:@selector(killTimer) userInfo:nil repeats:NO];
        AudioQueueStart (aqData.mQueue, NULL);
        debugLabel.text = @"Recording for 8 beats (1,2,3,4 1,2,3,4)";
        [self playSound];
    }
    else if (beatNumber < 12)
    {
        // Beats 6-11: metronome continues while recording.
        [self playSound];
    }
    else if (beatNumber == 12)
    {
        // Final beat: stop the repeating metronome timer.
        [metroTimer invalidate]; metroTimer = nil;
        [self playSound];
    }
    beatNumber++;
}
- (IBAction) play
{
    // Build the full path to the recorded file inside Documents.
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *txtPath = [documentsDirectory stringByAppendingPathComponent:songName];
    // FIX: no need to round-trip the path through stringWithFormat:@"%@".
    NSURL *url = [NSURL fileURLWithPath:txtPath];

    // Tear down any previous player before creating a new one.
    if (audioPlayer)
    {
        [audioPlayer stop];
        audioPlayer = nil;
    }

    NSError *error = nil;
    audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
    if (audioPlayer == nil)
    {
        NSLog(@"%@",[error description]);
    }
    else
    {
        // FIX: configure looping *before* starting playback so the player
        // is already in loop mode on the first pass (-1 = loop forever).
        [audioPlayer setNumberOfLoops:-1];
        [audioPlayer play];
    }
}
- (void) killTimer
{
// One-shot NSTimer callback scheduled in metronomeFire, fires ~4.8 s after
// recording starts; delegates to -stop.
// NOTE(review): NSTimer guarantees only that it fires no *earlier* than the
// interval, and firing depends on run-loop scheduling. It cannot provide
// sample-accurate stop times, which is a likely contributor to the recorded
// file's duration never matching 4.8 s exactly.
[self stop];
}
- (IBAction) stop
{
// Stops either playback (when the loop player is active) or the current
// recording; after a recording, compares the wall-clock time the timer ran
// with the duration of the audio actually written to disk.
if (audioPlayer)
{
[audioPlayer stop];
audioPlayer = nil;
}
else
{
// Stop the metronome if it is still ticking.
if(metroTimer)
{
[metroTimer invalidate];metroTimer = nil;
}
//Stop the audio queue. NOTE(review): with inImmediate == true the queue
//stops at once, and a partially filled buffer that has not yet been handed
//to HandleInputBuffer is never written to the file. That, plus NSTimer
//firing latency, plausibly explains why the file is slightly shorter than
//the 4.8 s the timer measured -- TODO confirm against Apple's AudioQueue
//documentation (AudioQueueStop / AudioQueueFlush).
AudioQueueStop (aqData.mQueue,true);
aqData.mIsRunning = false;
AudioQueueDispose (aqData.mQueue,true);
AudioFileClose (aqData.mAudioFile);
//Get elapsed time of timer (wall-clock span since recording started)
endTime = [NSDate timeIntervalSinceReferenceDate];
elapsedTime = endTime - startTime;
//Get elapsed time of audio file
NSArray *pathComponents = [NSArray arrayWithObjects:
[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
songName,
nil];
NSURL *audioFileURL = [NSURL fileURLWithPathComponents:pathComponents];
//NOTE(review): without AVURLAssetPreferPreciseDurationAndTimingKey the
//asset's reported duration may be an estimate -- TODO confirm.
AVURLAsset* audioAsset = [AVURLAsset URLAssetWithURL:audioFileURL options:nil];
CMTime audioDuration = audioAsset.duration;
float audioDurationSeconds = CMTimeGetSeconds(audioDuration);
//Log values
NSLog(@"Track Duration: %f",audioDurationSeconds);
NSLog(@"Timer Duration: %.6f", elapsedTime);
//Show values on GUI too
songDuration.text = [NSString stringWithFormat: @"Track Duration: %f",audioDurationSeconds];
timerDuration.text = [NSString stringWithFormat:@"Timer Duration: %@",[NSString stringWithFormat: @"%.6f", elapsedTime]];
debugLabel.text = @"Why is the duration of the track less than the duration the timer ran?";
}
}
-(void) playSound
{
    // Plays one metronome blip.
    // FIX: the original created a brand-new SystemSoundID on every beat and
    // never called AudioServicesDisposeSystemSoundID, leaking one system
    // sound object per tick. Create the sound once and reuse it.
    static SystemSoundID soundID = 0;
    if (soundID == 0)
    {
        NSString *path = [[NSBundle mainBundle] pathForResource:@"blip2" ofType:@"aif"];
        AudioServicesCreateSystemSoundID((__bridge CFURLRef)[NSURL fileURLWithPath:path], &soundID);
    }
    AudioServicesPlaySystemSound (soundID);
}
- (IBAction) record
{
    // FIX: the "safe guard" teardown ran *after* prepareAudioQueue in the
    // original. If a previous queue was still running (mIsRunning true),
    // prepareAudioQueue had already overwritten aqData.mQueue, so the guard
    // stopped and disposed the freshly created queue instead of the stale
    // one. Tear down first, then create the new queue.
    if(aqData.mIsRunning)
    {
        AudioQueueStop (aqData.mQueue,true);
        aqData.mIsRunning = false;
        AudioQueueDispose (aqData.mQueue,true);
        AudioFileClose (aqData.mAudioFile);
    }

    [self prepareAudioQueue];
    songDuration.text = @"";
    timerDuration.text = @"";

    // Beat 1 is the first count-in beat; metronomeFire starts recording on
    // beat 5 and stops the metronome on beat 12.
    beatNumber = 1;
    startTime = [NSDate timeIntervalSinceReferenceDate];
    metroTimer = [NSTimer scheduledTimerWithTimeInterval:.6 target:self selector:@selector(metronomeFire) userInfo:nil repeats:YES];
}
// Audio Queue input callback (Apple boilerplate): invoked on the queue's
// internal thread whenever a capture buffer is filled. Writes the packets to
// the destination file and re-enqueues the buffer while recording is active.
static void HandleInputBuffer (void *aqData,AudioQueueRef inAQ,AudioQueueBufferRef inBuffer,const AudioTimeStamp *inStartTime,UInt32 inNumPackets,const AudioStreamPacketDescription *inPacketDesc)
{
//boiler plate
NSLog(@"HandleInputBuffer");
struct AQRecorderState *pAqData = (struct AQRecorderState *) aqData;
// For constant-bitrate formats the queue may report 0 packets; derive the
// packet count from the buffer's byte size instead.
if (inNumPackets == 0 && pAqData->mDataFormat.mBytesPerPacket != 0)
inNumPackets = inBuffer->mAudioDataByteSize / pAqData->mDataFormat.mBytesPerPacket;
// Append the captured packets to the file and advance the write cursor
// only on success.
if (AudioFileWritePackets (pAqData->mAudioFile,false,inBuffer->mAudioDataByteSize,inPacketDesc,pAqData->mCurrentPacket,&inNumPackets,inBuffer->mAudioData) == noErr)
{
pAqData->mCurrentPacket += inNumPackets;
}
// Once recording has been stopped, do not put the buffer back on the queue.
if (pAqData->mIsRunning == 0)
return;
AudioQueueEnqueueBuffer (pAqData->mQueue,inBuffer,0,NULL);
}
void DeriveBufferSize(AudioQueueRef audioQueue,AudioStreamBasicDescription ASBDescription,Float64 seconds,UInt32 *outBufferSize)
{
    // Computes a buffer size large enough to hold `seconds` of audio in the
    // given format, capped at 0x50000 bytes (Apple Audio Queue boilerplate).
    static const int maxBufferSize = 0x50000;

    int packetSize = ASBDescription.mBytesPerPacket;
    if (packetSize == 0)
    {
        // Variable-bitrate format: ask the queue for its largest packet.
        UInt32 propertySize = sizeof(packetSize);
        AudioQueueGetProperty(audioQueue, kAudioQueueProperty_MaximumOutputPacketSize, &packetSize, &propertySize);
        NSLog(@"max buffer = %d", packetSize);
    }

    Float64 requestedBytes = ASBDescription.mSampleRate * packetSize * seconds;
    if (requestedBytes < maxBufferSize)
    {
        *outBufferSize = (UInt32)requestedBytes;
    }
    else
    {
        *outBufferSize = maxBufferSize;
    }
}
OSStatus SetMagicCookieForFile (AudioQueueRef inQueue, AudioFileID inFile)
{
    // Copies the queue's magic cookie (codec configuration data) into the
    // audio file. Formats without a cookie succeed as a no-op
    // (Apple Audio Queue boilerplate).
    UInt32 cookieSize = 0;
    if (AudioQueueGetPropertySize (inQueue, kAudioQueueProperty_MagicCookie, &cookieSize) != noErr)
    {
        // No cookie available for this format; nothing to copy.
        return noErr;
    }

    char *cookieData = (char *) malloc (cookieSize);
    OSStatus result = noErr;
    if (AudioQueueGetProperty (inQueue, kAudioQueueProperty_MagicCookie, cookieData, &cookieSize) == noErr)
    {
        result = AudioFileSetProperty (inFile, kAudioFilePropertyMagicCookieData, cookieSize, cookieData);
    }
    free (cookieData);
    return result;
}
- (void)didReceiveMemoryWarning
{
// Standard UIViewController hook; no cached resources are released here.
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
@end
发布于 2014-03-21 16:06:55
这是一个很大的话题,所以我怀疑你是否能得到足够大的答案来重新架构你所提供的代码。然而,我可以给你的链接,将提供绝大多数您所需的。
首先,由于同步问题,NSTimer永远无法工作。另外,忘记AudioQueue和AVAudioRecorder吧。只有AudioUnit级别足够满足您的需要。
看看我的回答:
但真正的金矿——也是你需要深入掌握的知识——是 A Tasty Pixel 的博客。A Tasty Pixel 是循环录音应用 Loopy 的开发者,也乐于分享非常深入的音频开发知识。
请参见:
一种简单、快速的音频处理循环缓冲实现
开发循环,第2部分:实现
和
使用RemoteIO音频单元
最后,确保您熟悉包、帧、样本等,所有的东西都需要完美地同步。
https://stackoverflow.com/questions/22563522
复制相似问题