
How to read and write audio files using NSInputStream and NSOutputStream

I record and save a CAF audio file using AVAudioRecorder. When I convert the (300 MB) .caf file to .wav format, the application crashes with memory warnings (Received memory warning. Level=1 and Received memory warning. Level=2). How can I use NSInputStream to read the audio file and NSOutputStream to write it?

-(void) convertToWav:(NSNumber *) numIndex
{
// set up an AVAssetReader to read from the iPod Library
int index = [numIndex integerValue];

NSString *strName;
NSString *strFilePath1 =[delegate.strCassettePathSide stringByAppendingPathComponent:@"audio_list.plist"];

bool bTapeInfoFileExists = [[NSFileManager defaultManager] fileExistsAtPath:strFilePath1];

if (bTapeInfoFileExists)
{
    NSMutableDictionary *dictInfo = [[NSMutableDictionary alloc] initWithContentsOfFile:strFilePath1];

    if ([dictInfo valueForKey:@"lastName"])
        strName =[dictInfo valueForKey:@"lastName"];
    else
        strName= [delegate.arrNameList objectAtIndex:0];
}
else
{
    strName = [delegate.arrNameList objectAtIndex:0];
}

NSString *cafFilePath =[[delegate.arrSessionList objectAtIndex:index] valueForKey:@"path"];
NSURL *assetURL = [NSURL fileURLWithPath:cafFilePath];
AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

NSError *assetError = nil;
AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset
                                                           error:&assetError];
if (assetError) {
    NSLog (@"error: %@", assetError);
    return;
}

AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput
                                          assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks
                                          audioSettings: nil];
if (! [assetReader canAddOutput: assetReaderOutput]) {
    NSLog (@"can't add reader output... die!");
    return;
}
[assetReader addOutput: assetReaderOutput];

NSString *strWavFileName = [NSString stringWithFormat:@"%@.wav",[[cafFilePath lastPathComponent] stringByDeletingPathExtension]];
NSString *wavFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:strWavFileName];

if ([[NSFileManager defaultManager] fileExistsAtPath:wavFilePath])
{
    [[NSFileManager defaultManager] removeItemAtPath:wavFilePath error:nil];
}
NSURL *exportURL = [NSURL fileURLWithPath:wavFilePath];
AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:exportURL
                                                      fileType:AVFileTypeWAVE
                                                         error:&assetError];
if (assetError)
{
    NSLog (@"error: %@", assetError);
    return;
}

AppDelegate *appDelegate =[[UIApplication sharedApplication]delegate];
int nSampleRate=[[appDelegate.dictWAVQuality valueForKey:@"samplerate"] integerValue];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                [NSNumber numberWithFloat:nSampleRate], AVSampleRateKey,
                                [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                                [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                [NSNumber numberWithBool:NO],AVLinearPCMIsFloatKey,
                                [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                nil];
AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                          outputSettings:outputSettings];
if ([assetWriter canAddInput:assetWriterInput])
{
    [assetWriter addInput:assetWriterInput];
}
else
{
    NSLog(@"can't add asset writer input... die!");
    return;
}

assetWriterInput.expectsMediaDataInRealTime = NO;

[assetWriter startWriting];
[assetReader startReading];

AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
CMTime startTime = CMTimeMake (0, soundTrack.naturalTimeScale);
[assetWriter startSessionAtSourceTime: startTime];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);

[assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue
                                        usingBlock: ^
 {
     while (assetWriterInput.readyForMoreMediaData)
     {
         CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
         if (nextBuffer)
         {
             // append buffer
             [assetWriterInput appendSampleBuffer: nextBuffer];
             convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
         }
         else
         {
             [assetWriterInput markAsFinished];
             //              [assetWriter finishWriting];
             [assetReader cancelReading];

             [dictTemp setValue:wavFilePath forKey:@"path"];
             [dictTemp setValue:nil forKey:@"progress"];
             [delegate.arrSessionList replaceObjectAtIndex:index withObject:dictTemp];

             NSString *strListFilePath = [delegate.strCassettePathSide stringByAppendingPathComponent:@"audiolist.plist"];
             [delegate.arrSessionList writeToFile:strListFilePath atomically:YES];
             break;
         }
     }
 }];
}
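
For reference, reading and writing a file in fixed-size chunks with NSInputStream and NSOutputStream looks roughly like the sketch below, so only one small buffer is ever held in memory. This is a minimal illustration, not taken from the original post; the method name and paths are placeholders, and a raw byte copy like this does not convert CAF to WAV (the AVAssetReader/AVAssetWriter code above is still what performs the conversion).

- (void)copyFileAtPath:(NSString *)srcPath toPath:(NSString *)dstPath
{
    NSInputStream *input = [NSInputStream inputStreamWithFileAtPath:srcPath];
    NSOutputStream *output = [NSOutputStream outputStreamToFileAtPath:dstPath append:NO];
    [input open];
    [output open];

    // Only one 64 KB chunk is ever held in memory at a time.
    uint8_t buffer[64 * 1024];
    NSInteger bytesRead;
    while ((bytesRead = [input read:buffer maxLength:sizeof(buffer)]) > 0)
    {
        NSInteger totalWritten = 0;
        while (totalWritten < bytesRead)
        {
            // The output stream may accept fewer bytes than offered, so keep writing.
            NSInteger written = [output write:buffer + totalWritten
                                    maxLength:bytesRead - totalWritten];
            if (written <= 0)
            {
                NSLog(@"write error: %@", output.streamError);
                [input close];
                [output close];
                return;
            }
            totalWritten += written;
        }
    }
    if (bytesRead < 0)
        NSLog(@"read error: %@", input.streamError);

    [input close];
    [output close];
}

A chunked copy like this keeps memory flat even for the 300 MB file in the question, but the memory warnings during the conversion itself are more likely caused by the unreleased sample buffers discussed in the answer below.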

I assume you are using ARC; even under ARC, you need to release the buffer.

Core Foundation objects are not released by ARC, per "The Create Rule" in the Memory Management Programming Guide for Core Foundation. You have to release the CMSampleBufferRef obtained from -(CMSampleBufferRef)copyNextSampleBuffer, or you will get a memory leak.

https://developer.apple.com/library/mac/documentation/CoreFoundation/Conceptual/CFMemoryMgmt/Concepts/Ownership.html#//apple_ref/doc/uid/20001148-103029


if (nextBuffer)
{
    // append buffer
    [assetWriterInput appendSampleBuffer: nextBuffer];
    convertedByteCount += CMSampleBufferGetTotalSampleSize (nextBuffer);
    CFRelease(nextBuffer);
}
