I have been trying to build a Vine-style video app using AVFoundation. I can now save video through AVCaptureVideoDataOutput and play it back, but somehow the audio does not work and I don't know why. I'm a beginner at iOS development, so I may not be explaining this clearly. I hope you understand what I'm trying to do and can give me some advice: how do I write video and audio at the same time using AVCaptureVideoDataOutput and AVCaptureAudioDataOutput?

This is the code I'm using.

Setting up AVCaptureVideoDataOutput and AVCaptureAudioDataOutput:

// Video data output delivers uncompressed frames to the delegate
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[CaptureSession addOutput:videoDataOutput];

videoDataOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                 nil];

dispatch_queue_t videoQueue = dispatch_queue_create("VideoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];

// Audio data output delivers PCM sample buffers to the delegate
AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
[CaptureSession addOutput:audioDataOutput];

dispatch_queue_t audioQueue = dispatch_queue_create("AudioQueue", NULL);
[audioDataOutput setSampleBufferDelegate:self queue:audioQueue];
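
For the audio data output to deliver any sample buffers at all, the capture session also needs a microphone device input, and the delegate class must adopt both AVCaptureVideoDataOutputSampleBufferDelegate and AVCaptureAudioDataOutputSampleBufferDelegate. The session setup is not shown in the question; a minimal sketch, assuming the microphone input still has to be attached to the same CaptureSession ivar used above, looks like this:

// Sketch: attach the microphone input to the session
// (assumes no audio input has been added elsewhere)
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *audioError = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice
                                                                          error:&audioError];
if (audioInput && [CaptureSession canAddInput:audioInput]) {
    [CaptureSession addInput:audioInput];
}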

Setting up AVAssetWriter and AVAssetWriterInput:

- (void)makeWriter {
    pathString = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/capture.mov"];
    exportURL = [NSURL fileURLWithPath:pathString];

    // Remove any previous recording at the same path
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportURL.path]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportURL.path error:nil];
    }

    NSError *error;
    writer = [[AVAssetWriter alloc] initWithURL:exportURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];

    NSDictionary *videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                                  AVVideoCodecH264, AVVideoCodecKey,
                                  [NSNumber numberWithInt:1280], AVVideoWidthKey,
                                  [NSNumber numberWithInt:720], AVVideoHeightKey,
                                  nil];

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:videoSetting];
    videoWriterInput.expectsMediaDataInRealTime = YES;

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = nil;
    // Both kinds of audio settings cause the output video file to be corrupted.
    if (NO) {
        // AAC: should work from the iPhone 3GS and 3rd generation iPod touch onward
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    } else {
        // Apple Lossless: should work on any device, but requires more space
        audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                               [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                               nil];
    }

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                          outputSettings:audioOutputSettings];
    audioWriterInput.expectsMediaDataInRealTime = YES;

    // Add both inputs to the writer
    [writer addInput:videoWriterInput];
    [writer addInput:audioWriterInput];
}
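
The question does not show how the recording is stopped. For capture.mov to be playable, both writer inputs have to be marked finished and the writer has to finish asynchronously. A minimal sketch of a hypothetical stopRecording method (the name is an assumption, not part of the question's code) could look like this:

// Sketch: hypothetical stopRecording method, not shown in the question
- (void)stopRecording {
    isRecording = NO;
    [videoWriterInput markAsFinished];
    [audioWriterInput markAsFinished];
    [writer finishWritingWithCompletionHandler:^{
        // capture.mov in Documents is now complete and can be played back
        NSLog(@"Finished writing to %@", exportURL);
    }];
}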

And finally, the captureOutput: code:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (isPause && isRecording) { return; }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) { return; }

    if (isRecording == YES) {
        isWritting = YES;
        if (writer.status != AVAssetWriterStatusWriting) {
            [writer startWriting];
            [writer startSessionAtSourceTime:kCMTimeZero];
        }

        // Only video samples are ever appended; audio buffers are never written
        if ([videoWriterInput isReadyForMoreMediaData]) {
            CFRetain(sampleBuffer);
            CMSampleBufferRef newSampleBuffer = [self offsetTimmingWithSampleBufferForVideo:sampleBuffer];
            [videoWriterInput appendSampleBuffer:newSampleBuffer];

            CFRelease(sampleBuffer);
            CFRelease(newSampleBuffer);
        }
        writeFrames++;
    }
}
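
As posted, this callback only ever appends to videoWriterInput, so no audio samples reach the writer. Below is a minimal sketch of how the same delegate method could route buffers to the matching writer input; it assumes videoDataOutput and audioDataOutput are kept as ivars when the outputs are created, and it is a sketch of one possible approach, not the question's code:

// Sketch: route buffers to the matching writer input
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!isRecording || isPause) { return; }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) { return; }

    if (writer.status != AVAssetWriterStatusWriting) {
        [writer startWriting];
        // Start the session at the first buffer's own timestamp so audio and
        // video share one timeline (instead of kCMTimeZero)
        [writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }

    if (captureOutput == videoDataOutput) {
        if ([videoWriterInput isReadyForMoreMediaData]) {
            [videoWriterInput appendSampleBuffer:sampleBuffer];
        }
    } else if (captureOutput == audioDataOutput) {
        if ([audioWriterInput isReadyForMoreMediaData]) {
            [audioWriterInput appendSampleBuffer:sampleBuffer];
        }
    }
}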

- (CMSampleBufferRef)offsetTimmingWithSampleBufferForVideo:(CMSampleBufferRef)sampleBuffer
{
    CMSampleBufferRef newSampleBuffer;
    CMSampleTimingInfo sampleTimingInfo;
    // Re-stamp each frame onto a fixed 30 fps timeline driven by the frame counter
    sampleTimingInfo.duration = CMTimeMake(1, 30);
    sampleTimingInfo.presentationTimeStamp = CMTimeMake(writeFrames, 30);
    sampleTimingInfo.decodeTimeStamp = kCMTimeInvalid;

    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                          sampleBuffer,
                                          1,
                                          &sampleTimingInfo,
                                          &newSampleBuffer);

    return newSampleBuffer;
}
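
Re-stamping the video onto a synthetic frame-counter timeline while the audio keeps its original capture timestamps would leave the two streams on different timelines. One alternative, sketched below under assumptions not present in the question, is to shift every buffer (video and audio) by the timestamp of the first captured buffer; firstSourceTime is a hypothetical CMTime ivar stored when recording starts.

// Sketch: shift a buffer's timing so the movie timeline starts at zero
// (firstSourceTime is an assumed ivar, not part of the question's code)
- (CMSampleBufferRef)offsetTimingWithSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Ask how many timing entries the buffer carries (audio buffers can have many)
    CMItemCount count;
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &count);

    CMSampleTimingInfo *timingInfo = malloc(sizeof(CMSampleTimingInfo) * count);
    CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, count, timingInfo, &count);

    // Subtract the first captured timestamp from every presentation time
    for (CMItemCount i = 0; i < count; i++) {
        timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp,
                                                             firstSourceTime);
        timingInfo[i].decodeTimeStamp = kCMTimeInvalid;
    }

    CMSampleBufferRef newSampleBuffer;
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                          sampleBuffer,
                                          count,
                                          timingInfo,
                                          &newSampleBuffer);
    free(timingInfo);
    return newSampleBuffer;
}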
