2013-08-06 13 views
9

Muszę zaimplementować funkcję, aby wielokrotnie wstrzymywać i wznawiać przechwytywanie wideo w jednej sesji, tak aby każdy nowy segment (przechwycony po każdej pauzie) był dodawany do tego samego pliku wideo, z użyciem AVFoundation. Obecnie za każdym razem, gdy naciskam "stop", a następnie ponownie "nagrywaj", aplikacja po prostu zapisuje nowy plik wideo w katalogu dokumentów mojego iPhone'a i rozpoczyna przechwytywanie do nowego pliku. Muszę mieć możliwość naciskania przycisku "nagrywaj/zatrzymaj", przechwytywania wideo i audio tylko wtedy, gdy nagrywanie jest aktywne, a po naciśnięciu przycisku "done" otrzymać pojedynczy plik AV ze wszystkimi segmentami razem. Wszystko to musi się dziać w ramach tej samej sesji przechwytywania/podglądu. (Tytuł: Wstrzymywanie i wznawianie przechwytywania wideo przy użyciu AVCaptureMovieFileOutput i AVCaptureVideoDataOutput w iOS)

Nie używam AVAssetWriterInput.

Jedyny sposób, w jaki mogę to wypróbować, to naciśnięcie przycisku "done", pobranie każdego pojedynczego pliku wyjściowego i połączenie ich w jeden plik.

Ten kod działa dla iOS 5, ale nie dla iOS 6. W iOS 6, gdy po raz pierwszy wstrzymuję nagrywanie (zatrzymuję nagrywanie), metoda delegata AVCaptureFileOutputRecordingDelegate (captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:) jest wywoływana poprawnie. Gdy jednak potem wznawiam nagrywanie, ta sama metoda delegata jest wywoływana ponownie od razu, ale nie jest już wywoływana w momencie zatrzymania nagrywania.

Potrzebuję rozwiązania tego problemu. Proszę pomóż mi.

//View LifeCycle 
- (void)viewDidLoad 
{ 
    [super viewDidLoad]; 

    // One UUID-based name for the final merged movie produced on "done".
    self.finalRecordedVideoName = [self stringWithNewUUID]; 

    // Per-segment bookkeeping: file names (for cleanup) and URLs (for merging).
    arrVideoName = [[NSMutableArray alloc] initWithCapacity:0]; 
    arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0]; 

    CaptureSession = [[AVCaptureSession alloc] init]; 

    //----- ADD VIDEO INPUT -----
    captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
    if ([captureDevices count] > 0) 
    { 
        // BUGFIX: the original read an *uninitialized* NSError and tested
        // `!error`. The error out-parameter is only defined when the init
        // returns nil, so initialize it and test the returned object instead.
        NSError *error = nil; 
        VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error]; 
        if (VideoInputDevice) 
        { 
            if ([CaptureSession canAddInput:VideoInputDevice]) 
                [CaptureSession addInput:VideoInputDevice]; 
            else 
                NSLog(@"Couldn't add video input"); 
        } 
        else 
        { 
            NSLog(@"Couldn't create video input: %@", error); 
        } 
    } 
    else 
    { 
        NSLog(@"Couldn't create video capture device"); 
    } 

    //----- ADD VIDEO PREVIEW LAYER -----
    NSLog(@"Adding video preview layer"); 
    AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession]; 
    [self setPreviewLayer:layer]; 

    UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation; 
    NSLog(@"%d", (int)currentOrientation); 

    // Map the device orientation to a capture orientation once, so the same
    // value can be reused for the movie-file connection below.
    // NOTE(review): UIDeviceOrientation and AVCaptureVideoOrientation have
    // their two landscape values swapped relative to each other; the original
    // mapped them 1:1 and that mapping is preserved here — confirm on device.
    AVCaptureVideoOrientation videoOrientation = AVCaptureVideoOrientationPortrait; 
    if (currentOrientation == UIDeviceOrientationPortraitUpsideDown) 
        videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; 
    else if (currentOrientation == UIDeviceOrientationLandscapeRight) 
        videoOrientation = AVCaptureVideoOrientationLandscapeRight; 
    else if (currentOrientation == UIDeviceOrientationLandscapeLeft) 
        videoOrientation = AVCaptureVideoOrientationLandscapeLeft; 

    // Only the four flat orientations touch the preview layer (face-up /
    // face-down leave it unchanged), matching the original behavior.
    if (currentOrientation == UIDeviceOrientationPortrait || 
        currentOrientation == UIDeviceOrientationPortraitUpsideDown || 
        currentOrientation == UIDeviceOrientationLandscapeRight || 
        currentOrientation == UIDeviceOrientationLandscapeLeft) 
    { 
        PreviewLayer.orientation = videoOrientation; 
    } 

    [[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

    //----- ADD OUTPUTS -----
    NSLog(@"Adding movie file output"); 
    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 
    VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
    // NOTE(review): delivering sample buffers on the main queue can drop
    // frames under load; consider a dedicated serial queue.
    [VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; 

    // BUGFIX: the pixel format must be stored under
    // kCVPixelBufferPixelFormatTypeKey; the original filed it under
    // kCVPixelBufferBytesPerRowAlignmentKey, so the requested 32BGRA format
    // was silently ignored.
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; 
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 

    [VideoDataOutput setVideoSettings:videoSettings]; 

    Float64 TotalSeconds = 60;       // Max seconds per recorded segment
    int32_t preferredTimeScale = 30; // Frames per second
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION 
    MovieFileOutput.maxRecordedDuration = maxDuration; 
    MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

    //SET THE CONNECTION PROPERTIES (output properties) 
    [self CameraSetOutputProperties];   //(We call a method as it also has to be done after changing camera) 

    // Locate the movie-file output's video connection.
    AVCaptureConnection *videoConnection = nil; 
    for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
    { 
        NSLog(@"%@", connection); 
        for (AVCaptureInputPort *port in [connection inputPorts]) 
        { 
            NSLog(@"%@", port); 
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
            { 
                videoConnection = connection; 
            } 
        } 
    } 

    // BUGFIX: the original passed a raw UIDeviceOrientation to
    // setVideoOrientation:, which takes AVCaptureVideoOrientation — the two
    // enums are not interchangeable. Use the mapped value computed above.
    if ([videoConnection isVideoOrientationSupported]) 
    { 
        [videoConnection setVideoOrientation:videoOrientation]; 
    } 

    NSLog(@"Setting image quality"); 
    [CaptureSession setSessionPreset:AVCaptureSessionPresetLow]; 

    //----- DISPLAY THE PREVIEW LAYER ----- 
    CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568) ? 438 : 348); 

    [self.PreviewLayer setBounds:layerRect]; 
    [self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))]; 

    // BUGFIX: the original's missing braces meant VideoDataOutput was added to
    // the session unconditionally; guard each output with canAddOutput:.
    if ([CaptureSession canAddOutput:MovieFileOutput]) 
        [CaptureSession addOutput:MovieFileOutput]; 
    if ([CaptureSession canAddOutput:VideoDataOutput]) 
        [CaptureSession addOutput:VideoDataOutput]; 

    // Host the preview layer on a view behind the UI controls (avoids having
    // to manually bring each control to the front).
    CameraView = [[UIView alloc] init]; 
    [videoPreviewLayer addSubview:CameraView]; 
    [videoPreviewLayer sendSubviewToBack:CameraView]; 
    [[CameraView layer] addSublayer:PreviewLayer]; 

    //----- START THE CAPTURE SESSION RUNNING ----- 
    [CaptureSession startRunning]; 
} 

#pragma mark - IBACtion Methods 
-(IBAction)btnStartAndStopPressed:(id)sender 
{ 
    UIButton *StartAndStopButton = (UIButton*)sender; 
    if (![StartAndStopButton isSelected]) 
    { 
        // --- Start a new recording segment ---
        [StartAndStopButton setSelected:YES]; 
        [btnPauseAndResume setEnabled:YES]; 
        [btnBack setEnabled:NO]; 
        [btnSwitchCameraInput setHidden:YES]; 

        NSDate *date = [NSDate date]; 
        NSLog(@" date %@",date); 

        // Build a unique per-segment file path in Documents (timestamped name).
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
        NSString *recordedFileName = [NSString stringWithFormat:@"output%@.mov",date]; 
        NSString *documentsDirectory = [paths objectAtIndex:0]; 
        self.outputPath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]]; 
        NSLog(@"%@",self.outputPath); 

        [arrVideoName addObject:recordedFileName]; 

        NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
        if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) 
        { 
            // BUGFIX: log the failure instead of silently ignoring it — a
            // stale file that can't be removed will break the new recording.
            NSError *error = nil; 
            if (![[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error]) 
            { 
                NSLog(@"Couldn't remove stale segment file: %@", error); 
            } 
        } 

        // BUGFIX: the original only set WeAreRecording = YES on *resume*, never
        // when starting from this button, so the sample-buffer delegate's
        // WeAreRecording guard stayed NO for the first segment.
        WeAreRecording = YES; 

        //Start recording 
        [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
        recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES]; 
    } 
    else 
    { 
        // --- "Done": stop recording entirely and trigger the merge ---
        [StartAndStopButton setSelected:NO]; 
        [btnPauseAndResume setEnabled:NO]; 
        [btnBack setEnabled:YES]; 
        [btnSwitchCameraInput setHidden:NO]; 

        NSLog(@"STOP RECORDING"); 
        WeAreRecording = NO; 

        // BUGFIX: set the "finished" flag *before* stopRecording — the
        // didFinishRecording... delegate callback that triggers the merge can
        // arrive very quickly and must observe stopRecording == YES.
        stopRecording = YES; 

        [MovieFileOutput stopRecording]; 
        [((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."]; 

        if ([recordingTimer isValid]) 
        { 
            [recordingTimer invalidate]; 
            recordingTimer = nil; 
            recordingTime = 30; 
        } 
    } 
} 

- (IBAction)btnPauseAndResumePressed:(id)sender 
{ 
    UIButton *toggleButton = (UIButton *)sender; 
    if (toggleButton.selected) 
    { 
        // Button was in the "paused" state -> resume into a fresh segment file.
        toggleButton.selected = NO; 
        NSLog(@"recording resumed"); 

        [btnStartAndStop setEnabled:YES]; 
        [btnBack setEnabled:NO]; 
        [btnSwitchCameraInput setHidden:YES]; 

        WeAreRecording = YES; 

        NSDate *now = [NSDate date]; 
        NSLog(@" date %@",now); 

        // Each resumed segment records into its own uniquely (timestamp) named
        // file in the Documents directory.
        NSString *segmentName = [NSString stringWithFormat:@"output%@.mov",now]; 
        NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES); 
        NSString *documentsFolder = [searchPaths objectAtIndex:0]; 
        self.outputPath = [documentsFolder stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",segmentName]]; 
        NSLog(@"%@",self.outputPath); 

        [arrVideoName addObject:segmentName]; 

        NSURL *segmentURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
        NSFileManager *fileManager = [NSFileManager defaultManager]; 
        if ([fileManager fileExistsAtPath:self.outputPath]) 
        { 
            NSError *removeError; 
            if ([fileManager removeItemAtPath:self.outputPath error:&removeError] == NO) 
            { 
                //Error - handle if requried 
            } 
        } 
        [self resumeTimer:recordingTimer]; 
        //Start recording 
        [MovieFileOutput startRecordingToOutputFileURL:segmentURL recordingDelegate:self]; 
    } 
    else 
    { 
        // Button was in the "recording" state -> pause the current segment.
        toggleButton.selected = YES; 
        NSLog(@"recording paused"); 
        WeAreRecording = NO; 

        [MovieFileOutput stopRecording]; 
        [self pauseTimer:recordingTimer]; 

        [btnStartAndStop setEnabled:NO]; 
        [btnBack setEnabled:YES]; 
        [btnSwitchCameraInput setHidden:NO]; 
    } 
} 

- (void) CameraSetOutputProperties 
{ 
    // Configure the movie-file output's video connection. Must be re-run
    // whenever the camera input is swapped.
    AVCaptureConnection *videoConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 

    [videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 

    // Log the frame-duration limits before applying the target frame rate.
    CMTimeShow(videoConnection.videoMinFrameDuration); 
    CMTimeShow(videoConnection.videoMaxFrameDuration); 

    // Pin both min and max duration to one frame at the configured FPS.
    CMTime frameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 
    if (videoConnection.supportsVideoMinFrameDuration) 
        videoConnection.videoMinFrameDuration = frameDuration; 
    if (videoConnection.supportsVideoMaxFrameDuration) 
        videoConnection.videoMaxFrameDuration = frameDuration; 

    // ... and log them again after.
    CMTimeShow(videoConnection.videoMinFrameDuration); 
    CMTimeShow(videoConnection.videoMaxFrameDuration); 
} 

- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position 
{ 
    // Return the first video-capable capture device at the requested position
    // (front/back), or nil if none is available.
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) 
    { 
        if (candidate.position == Position) 
        { 
            NSLog(@"%d",Position); 
            return candidate; 
        } 
    } 
    return nil; 
} 

#pragma mark - AVCaptureFileOutputRecordingDelegate Method 

-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    // Forward each frame to the asset-writer input, but only while recording
    // is active and the writer can accept more data.
    if (videoWriterInput.readyForMoreMediaData && WeAreRecording) 
        [videoWriterInput appendSampleBuffer:sampleBuffer]; 

    // NOTE(review): re-applying the orientation on *every* frame is wasteful —
    // this only needs to happen when the orientation actually changes.
    for (AVCaptureConnection *captureConnection in [captureOutput connections]) 
    { 
        if ([captureConnection isVideoOrientationSupported]) 
        { 
            AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft; 
            [captureConnection setVideoOrientation:orientation]; 
        } 
    } 

    // BUGFIX(cleanup): the original also computed a rotation
    // CGAffineTransform from the current device orientation here and never
    // used it — dead per-frame work, removed. If a rotation transform is
    // needed it belongs on the writer input's `transform` property, set once
    // before writing starts.
} 

// AVCaptureFileOutputRecordingDelegate: fires when a segment stops recording —
// after an explicit stopRecording, and also when recording ends on its own
// (e.g. maxRecordedDuration reached, disk full, or an interruption).
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error 
{ 
NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 
NSLog(@"output file url : %@", [outputFileURL absoluteString]); 

// A non-nil error does not necessarily mean the file is unusable; the
// AVErrorRecordingSuccessfullyFinishedKey flag in userInfo is authoritative.
// (When error is nil, [error code] messages nil and yields 0 == noErr, so the
// branch is skipped.)
// NOTE(review): RecordedSuccessfully is computed but never consulted below —
// the file is appended to arrOutputUrl either way; confirm that's intended.
BOOL RecordedSuccessfully = YES; 
if ([error code] != noErr) 
{ 
    // A problem occurred: Find out if the recording was successful. 
    id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
    if (value) 
    { 
     RecordedSuccessfully = [value boolValue]; 
    } 
} 
// Locate the movie-file output's video connection.
// NOTE(review): re-applying the orientation here, after the segment has
// already finished, cannot affect the file just written — presumably intended
// for the *next* segment; confirm.
AVCaptureConnection *videoConnection=nil; 
for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
{ 
    NSLog(@"%@", connection); 
    for (AVCaptureInputPort *port in [connection inputPorts]) 
    { 
     NSLog(@"%@", port); 
     if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
     { 
      videoConnection = connection; 
     } 
    } 
} 

if([videoConnection isVideoOrientationSupported]) // **Here it is, its always false** 
{ 
    // NOTE(review): setVideoOrientation: takes AVCaptureVideoOrientation, but
    // a UIDeviceOrientation is passed here — the two enums do not line up.
    [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]]; 
} NSLog(@"Setting image quality"); 

// NOTE(review): outputFileURL was created from self.outputPath when recording
// started, so this reads the finished segment fully into memory and rewrites
// it over itself — a redundant (and memory-heavy) self-copy unless outputPath
// changed in the meantime; confirm before removing.
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL]; 
[videoData writeToFile:self.outputPath atomically:NO]; 

// Remember this segment's URL for the final merge.
[arrOutputUrl addObject:outputFileURL]; 

// Only once the user has pressed "done" (stopRecording == YES) are all the
// recorded segments stitched into a single movie.
if (stopRecording) 
{ 
    [self mergeMultipleVideo]; 
} 
} 

//Method to merge multiple audios 
-(void)mergeMultipleVideo 
{ 
mixComposition = [AVMutableComposition composition]; 

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

CMTime nextClipStartTime = kCMTimeZero; 
NSLog(@"Array of output file url : %@", arrOutputUrl); 
if (arrOutputUrl.count > 0) 
{ 
    for(int i = 0 ;i < [arrOutputUrl count];i++) 
    { 
     AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil]; 

     CMTimeRange timeRangeInAsset; 
     timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]); 

     [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 
     nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
    } 
} 

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]]; 
NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
exportSession.outputURL=url; 
exportSession.outputFileType = AVFileTypeQuickTimeMovie; 
exportSession.shouldOptimizeForNetworkUse = YES; 
[exportSession exportAsynchronouslyWithCompletionHandler:^{ 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     [self exportDidFinish:exportSession path:myPathDocs]; 
    }); 
}]; 
} 

// Completion handler for the merge export: on success, deletes the
// intermediate per-segment files and shows the post-recording popup; on
// failure, dismisses the loading view so the UI doesn't hang.
-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath 
{ 
    NSLog(@"session.status : %d",(int)session.status); 
    if (session.status == AVAssetExportSessionStatusCompleted) 
    { 
        // NOTE: session.outputURL already points at outputVideoPath, so the
        // original's read-whole-file-and-rewrite step was a redundant
        // self-copy that loaded the entire movie into memory; removed.

        // Delete the intermediate per-segment files now that the merge is done.
        if ([arrVideoName count] > 0) 
        { 
            NSArray *documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
            NSString *documentsDirectory = [documentPaths objectAtIndex:0]; 
            NSFileManager *fileManager = [NSFileManager defaultManager]; 
            for (NSString *segmentName in arrVideoName) 
            { 
                NSString *fullFilePath = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",segmentName]]; 
                NSLog(@"Full path of file to be deleted: %@",fullFilePath); 

                NSError *error = nil; 
                if ([fileManager fileExistsAtPath:fullFilePath]) 
                { 
                    [fileManager removeItemAtPath:fullFilePath error:&error]; 
                } 
            } 
            [arrVideoName removeAllObjects]; 
        } 
        if (arrOutputUrl.count > 0) 
        { 
            [arrOutputUrl removeAllObjects]; 
        } 
        [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view]; 
        [self.view addSubview:afterRecordingPopupView]; 
    } 
    else 
    { 
        // BUGFIX: the original silently ignored failed/cancelled exports,
        // leaving the "Please wait" loading view on screen forever.
        NSLog(@"Export failed (status %d): %@", (int)session.status, session.error); 
        [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view]; 
    } 
} 
+0

Dostałeś swoje rozwiązanie? –

Odpowiedz

0

Sprawdź właściwość `enabled` klasy AVCaptureConnection. Dla połączenia wyjściowego ustaw `enabled` na NO zamiast zatrzymywać sesję.

+0

Próbowałem. Ale kiedy ustawiam `enabled` mojego AVCaptureConnection na NO, metoda -(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error i tak jest wywoływana, a tego właśnie nie chcę. –

Powiązane problemy