
I have one audio file and I want to change its album artwork. Is that possible? How can I set the album artwork for an audio file in iOS? How do I add artwork to an audio file so that it shows the album cover?

Actually, I have merged two audio files and I want to add cover art to the merged file so that it shows up in iTunes.

The code is given below:

- (BOOL) combineVoices1 
{ 
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES); 
NSString *libraryCachesDirectory = [paths objectAtIndex:0]; 
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"]; 
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text]; 
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath]; 
NSURL *audioFileInput1 = audioFileURL1; //<Path of original audio file> 
NSURL *audioFileInput2 = audioFileURL2; //<Path of original audio file> 

if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) 
{ 
    return NO; 
} 

[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL]; 
//CMTime nextClipStartTime = kCMTimeZero; 
AVMutableComposition *composition = [[AVMutableComposition alloc] init]; 
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
CMTime nextClipStartTimeMix1; 
if (playbackDelayAfterTimeMix1 > 0) { 
    nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1); 
}else{ 
    nextClipStartTimeMix1 = kCMTimeZero; 
} 
CMTime startTimeMix1; 
if (playbackDelayMix1 > 0) { 
    startTimeMix1 = CMTimeMake(playbackDelayMix1, 1); 
}else{ 
    startTimeMix1 = kCMTimeZero; 
} 
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1]; 
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne]; 
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil]; 
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio]; 
AVAssetTrack *clipAudioTrack; 
if (tracks.count > 0) { 
    clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
}else{ 
    return NO; 
} 
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil]; 

//avAsset.commonMetadata 
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
CMTime nextClipStartTimeMix2; 
if (playbackDelayAfterTimeMix2 > 0) { 
    nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1); 
}else{ 
    nextClipStartTimeMix2 = kCMTimeZero; 
} 
CMTime startTimeMix2; 
if (playbackDelayMix2 > 0) { 
    startTimeMix2 = CMTimeMake(playbackDelayMix2, 1); 
}else{ 
    startTimeMix2 = kCMTimeZero; 
} 

[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2]; 
//NSString *soundOne1 =[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"]; 
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1]; 
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil]; 
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio]; 
AVAssetTrack *clipAudioTrack1; 
if (tracks1.count > 0) { 
    clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
}else{ 
    return NO; 
} 
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil]; 


AVAssetExportSession *exportSession = [AVAssetExportSession 
             exportSessionWithAsset:composition 
             presetName:AVAssetExportPresetAppleM4A]; 

if (nil == exportSession) return NO; 


exportSession.outputURL = audioFileOutput; 
exportSession.outputFileType = AVFileTypeAppleM4A; 

[exportSession exportAsynchronouslyWithCompletionHandler:^ 
{ 
    if (AVAssetExportSessionStatusCompleted == exportSession.status) 
    { 

     [self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO]; 

    } 
    else if (AVAssetExportSessionStatusFailed == exportSession.status) 
    { 
     [self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO]; 
     [[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@",[[exportSession error] localizedDescription]]]; 
     //NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]); 
    } 
}]; 

return YES; 
} 
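
As a side note, the insertTimeRange: calls above pass error:nil and silently discard any failure. A hedged variant of one of those calls (using the same variables as in the method above) that surfaces the error could look like this:

NSError *insertError = nil; 
BOOL inserted = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) 
                                               ofTrack:clipAudioTrack 
                                                atTime:nextClipStartTimeMix1 
                                                 error:&insertError]; 
if (!inserted) { 
    // Surface the AVFoundation error instead of ignoring it. 
    NSLog(@"Insert failed: %@", insertError.localizedDescription); 
    return NO; 
} 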

What have you looked at so far? Any code? – ljacqu


I think it is possible, probably by using the 'AVURLAsset' and 'AVMetadataItem' classes. –
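
For reference, a minimal sketch of how 'AVURLAsset' and 'AVMetadataItem' expose an asset's existing metadata (someAudioFileURL is only a placeholder):

AVURLAsset *asset = [AVURLAsset URLAssetWithURL:someAudioFileURL options:nil]; // someAudioFileURL is a placeholder URL 
// commonMetadata holds AVMetadataItem objects in the common key space. 
for (AVMetadataItem *item in asset.commonMetadata) { 
    NSLog(@"%@ = %@", item.commonKey, item.value); 
} 
// Existing artwork items, if any, can be filtered out like this: 
NSArray *artworkItems = [AVMetadataItem metadataItemsFromArray:asset.commonMetadata 
                                                       withKey:AVMetadataCommonKeyArtwork 
                                                      keySpace:AVMetadataKeySpaceCommon]; 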


I have merged two audio files and I want to add artwork to the merged file, which (the artwork) will show in iTunes. – Rathore

Answer


I solved my problem and it now works fine. I added the metadata code just before the "AVAssetExportSession" block in the code above. The final method is:

- (BOOL) combineVoices1 
{ 
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES); 
NSString *libraryCachesDirectory = [paths objectAtIndex:0]; 
libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"]; 
NSString *OutputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a",textFieldMixFile.text]; 
NSURL *audioFileOutput = [NSURL fileURLWithPath:OutputFilePath]; 
NSURL *audioFileInput1 = audioFileURL1; //<Path of original audio file> 
NSURL *audioFileInput2 = audioFileURL2; //<Path of original audio file> 

if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) 
{ 
    return NO; 
} 

[[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL]; 
//CMTime nextClipStartTime = kCMTimeZero; 
AVMutableComposition *composition = [[AVMutableComposition alloc] init]; 
AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
CMTime nextClipStartTimeMix1; 
if (playbackDelayAfterTimeMix1 > 0) { 
    nextClipStartTimeMix1 = CMTimeMake(playbackDelayAfterTimeMix1, 1); 
}else{ 
    nextClipStartTimeMix1 = kCMTimeZero; 
} 
CMTime startTimeMix1; 
if (playbackDelayMix1 > 0) { 
    startTimeMix1 = CMTimeMake(playbackDelayMix1, 1); 
}else{ 
    startTimeMix1 = kCMTimeZero; 
} 
[compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1]; 
NSURL *url = audioFileURL1; //[NSURL fileURLWithPath:soundOne]; 
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil]; 
NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio]; 
AVAssetTrack *clipAudioTrack; 
if (tracks.count > 0) { 
    clipAudioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
}else{ 
    return NO; 
} 
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration) ofTrack:clipAudioTrack atTime:nextClipStartTimeMix1 error:nil]; 

//avAsset.commonMetadata 
AVMutableCompositionTrack *compositionAudioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 
CMTime nextClipStartTimeMix2; 
if (playbackDelayAfterTimeMix2 > 0) { 
    nextClipStartTimeMix2 = CMTimeMake(playbackDelayAfterTimeMix2, 1); 
}else{ 
    nextClipStartTimeMix2 = kCMTimeZero; 
} 
CMTime startTimeMix2; 
if (playbackDelayMix2 > 0) { 
    startTimeMix2 = CMTimeMake(playbackDelayMix2, 1); 
}else{ 
    startTimeMix2 = kCMTimeZero; 
} 

[compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2]; 
//NSString *soundOne1 =[[NSBundle mainBundle]pathForResource:@"test" ofType:@"caf"]; 
NSURL *url1 = audioFileURL2; //[NSURL fileURLWithPath:soundOne1]; 
AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:url1 options:nil]; 
NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio]; 
AVAssetTrack *clipAudioTrack1; 
if (tracks1.count > 0) { 
    clipAudioTrack1 = [[avAsset1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 
}else{ 
    return NO; 
} 
[compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration) ofTrack:clipAudioTrack1 atTime:nextClipStartTimeMix2 error:nil]; 

/* Added metadata items: artist, album, song name, and cover art. */

AVMutableMetadataItem *artistMetadata = [[AVMutableMetadataItem alloc] init]; 
artistMetadata.key = AVMetadataiTunesMetadataKeyArtist; 
artistMetadata.keySpace = AVMetadataKeySpaceiTunes; 
artistMetadata.locale = [NSLocale currentLocale]; 
artistMetadata.value = uTakeTheMicArtist; 

AVMutableMetadataItem *albumMetadata = [[AVMutableMetadataItem alloc] init]; 
albumMetadata.key = AVMetadataiTunesMetadataKeyAlbum; 
albumMetadata.keySpace = AVMetadataKeySpaceiTunes; 
albumMetadata.locale = [NSLocale currentLocale]; 
albumMetadata.value = uTakeTheMicAlbum; 

AVMutableMetadataItem *songMetadata = [[AVMutableMetadataItem alloc] init]; 
songMetadata.key = AVMetadataiTunesMetadataKeySongName; 
songMetadata.keySpace = AVMetadataKeySpaceiTunes; 
songMetadata.locale = [NSLocale currentLocale]; 
songMetadata.value = textFieldMixFile.text; 

AVMutableMetadataItem *imageMetadata = [[AVMutableMetadataItem alloc] init]; 
imageMetadata.key = AVMetadataiTunesMetadataKeyCoverArt; 
imageMetadata.keySpace = AVMetadataKeySpaceiTunes; 
imageMetadata.locale = [NSLocale currentLocale]; 
imageMetadata.value = imageData; //imageData is the NSData of the cover UIImage (e.g. its PNG or JPEG representation). 
NSArray *metadata = [NSArray arrayWithObjects:artistMetadata, albumMetadata, songMetadata, imageMetadata, nil]; 

AVAssetExportSession *exportSession = [AVAssetExportSession 
             exportSessionWithAsset:composition 
             presetName:AVAssetExportPresetAppleM4A]; 

if (nil == exportSession) return NO; 

exportSession.metadata = metadata; 
exportSession.outputURL = audioFileOutput; 
exportSession.outputFileType = AVFileTypeAppleM4A; 

[exportSession exportAsynchronouslyWithCompletionHandler:^ 
{ 
    if (AVAssetExportSessionStatusCompleted == exportSession.status) 
    { 

     [self performSelectorOnMainThread:@selector(performAction) withObject:nil waitUntilDone:NO]; 

    } 
    else if (AVAssetExportSessionStatusFailed == exportSession.status) 
    { 
     [self performSelectorOnMainThread:@selector(hideSpinningWheel) withObject:nil waitUntilDone:NO]; 
     [[NSTSharedData instance] showAlertForTitle:@"Error!" andMessage:[NSString stringWithFormat:@"%@.",[[exportSession error] localizedDescription]]]; 
     //NSLog(@"Export failed: %@", [[exportSession error] localizedDescription]); 
    } 
}]; 

return YES; 
} 
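
For completeness, a hedged sketch of how the imageData used above might be produced (the image name "cover" is only an assumed placeholder) and of how the exported file's iTunes metadata can be checked afterwards, using the audioFileOutput URL from the method:

// Cover art as NSData; "cover" is an assumed image in the app bundle. 
UIImage *coverImage = [UIImage imageNamed:@"cover"]; 
NSData *imageData = UIImagePNGRepresentation(coverImage); 

// Once the asynchronous export has completed, the written metadata can be inspected: 
AVURLAsset *exported = [AVURLAsset URLAssetWithURL:audioFileOutput options:nil]; 
NSArray *items = [exported metadataForFormat:AVMetadataFormatiTunesMetadata]; 
NSArray *covers = [AVMetadataItem metadataItemsFromArray:items 
                                                 withKey:AVMetadataiTunesMetadataKeyCoverArt 
                                                keySpace:AVMetadataKeySpaceiTunes]; 
NSLog(@"Cover art items found: %lu", (unsigned long)covers.count); 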