Here's how that worked for me. The method you settled on won't let your app scale down the line, so you may as well learn to do things correctly at the start, even if it costs more time and effort now.
In my app, after creating the asset writer...
_writer = [[AVAssetWriter alloc] initWithURL:_outURL fileType:AVFileTypeQuickTimeMovie error:outError];
...I grab the asset's video track...
NSArray *videoTracks = [_asset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
assetVideoTrack = [videoTracks objectAtIndex:0];
...and then create an asset reader track output from that track:
_readerVideoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetVideoTrack outputSettings:decompressionVideoSettings];
[_reader addOutput:_readerVideoOutput];
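(For reference, decompressionVideoSettings here is the pixel-buffer output settings dictionary from the full listing further down; it asks the reader to vend 4:2:0 bi-planar pixel buffers backed by IOSurface:)
NSDictionary *decompressionVideoSettings = @{
(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
(id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
};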
Then, I fetch the video track's format descriptions (an array) and bridge the first entry to a CMFormatDescriptionRef:
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [assetVideoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
Then, I attempt to read both the clean-aperture and the pixel-aspect-ratio extensions, and carry over whichever of them is present at the end:
NSDictionary *compressionSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = @{
AVVideoCleanApertureWidthKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth),
AVVideoCleanApertureHeightKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight),
AVVideoCleanApertureHorizontalOffsetKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset),
AVVideoCleanApertureVerticalOffsetKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset)
};
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = @{
AVVideoPixelAspectRatioHorizontalSpacingKey : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing),
AVVideoPixelAspectRatioVerticalSpacingKey : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing)
};
}
if (cleanAperture || pixelAspectRatio)
{
NSMutableDictionary *mutableCompressionSettings = [NSMutableDictionary dictionary];
if (cleanAperture)
[mutableCompressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[mutableCompressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
compressionSettings = mutableCompressionSettings;
}
}
That's where you got confused: some video tracks carry one extension but not the other, and vice versa. So you attempt to load both, keep whichever set came back populated, and discard the one that did not.
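Whatever survives that check simply gets folded into the writer input's video settings; if neither extension was present, compressionSettings stays nil and the compression-properties key is omitted, letting the encoder pick its defaults. Condensed from the full listing below, that step looks like this:
NSMutableDictionary *videoSettings = [@{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : [NSNumber numberWithDouble:trackDimensions.width],
AVVideoHeightKey : [NSNumber numberWithDouble:trackDimensions.height]
} mutableCopy];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
_writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetVideoTrack mediaType] outputSettings:videoSettings];
[_writer addInput:_writerVideoInput];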
Keep in mind that, despite all the variation you'll see out there, there is essentially one correct way to nest the calls required to read and write a media file on the iPhone, and the soundest thing you can do is follow it.
If you're interested in seeing what that looks like, here it is:
#import "ExportVideo.h"
@implementation ExportVideo
@synthesize url = _url;
@synthesize renderer = _renderer;
- (id)initWithURL:(NSURL *)url usingRenderer:(GLKitView *)renderer {
NSLog(@"ExportVideo");
if (!(self = [super init])) {
return nil;
}
self.url = url;
self.renderer = renderer;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
_mainSerializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
NSString *rwAudioSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw audio serialization queue", self];
_rwAudioSerializationQueue = dispatch_queue_create([rwAudioSerializationQueueDescription UTF8String], NULL);
NSString *rwVideoSerializationQueueDescription = [NSString stringWithFormat:@"%@ rw video serialization queue", self];
_rwVideoSerializationQueue = dispatch_queue_create([rwVideoSerializationQueueDescription UTF8String], NULL);
return self;
}
- (void)startProcessing {
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
_asset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
NSLog(@"URL: %@", self.url);
_cancelled = NO;
[_asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
dispatch_async(_mainSerializationQueue, ^{
if (_cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([_asset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [self.url path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
if (success)
success = [self setupAssetReaderAndAssetWriter:&localError];
if (success)
success = [self startAssetReaderAndWriter:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setupAssetReaderAndAssetWriter:(NSError **)outError
{
_reader = [[AVAssetReader alloc] initWithAsset:_asset error:outError];
BOOL success = (_reader != nil);
if (success)
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
_outputURL = paths[0];
NSFileManager *manager = [NSFileManager defaultManager];
[manager createDirectoryAtPath:_outputURL withIntermediateDirectories:YES attributes:nil error:nil];
_outputURL = [_outputURL stringByAppendingPathComponent:@"output.mov"];
[manager removeItemAtPath:_outputURL error:nil];
_outURL = [NSURL fileURLWithPath:_outputURL];
_writer = [[AVAssetWriter alloc] initWithURL:_outURL fileType:AVFileTypeQuickTimeMovie error:outError];
success = (_writer != nil);
}
if (success)
{
AVAssetTrack *assetAudioTrack = nil, *assetVideoTrack = nil;
NSArray *audioTracks = [_asset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
assetAudioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [_asset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
assetVideoTrack = [videoTracks objectAtIndex:0];
if (assetAudioTrack)
{
NSDictionary *decompressionAudioSettings = @{ AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM] };
_readerAudioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetAudioTrack outputSettings:decompressionAudioSettings];
[_reader addOutput:_readerAudioOutput];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
NSDictionary *compressionAudioSettings = @{
AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
AVEncoderBitRateKey : [NSNumber numberWithInteger:128000],
AVSampleRateKey : [NSNumber numberWithInteger:44100],
AVChannelLayoutKey : channelLayoutAsData,
AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:2]
};
_writerAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetAudioTrack mediaType] outputSettings:compressionAudioSettings];
[_writer addInput:_writerAudioInput];
}
if (assetVideoTrack)
{
NSDictionary *decompressionVideoSettings = @{
(id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
(id)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
};
_readerVideoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:assetVideoTrack outputSettings:decompressionVideoSettings];
[_reader addOutput:_readerVideoOutput];
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [assetVideoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (__bridge CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [assetVideoTrack naturalSize];
NSDictionary *compressionSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = @{
AVVideoCleanApertureWidthKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth),
AVVideoCleanApertureHeightKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight),
AVVideoCleanApertureHorizontalOffsetKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset),
AVVideoCleanApertureVerticalOffsetKey : (id)CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset)
};
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = @{
AVVideoPixelAspectRatioHorizontalSpacingKey : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing),
AVVideoPixelAspectRatioVerticalSpacingKey : (id)CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing)
};
}
if (cleanAperture || pixelAspectRatio)
{
NSMutableDictionary *mutableCompressionSettings = [NSMutableDictionary dictionary];
if (cleanAperture)
[mutableCompressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[mutableCompressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
compressionSettings = mutableCompressionSettings;
}
}
// Dictionary literals are immutable, so take a mutable copy before adding the compression properties below.
NSMutableDictionary *videoSettings = [@{
AVVideoCodecKey : AVVideoCodecH264,
AVVideoWidthKey : [NSNumber numberWithDouble:trackDimensions.width],
AVVideoHeightKey : [NSNumber numberWithDouble:trackDimensions.height]
} mutableCopy];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
_writerVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:[assetVideoTrack mediaType] outputSettings:videoSettings];
[_writer addInput:_writerVideoInput];
}
}
return success;
}
- (BOOL)startAssetReaderAndWriter:(NSError **)outError
{
BOOL success = YES;
success = [_reader startReading];
if (!success) {
if (outError)
*outError = [_reader error];
NSLog(@"Reader error");
}
if (success)
{
success = [_writer startWriting];
if (!success) {
if (outError)
*outError = [_writer error];
NSLog(@"Writer error");
}
}
if (success)
{
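// A dispatch group tracks when both the audio and video writer inputs have finished.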
_dispatchGroup = dispatch_group_create();
[_writer startSessionAtSourceTime:kCMTimeZero];
_audioFinished = NO;
_videoFinished = NO;
if (_writerAudioInput)
{
dispatch_group_enter(_dispatchGroup);
[_writerAudioInput requestMediaDataWhenReadyOnQueue:_rwAudioSerializationQueue usingBlock:^{
if (_audioFinished)
return;
BOOL completedOrFailed = NO;
while ([_writerAudioInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [_readerAudioOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
BOOL success = [_writerAudioInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
{
BOOL oldFinished = _audioFinished;
_audioFinished = YES;
if (oldFinished == NO)
{
[_writerAudioInput markAsFinished];
}
dispatch_group_leave(_dispatchGroup);
}
}];
}
if (_writerVideoInput)
{
dispatch_group_enter(_dispatchGroup);
[_writerVideoInput requestMediaDataWhenReadyOnQueue:_rwVideoSerializationQueue usingBlock:^{
if (_videoFinished)
return;
BOOL completedOrFailed = NO;
while ([_writerVideoInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [_readerVideoOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
BOOL success = [_writerVideoInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
{
BOOL oldFinished = _videoFinished;
_videoFinished = YES;
if (oldFinished == NO)
{
[_writerVideoInput markAsFinished];
}
dispatch_group_leave(_dispatchGroup);
}
}];
}
dispatch_group_notify(_dispatchGroup, _mainSerializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (_cancelled)
{
[_reader cancelReading];
[_writer cancelWriting];
}
else
{
if ([_reader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [_reader error];
NSLog(@"_reader finalError: %@", finalError);
}
[_writer finishWritingWithCompletionHandler:^{
// Only save to the camera roll if the writer actually completed.
if ([_writer status] == AVAssetWriterStatusCompleted)
UISaveVideoAtPathToSavedPhotosAlbum(_outputURL, nil, nil, nil);
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:(finalError ? finalError : [_writer error])];
}];
}
});
}
return success;
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
if (!success)
{
[_reader cancelReading];
[_writer cancelWriting];
dispatch_async(dispatch_get_main_queue(), ^{
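// Any UI updates for the failure case would go here (left empty in this listing).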
});
}
else
{
_cancelled = NO;
_videoFinished = NO;
_audioFinished = NO;
dispatch_async(dispatch_get_main_queue(), ^{
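// Any UI updates for the success case would go here (left empty in this listing).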
});
}
NSLog(@"readingAndWritingDidFinishSuccessfully success = %@ : Error = %@", (success == 0) ? @"NO" : @"YES", error);
}
@end
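If you want to kick it off, a minimal usage sketch looks like the following; videoURL and self.renderer are hypothetical names for whatever source URL and GLKitView your own view controller already has on hand:
// Hypothetical call site; videoURL and self.renderer come from your own code.
ExportVideo *exporter = [[ExportVideo alloc] initWithURL:videoURL usingRenderer:self.renderer];
[exporter startProcessing];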