123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474 |
- //
- // SDAVAssetExportSession.m
- //
- // This file is part of the SDAVAssetExportSession package.
- //
- // Created by Olivier Poitrey <rs@dailymotion.com> on 13/03/13.
- // Copyright 2013 Olivier Poitrey. All rights reserved.
- //
- // For the full copyright and license information, please view the LICENSE
- // file that was distributed with this source code.
- //
- #import "SDAVAssetExportSession.h"
//
// Private state for the export session. `progress` is redeclared readwrite so
// it can be updated internally while remaining readonly to callers.
//
@interface SDAVAssetExportSession ()
@property (nonatomic, assign, readwrite) float progress;
@property (nonatomic, strong) AVAssetReader *reader;
@property (nonatomic, strong) AVAssetReaderVideoCompositionOutput *videoOutput;
@property (nonatomic, strong) AVAssetReaderAudioMixOutput *audioOutput;
@property (nonatomic, strong) AVAssetWriter *writer;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
@property (nonatomic, strong) AVAssetWriterInput *audioInput;
@property (nonatomic, strong) dispatch_queue_t inputQueue;
// `copy` (not `strong`) is the conventional attribute for block properties;
// under ARC the behavior is equivalent, but `copy` documents the semantics.
@property (nonatomic, copy) void (^completionHandler)(void);
@end
@implementation SDAVAssetExportSession
{
  NSError *_error;                   // explicit error set by this class; takes precedence over reader/writer errors in -error
  NSTimeInterval duration;           // export duration in seconds, cached for progress computation
  CMTime lastSamplePresentationTime; // PTS of the most recent video sample, relative to timeRange.start; zeroed (invalid) until the first sample
}
/// Convenience factory wrapping -initWithAsset:.
/// Uses `self` rather than a hard-coded class so that subclasses receive
/// instances of their own type.
+ (id)exportSessionWithAsset:(AVAsset *)asset
{
  return [[self alloc] initWithAsset:asset];
}
/// Designated initializer: captures the source asset and defaults the export
/// window to the asset's full extent (zero to positive infinity).
- (id)initWithAsset:(AVAsset *)asset
{
  self = [super init];
  if (self != nil)
  {
    _asset = asset;
    _timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
  }
  return self;
}
// Starts the export. Builds a fresh AVAssetReader/AVAssetWriter pair, wires up
// the video and audio outputs/inputs, then pumps samples on a private serial
// queue until both tracks finish. `handler` is invoked exactly once — either
// immediately on a setup failure, or after -finish/-complete when the pump
// ends; callers should then inspect `error` / `status`.
- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler
{
  NSParameterAssert(handler != nil);
  // Tear down any in-flight export before starting over.
  [self cancelExport];
  self.completionHandler = handler;
  if (!self.outputURL)
  {
    _error = [NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorExportFailed userInfo:@
    {
      NSLocalizedDescriptionKey: @"Output URL not set"
    }];
    handler();
    return;
  }
  // NOTE(review): these branches test the NSError variable rather than the
  // returned object; Cocoa convention is to check the return value — confirm
  // these initializers never leave a stale error on success.
  NSError *readerError;
  self.reader = [AVAssetReader.alloc initWithAsset:self.asset error:&readerError];
  if (readerError)
  {
    _error = readerError;
    handler();
    return;
  }
  NSError *writerError;
  self.writer = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:self.outputFileType error:&writerError];
  if (writerError)
  {
    _error = writerError;
    handler();
    return;
  }
  self.reader.timeRange = self.timeRange;
  self.writer.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
  self.writer.metadata = self.metadata;
  NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
  // Cache the export duration (seconds) for progress computation; fall back to
  // the whole asset when no finite time range was configured.
  if (CMTIME_IS_VALID(self.timeRange.duration) && !CMTIME_IS_POSITIVE_INFINITY(self.timeRange.duration))
  {
    duration = CMTimeGetSeconds(self.timeRange.duration);
  }
  else
  {
    duration = CMTimeGetSeconds(self.asset.duration);
  }
  //
  // Video output
  //
  if (videoTracks.count > 0) {
    self.videoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks videoSettings:self.videoInputSettings];
    self.videoOutput.alwaysCopiesSampleData = NO;
    // Use the caller-supplied composition when present, otherwise synthesize a
    // default pass-through composition (scaling/rotation handling).
    if (self.videoComposition)
    {
      self.videoOutput.videoComposition = self.videoComposition;
    }
    else
    {
      self.videoOutput.videoComposition = [self buildDefaultVideoComposition];
    }
    if ([self.reader canAddOutput:self.videoOutput])
    {
      [self.reader addOutput:self.videoOutput];
    }
    //
    // Video input
    //
    self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
    self.videoInput.expectsMediaDataInRealTime = NO;
    if ([self.writer canAddInput:self.videoInput])
    {
      [self.writer addInput:self.videoInput];
    }
    // Pixel buffer pool sized to the composition's render size; buffers from
    // this pool are handed to the delegate's renderFrame callback.
    NSDictionary *pixelBufferAttributes = @
    {
      (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
      (id)kCVPixelBufferWidthKey: @(self.videoOutput.videoComposition.renderSize.width),
      (id)kCVPixelBufferHeightKey: @(self.videoOutput.videoComposition.renderSize.height),
      @"IOSurfaceOpenGLESTextureCompatibility": @YES,
      @"IOSurfaceOpenGLESFBOCompatibility": @YES,
    };
    self.videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
  }
  //
  // Audio output
  //
  NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
  if (audioTracks.count > 0) {
    self.audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
    self.audioOutput.alwaysCopiesSampleData = NO;
    self.audioOutput.audioMix = self.audioMix;
    if ([self.reader canAddOutput:self.audioOutput])
    {
      [self.reader addOutput:self.audioOutput];
    }
  } else {
    // Just in case this gets reused
    self.audioOutput = nil;
  }
  //
  // Audio input
  //
  if (self.audioOutput) {
    self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
    self.audioInput.expectsMediaDataInRealTime = NO;
    if ([self.writer canAddInput:self.audioInput])
    {
      [self.writer addInput:self.audioInput];
    }
  }

  [self.writer startWriting];
  [self.reader startReading];
  [self.writer startSessionAtSourceTime:self.timeRange.start];
  // Each track flips its flag when its pump returns NO; whichever finishes
  // last calls -finish. Flag access is serialized via @synchronized(wself).
  __block BOOL videoCompleted = NO;
  __block BOOL audioCompleted = NO;
  // Weak self: these long-lived request blocks must not retain the session.
  __weak typeof(self) wself = self;
  self.inputQueue = dispatch_queue_create("VideoEncoderInputQueue", DISPATCH_QUEUE_SERIAL);
  if (videoTracks.count > 0) {
    [self.videoInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
    {
      if (![wself encodeReadySamplesFromOutput:wself.videoOutput toInput:wself.videoInput])
      {
        @synchronized(wself)
        {
          videoCompleted = YES;
          if (audioCompleted)
          {
            [wself finish];
          }
        }
      }
    }];
  }
  else {
    videoCompleted = YES;
  }

  if (!self.audioOutput) {
    audioCompleted = YES;
  } else {
    [self.audioInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
    {
      if (![wself encodeReadySamplesFromOutput:wself.audioOutput toInput:wself.audioInput])
      {
        @synchronized(wself)
        {
          audioCompleted = YES;
          if (videoCompleted)
          {
            [wself finish];
          }
        }
      }
    }];
  }
}
// Pumps samples from `output` into `input` while the writer input can accept
// data. Updates `progress` and `lastSamplePresentationTime` for video samples
// and routes frames through the optional render delegate.
//
// Returns NO when this track is done being scheduled — either the stream
// ended (input marked finished) or an error occurred — and YES when the input
// simply cannot accept more data right now (the block will be called again).
- (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input
{
  while (input.isReadyForMoreMediaData)
  {
    CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
    if (!sampleBuffer)
    {
      // End of stream for this track.
      [input markAsFinished];
      return NO;
    }
    BOOL handled = NO;
    BOOL error = NO;
    // Bail out if either side left its active state (failure or cancel).
    if (self.reader.status != AVAssetReaderStatusReading || self.writer.status != AVAssetWriterStatusWriting)
    {
      handled = YES;
      error = YES;
    }

    if (!handled && self.videoOutput == output)
    {
      // Update the video progress relative to the configured start time.
      lastSamplePresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
      lastSamplePresentationTime = CMTimeSubtract(lastSamplePresentationTime, self.timeRange.start);
      self.progress = duration == 0 ? 1 : CMTimeGetSeconds(lastSamplePresentationTime) / duration;
      if ([self.delegate respondsToSelector:@selector(exportSession:renderFrame:withPresentationTime:toBuffer:)])
      {
        CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferRef renderBuffer = NULL;
        // Guard the pool allocation: on failure renderBuffer stays NULL and
        // must not be handed to the delegate or the adaptor (append would
        // throw on a NULL buffer).
        CVReturn result = CVPixelBufferPoolCreatePixelBuffer(NULL, self.videoPixelBufferAdaptor.pixelBufferPool, &renderBuffer);
        if (result == kCVReturnSuccess && renderBuffer != NULL)
        {
          [self.delegate exportSession:self renderFrame:pixelBuffer withPresentationTime:lastSamplePresentationTime toBuffer:renderBuffer];
          if (![self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:lastSamplePresentationTime])
          {
            error = YES;
          }
          CVPixelBufferRelease(renderBuffer);
        }
        else
        {
          error = YES;
        }
        handled = YES;
      }
    }
    // Non-delegate path (audio, or video without a rendering delegate):
    // append the sample buffer directly.
    if (!handled && ![input appendSampleBuffer:sampleBuffer])
    {
      error = YES;
    }
    CFRelease(sampleBuffer);
    if (error)
    {
      return NO;
    }
  }
  return YES;
}
// Builds the default pass-through video composition: normalizes the track's
// preferred transform, accounts for 90° rotation, and aspect-fills the source
// into the size requested by videoSettings (Threema modification: crop rather
// than letterbox).
- (AVMutableVideoComposition *)buildDefaultVideoComposition
{
  AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
  AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

  // Frame rate resolution order: explicit rate in videoSettings, then the
  // track's nominal rate, then 30 fps. (Previously the nominal rate was only
  // consulted when videoSettings was nil, so settings without a frame-rate
  // key silently forced 30 fps; nominalFrameRate can itself be 0, mainly for
  // AVComposition assets.)
  float trackFrameRate = 0;
  NSDictionary *videoCompressionProperties = [self.videoSettings objectForKey:AVVideoCompressionPropertiesKey];
  NSNumber *frameRate = [videoCompressionProperties objectForKey:AVVideoAverageNonDroppableFrameRateKey];
  if (frameRate != nil)
  {
    trackFrameRate = frameRate.floatValue;
  }
  if (trackFrameRate == 0)
  {
    trackFrameRate = [videoTrack nominalFrameRate];
  }
  if (trackFrameRate == 0)
  {
    trackFrameRate = 30;
  }
  // Explicit cast: CMTimeMake takes an int32_t timescale.
  videoComposition.frameDuration = CMTimeMake(1, (int32_t)trackFrameRate);

  CGSize targetSize = CGSizeMake([self.videoSettings[AVVideoWidthKey] floatValue], [self.videoSettings[AVVideoHeightKey] floatValue]);
  CGSize naturalSize = [videoTrack naturalSize];
  CGAffineTransform transform = videoTrack.preferredTransform;

  // Fix for mp4 video black screen with audio, see
  // https://github.com/rs/SDAVAssetExportSession/issues/79 for more info.
  // A correct transform maps the natural rect to origin (0,0); subtract any
  // residual offset to compensate.
  CGRect rect = {{0, 0}, naturalSize};
  CGRect transformedRect = CGRectApplyAffineTransform(rect, transform);
  transform.tx -= transformedRect.origin.x;
  transform.ty -= transformedRect.origin.y;

  // Workaround radar 31928389, see
  // https://github.com/rs/SDAVAssetExportSession/pull/70 for more info.
  if (transform.ty == -560) {
    transform.ty = 0;
  }
  if (transform.tx == -560) {
    transform.tx = 0;
  }

  // A ±90° rotation swaps the effective width and height.
  CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
  if (videoAngleInDegree == 90 || videoAngleInDegree == -90) {
    CGFloat width = naturalSize.width;
    naturalSize.width = naturalSize.height;
    naturalSize.height = width;
  }
  videoComposition.renderSize = naturalSize;

  // Aspect-fill scaling.
  {
    float xratio = targetSize.width / naturalSize.width;
    float yratio = targetSize.height / naturalSize.height;

    // Threema modification: use the higher of the two ratios to avoid black
    // bars, at the expense of cropping some of the video.
    float ratio = MAX(xratio, yratio);
    float postWidth = naturalSize.width * ratio;
    float postHeight = naturalSize.height * ratio;
    float transx = (targetSize.width - postWidth) / 2;
    float transy = (targetSize.height - postHeight) / 2;
    CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
    matrix = CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
    transform = CGAffineTransformConcat(transform, matrix);
  }

  // Single "pass through video track" instruction spanning the whole asset.
  AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
  AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
  [passThroughLayer setTransform:transform atTime:kCMTimeZero];
  passThroughInstruction.layerInstructions = @[passThroughLayer];
  videoComposition.instructions = @[passThroughInstruction];
  return videoComposition;
}
// Joins the two track pumps: decides whether to cancel, complete immediately,
// or finalize the writer asynchronously. Called once both pumps have stopped.
- (void)finish
{
  // If either side was cancelled, -cancelExport owns completion; do nothing.
  if (self.reader.status == AVAssetReaderStatusCancelled || self.writer.status == AVAssetWriterStatusCancelled)
  {
    return;
  }
  // Writer failure (or no numeric sample time ever recorded): complete now.
  if ([self failed])
  {
    [self complete];
    return;
  }
  // Reader failure: abort the writer before completing.
  if (self.reader.status == AVAssetReaderStatusFailed)
  {
    [self.writer cancelWriting];
    [self complete];
    return;
  }
  // Success path: finalize the output file, then complete.
  [self.writer finishWritingWithCompletionHandler:^
  {
    [self complete];
  }];
}
// YES when the writer failed outright, or when no numeric video sample
// timestamp was ever recorded (i.e. nothing usable made it through).
- (BOOL) failed
{
  // CMTIME_IS_NUMERIC is exactly the hand-expanded flag test this method
  // previously carried: valid flag set, implied-value flags clear.
  BOOL sampleTimeValid = CMTIME_IS_NUMERIC(lastSamplePresentationTime);
  return (self.writer.status == AVAssetWriterStatusFailed || !sampleTimeValid);
}
// Final step of every export path: discards a partial output file on
// failure/cancel and fires the stored completion handler exactly once.
- (void)complete
{
  BOOL discardOutput = [self failed] || self.writer.status == AVAssetWriterStatusCancelled;
  if (discardOutput)
  {
    [NSFileManager.defaultManager removeItemAtURL:self.outputURL error:nil];
  }
  if (self.completionHandler != nil)
  {
    self.completionHandler();
    // Clear after invoking so the handler cannot fire twice.
    self.completionHandler = nil;
  }
}
// Error accessor: an explicitly recorded error (e.g. missing output URL)
// wins; otherwise surface the writer's error, falling back to the reader's.
- (NSError *)error
{
  if (_error != nil)
  {
    return _error;
  }
  NSError *writerError = self.writer.error;
  return writerError != nil ? writerError : self.reader.error;
}
// Maps the writer's status onto the AVAssetExportSession status vocabulary so
// this class can stand in for AVAssetExportSession.
- (AVAssetExportSessionStatus)status
{
  switch (self.writer.status)
  {
    case AVAssetWriterStatusWriting:
      return AVAssetExportSessionStatusExporting;
    case AVAssetWriterStatusFailed:
      return AVAssetExportSessionStatusFailed;
    case AVAssetWriterStatusCompleted:
      return AVAssetExportSessionStatusCompleted;
    case AVAssetWriterStatusCancelled:
      return AVAssetExportSessionStatusCancelled;
    case AVAssetWriterStatusUnknown:
    default:
      return AVAssetExportSessionStatusUnknown;
  }
}
// Cancels an in-flight export. The teardown is dispatched onto the sample
// pump's serial queue so it serializes with any encoding work in progress.
- (void)cancelExport
{
  // No input queue means no export was ever started; nothing to tear down.
  if (self.inputQueue == nil)
  {
    return;
  }
  dispatch_async(self.inputQueue, ^
  {
    [self.writer cancelWriting];
    [self.reader cancelReading];
    [self complete];
    [self reset];
  });
}
// Returns the session to its pristine state so the instance can be reused for
// another export.
- (void)reset
{
  _error = nil;
  // Also clear the progress-tracking ivars; previously they kept stale values
  // from the prior export, so a reused session could mis-report progress and
  // -failed could see an old (numeric) sample time.
  duration = 0;
  lastSamplePresentationTime = kCMTimeInvalid;
  self.progress = 0;
  self.reader = nil;
  self.videoOutput = nil;
  self.audioOutput = nil;
  self.writer = nil;
  self.videoInput = nil;
  self.videoPixelBufferAdaptor = nil;
  self.audioInput = nil;
  self.inputQueue = nil;
  self.completionHandler = nil;
}
- @end
|