//
// SDAVAssetExportSession.m
//
// This file is part of the SDAVAssetExportSession package.
//
// Created by Olivier Poitrey <rs@dailymotion.com> on 13/03/13.
// Copyright 2013 Olivier Poitrey. All rights reserved.
//
// For the full copyright and license information, please view the LICENSE
// file that was distributed with this source code.
//
  12. #import "SDAVAssetExportSession.h"
  13. @interface SDAVAssetExportSession ()
  14. @property (nonatomic, assign, readwrite) float progress;
  15. @property (nonatomic, strong) AVAssetReader *reader;
  16. @property (nonatomic, strong) AVAssetReaderVideoCompositionOutput *videoOutput;
  17. @property (nonatomic, strong) AVAssetReaderAudioMixOutput *audioOutput;
  18. @property (nonatomic, strong) AVAssetWriter *writer;
  19. @property (nonatomic, strong) AVAssetWriterInput *videoInput;
  20. @property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
  21. @property (nonatomic, strong) AVAssetWriterInput *audioInput;
  22. @property (nonatomic, strong) dispatch_queue_t inputQueue;
  23. @property (nonatomic, strong) void (^completionHandler)(void);
  24. @end
@implementation SDAVAssetExportSession
{
  NSError *_error;                    // backing store for the -error getter; set on setup failures
  NSTimeInterval duration;            // seconds being exported; denominator for the progress calculation
  CMTime lastSamplePresentationTime;  // PTS of the most recent video sample, relative to timeRange.start
}
  31. + (id)exportSessionWithAsset:(AVAsset *)asset
  32. {
  33. return [SDAVAssetExportSession.alloc initWithAsset:asset];
  34. }
  35. - (id)initWithAsset:(AVAsset *)asset
  36. {
  37. if ((self = [super init]))
  38. {
  39. _asset = asset;
  40. _timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
  41. }
  42. return self;
  43. }
  44. - (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler
  45. {
  46. NSParameterAssert(handler != nil);
  47. [self cancelExport];
  48. self.completionHandler = handler;
  49. if (!self.outputURL)
  50. {
  51. _error = [NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorExportFailed userInfo:@
  52. {
  53. NSLocalizedDescriptionKey: @"Output URL not set"
  54. }];
  55. handler();
  56. return;
  57. }
  58. NSError *readerError;
  59. self.reader = [AVAssetReader.alloc initWithAsset:self.asset error:&readerError];
  60. if (readerError)
  61. {
  62. _error = readerError;
  63. handler();
  64. return;
  65. }
  66. NSError *writerError;
  67. self.writer = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:self.outputFileType error:&writerError];
  68. if (writerError)
  69. {
  70. _error = writerError;
  71. handler();
  72. return;
  73. }
  74. self.reader.timeRange = self.timeRange;
  75. self.writer.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
  76. self.writer.metadata = self.metadata;
  77. NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];
  78. if (CMTIME_IS_VALID(self.timeRange.duration) && !CMTIME_IS_POSITIVE_INFINITY(self.timeRange.duration))
  79. {
  80. duration = CMTimeGetSeconds(self.timeRange.duration);
  81. }
  82. else
  83. {
  84. duration = CMTimeGetSeconds(self.asset.duration);
  85. }
  86. //
  87. // Video output
  88. //
  89. if (videoTracks.count > 0) {
  90. self.videoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks videoSettings:self.videoInputSettings];
  91. self.videoOutput.alwaysCopiesSampleData = NO;
  92. if (self.videoComposition)
  93. {
  94. self.videoOutput.videoComposition = self.videoComposition;
  95. }
  96. else
  97. {
  98. self.videoOutput.videoComposition = [self buildDefaultVideoComposition];
  99. }
  100. if ([self.reader canAddOutput:self.videoOutput])
  101. {
  102. [self.reader addOutput:self.videoOutput];
  103. }
  104. //
  105. // Video input
  106. //
  107. self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
  108. self.videoInput.expectsMediaDataInRealTime = NO;
  109. if ([self.writer canAddInput:self.videoInput])
  110. {
  111. [self.writer addInput:self.videoInput];
  112. }
  113. NSDictionary *pixelBufferAttributes = @
  114. {
  115. (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
  116. (id)kCVPixelBufferWidthKey: @(self.videoOutput.videoComposition.renderSize.width),
  117. (id)kCVPixelBufferHeightKey: @(self.videoOutput.videoComposition.renderSize.height),
  118. @"IOSurfaceOpenGLESTextureCompatibility": @YES,
  119. @"IOSurfaceOpenGLESFBOCompatibility": @YES,
  120. };
  121. self.videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
  122. }
  123. //
  124. //Audio output
  125. //
  126. NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
  127. if (audioTracks.count > 0) {
  128. self.audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
  129. self.audioOutput.alwaysCopiesSampleData = NO;
  130. self.audioOutput.audioMix = self.audioMix;
  131. if ([self.reader canAddOutput:self.audioOutput])
  132. {
  133. [self.reader addOutput:self.audioOutput];
  134. }
  135. } else {
  136. // Just in case this gets reused
  137. self.audioOutput = nil;
  138. }
  139. //
  140. // Audio input
  141. //
  142. if (self.audioOutput) {
  143. self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
  144. self.audioInput.expectsMediaDataInRealTime = NO;
  145. if ([self.writer canAddInput:self.audioInput])
  146. {
  147. [self.writer addInput:self.audioInput];
  148. }
  149. }
  150. [self.writer startWriting];
  151. [self.reader startReading];
  152. [self.writer startSessionAtSourceTime:self.timeRange.start];
  153. __block BOOL videoCompleted = NO;
  154. __block BOOL audioCompleted = NO;
  155. __weak typeof(self) wself = self;
  156. self.inputQueue = dispatch_queue_create("VideoEncoderInputQueue", DISPATCH_QUEUE_SERIAL);
  157. if (videoTracks.count > 0) {
  158. [self.videoInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
  159. {
  160. if (![wself encodeReadySamplesFromOutput:wself.videoOutput toInput:wself.videoInput])
  161. {
  162. @synchronized(wself)
  163. {
  164. videoCompleted = YES;
  165. if (audioCompleted)
  166. {
  167. [wself finish];
  168. }
  169. }
  170. }
  171. }];
  172. }
  173. else {
  174. videoCompleted = YES;
  175. }
  176. if (!self.audioOutput) {
  177. audioCompleted = YES;
  178. } else {
  179. [self.audioInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
  180. {
  181. if (![wself encodeReadySamplesFromOutput:wself.audioOutput toInput:wself.audioInput])
  182. {
  183. @synchronized(wself)
  184. {
  185. audioCompleted = YES;
  186. if (videoCompleted)
  187. {
  188. [wself finish];
  189. }
  190. }
  191. }
  192. }];
  193. }
  194. }
// Pumps samples from `output` to `input` for as long as the writer input can
// accept data. Returns YES when the stream may produce more later (the
// requestMediaDataWhenReadyOnQueue: block should be re-invoked), NO when this
// stream is finished or has failed.
- (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input
{
  while (input.isReadyForMoreMediaData)
  {
    CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
    if (sampleBuffer)
    {
      BOOL handled = NO;
      BOOL error = NO;
      // Stop as soon as either side has left its active state (failed or cancelled).
      if (self.reader.status != AVAssetReaderStatusReading || self.writer.status != AVAssetWriterStatusWriting)
      {
        handled = YES;
        error = YES;
      }
      if (!handled && self.videoOutput == output)
      {
        // update the video progress
        lastSamplePresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        lastSamplePresentationTime = CMTimeSubtract(lastSamplePresentationTime, self.timeRange.start);
        self.progress = duration == 0 ? 1 : CMTimeGetSeconds(lastSamplePresentationTime) / duration;
        // Optional per-frame rendering hook: the delegate draws into a pool
        // buffer which is appended via the pixel buffer adaptor instead of
        // appending the reader's sample buffer directly.
        if ([self.delegate respondsToSelector:@selector(exportSession:renderFrame:withPresentationTime:toBuffer:)])
        {
          CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
          CVPixelBufferRef renderBuffer = NULL;
          // NOTE(review): pool creation can fail leaving renderBuffer NULL —
          // appendPixelBuffer: would then be passed NULL; confirm acceptable.
          CVPixelBufferPoolCreatePixelBuffer(NULL, self.videoPixelBufferAdaptor.pixelBufferPool, &renderBuffer);
          [self.delegate exportSession:self renderFrame:pixelBuffer withPresentationTime:lastSamplePresentationTime toBuffer:renderBuffer];
          if (![self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:lastSamplePresentationTime])
          {
            error = YES;
          }
          CVPixelBufferRelease(renderBuffer);
          handled = YES;
        }
      }
      if (!handled && ![input appendSampleBuffer:sampleBuffer])
      {
        error = YES;
      }
      // copyNextSampleBuffer returns a +1 retained buffer; balance it here.
      CFRelease(sampleBuffer);
      if (error)
      {
        return NO;
      }
    }
    else
    {
      // Source exhausted: close this writer input for good.
      [input markAsFinished];
      return NO;
    }
  }
  return YES;
}
  247. - (AVMutableVideoComposition *)buildDefaultVideoComposition
  248. {
  249. AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
  250. AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
  251. // get the frame rate from videoSettings, if not set then try to get it from the video track,
  252. // if not set (mainly when asset is AVComposition) then use the default frame rate of 30
  253. float trackFrameRate = 0;
  254. if (self.videoSettings)
  255. {
  256. NSDictionary *videoCompressionProperties = [self.videoSettings objectForKey:AVVideoCompressionPropertiesKey];
  257. if (videoCompressionProperties)
  258. {
  259. NSNumber *frameRate = [videoCompressionProperties objectForKey:AVVideoAverageNonDroppableFrameRateKey];
  260. if (frameRate)
  261. {
  262. trackFrameRate = frameRate.floatValue;
  263. }
  264. }
  265. }
  266. else
  267. {
  268. trackFrameRate = [videoTrack nominalFrameRate];
  269. }
  270. if (trackFrameRate == 0)
  271. {
  272. trackFrameRate = 30;
  273. }
  274. videoComposition.frameDuration = CMTimeMake(1, trackFrameRate);
  275. CGSize targetSize = CGSizeMake([self.videoSettings[AVVideoWidthKey] floatValue], [self.videoSettings[AVVideoHeightKey] floatValue]);
  276. CGSize naturalSize = [videoTrack naturalSize];
  277. CGAffineTransform transform = videoTrack.preferredTransform;
  278. // Fix for mp4 video black screen with audio, see https://github.com/rs/SDAVAssetExportSession/issues/79 for more info
  279. CGRect rect = {{0, 0}, naturalSize};
  280. CGRect transformedRect = CGRectApplyAffineTransform(rect, transform);
  281. // transformedRect should have origin at 0 if correct; otherwise add offset to correct it
  282. transform.tx -= transformedRect.origin.x;
  283. transform.ty -= transformedRect.origin.y;
  284. // Workaround radar 31928389, see https://github.com/rs/SDAVAssetExportSession/pull/70 for more info
  285. if (transform.ty == -560) {
  286. transform.ty = 0;
  287. }
  288. if (transform.tx == -560) {
  289. transform.tx = 0;
  290. }
  291. CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
  292. if (videoAngleInDegree == 90 || videoAngleInDegree == -90) {
  293. CGFloat width = naturalSize.width;
  294. naturalSize.width = naturalSize.height;
  295. naturalSize.height = width;
  296. }
  297. videoComposition.renderSize = naturalSize;
  298. // center inside
  299. {
  300. float ratio;
  301. float xratio = targetSize.width / naturalSize.width;
  302. float yratio = targetSize.height / naturalSize.height;
  303. // Threema modification: use the higher of the two ratios two avoid black bars,
  304. // at the expense of cropping some of the video
  305. ratio = MAX(xratio, yratio);
  306. float postWidth = naturalSize.width * ratio;
  307. float postHeight = naturalSize.height * ratio;
  308. float transx = (targetSize.width - postWidth) / 2;
  309. float transy = (targetSize.height - postHeight) / 2;
  310. CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
  311. matrix = CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
  312. transform = CGAffineTransformConcat(transform, matrix);
  313. }
  314. // Make a "pass through video track" video composition.
  315. AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  316. passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
  317. AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
  318. [passThroughLayer setTransform:transform atTime:kCMTimeZero];
  319. passThroughInstruction.layerInstructions = @[passThroughLayer];
  320. videoComposition.instructions = @[passThroughInstruction];
  321. return videoComposition;
  322. }
  323. - (void)finish
  324. {
  325. // Synchronized block to ensure we never cancel the writer before calling finishWritingWithCompletionHandler
  326. if (self.reader.status == AVAssetReaderStatusCancelled || self.writer.status == AVAssetWriterStatusCancelled)
  327. {
  328. return;
  329. }
  330. if ([self failed])
  331. {
  332. [self complete];
  333. }
  334. else if (self.reader.status == AVAssetReaderStatusFailed) {
  335. [self.writer cancelWriting];
  336. [self complete];
  337. }
  338. else
  339. {
  340. [self.writer finishWritingWithCompletionHandler:^
  341. {
  342. [self complete];
  343. }];
  344. }
  345. }
  346. - (BOOL) failed
  347. {
  348. BOOL sampleTimeValid = ((Boolean)(((lastSamplePresentationTime).flags & (kCMTimeFlags_Valid | kCMTimeFlags_ImpliedValueFlagsMask)) == kCMTimeFlags_Valid));
  349. return (self.writer.status == AVAssetWriterStatusFailed || !sampleTimeValid);
  350. }
  351. - (void)complete
  352. {
  353. if ([self failed] || self.writer.status == AVAssetWriterStatusCancelled)
  354. {
  355. [NSFileManager.defaultManager removeItemAtURL:self.outputURL error:nil];
  356. }
  357. if (self.completionHandler)
  358. {
  359. self.completionHandler();
  360. self.completionHandler = nil;
  361. }
  362. }
  363. - (NSError *)error
  364. {
  365. if (_error)
  366. {
  367. return _error;
  368. }
  369. else
  370. {
  371. return self.writer.error ? : self.reader.error;
  372. }
  373. }
  374. - (AVAssetExportSessionStatus)status
  375. {
  376. switch (self.writer.status)
  377. {
  378. default:
  379. case AVAssetWriterStatusUnknown:
  380. return AVAssetExportSessionStatusUnknown;
  381. case AVAssetWriterStatusWriting:
  382. return AVAssetExportSessionStatusExporting;
  383. case AVAssetWriterStatusFailed:
  384. return AVAssetExportSessionStatusFailed;
  385. case AVAssetWriterStatusCompleted:
  386. return AVAssetExportSessionStatusCompleted;
  387. case AVAssetWriterStatusCancelled:
  388. return AVAssetExportSessionStatusCancelled;
  389. }
  390. }
  391. - (void)cancelExport
  392. {
  393. if (self.inputQueue)
  394. {
  395. dispatch_async(self.inputQueue, ^
  396. {
  397. [self.writer cancelWriting];
  398. [self.reader cancelReading];
  399. [self complete];
  400. [self reset];
  401. });
  402. }
  403. }
  404. - (void)reset
  405. {
  406. _error = nil;
  407. self.progress = 0;
  408. self.reader = nil;
  409. self.videoOutput = nil;
  410. self.audioOutput = nil;
  411. self.writer = nil;
  412. self.videoInput = nil;
  413. self.videoPixelBufferAdaptor = nil;
  414. self.audioInput = nil;
  415. self.inputQueue = nil;
  416. self.completionHandler = nil;
  417. }
  418. @end