
# AVFoundation tvOS xcode9 beta1


# AVFoundation.framework

## mandel

diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h	2016-08-05 01:59:15.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h	2017-05-24 00:28:26.000000000 -0400
@@ -3,7 +3,7 @@
  
     Framework:  AVFoundation
  
-	Copyright 2010-2012 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
  
  */
 
@@ -19,6 +19,7 @@
 */
 AVF_EXPORT const CFTimeInterval AVCoreAnimationBeginTimeAtZero NS_AVAILABLE(10_7, 4_0);
 
+typedef NSString * AVLayerVideoGravity NS_STRING_ENUM;
 
 /*!
 	@constant		AVLayerVideoGravityResizeAspect
@@ -26,7 +27,7 @@
 	@discussion		AVLayerVideoGravityResizeAspect may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspect NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResizeAspect NS_AVAILABLE(10_7, 4_0);
 
 
 /*!
@@ -35,7 +36,7 @@
     @discussion     AVLayerVideoGravityResizeAspectFill may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVLayerVideoGravityResize
@@ -43,4 +44,4 @@
     @discussion     AVLayerVideoGravityResize may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResize NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResize NS_AVAILABLE(10_7, 4_0);
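
Not from the SDK: a minimal usage sketch of the retyped gravity constants above. In Objective-C the change is source-compatible, since `AVLayerVideoGravity` is a typedef for `NSString *`; the `NS_STRING_ENUM` annotation mainly improves how the constants import into Swift. The player-layer setup is assumed.

```objc
#import <AVFoundation/AVFoundation.h>

static void ConfigurePlayerLayer(AVPlayerLayer *playerLayer)
{
    // Assigning one of the retyped constants; assumes AVPlayerLayer's
    // videoGravity property adopts the same AVLayerVideoGravity typedef.
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
```
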
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h	2017-02-22 01:14:49.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -11,6 +11,8 @@
 #import <Foundation/Foundation.h>
 #import <AVFoundation/AVAsynchronousKeyValueLoading.h>
 #import <AVFoundation/AVContentKeySession.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVMetadataFormat.h>
 
 #import <CoreGraphics/CGAffineTransform.h>
 
@@ -177,7 +179,7 @@
   @result		An NSArray of AVAssetTracks; may be empty if no tracks of the specified media type are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
   @method		tracksWithMediaCharacteristic:
@@ -187,7 +189,7 @@
   @result		An NSArray of AVAssetTracks; may be empty if no tracks with the specified characteristic are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 /*!
  @property trackGroups
@@ -224,7 +226,7 @@
 
 /* Provides an NSArray of NSStrings, each representing a metadata format that's available to the asset (e.g. ID3, iTunes metadata, etc.). Metadata formats are defined in AVMetadataFormat.h.
 */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
 
 /*!
   @method		metadataForFormat:
@@ -234,7 +236,7 @@
   @result		An NSArray containing AVMetadataItems; may be empty if there is no metadata of the specified format.
   @discussion	Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
 */
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(NSString *)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
 
 @end
 
@@ -263,7 +265,7 @@
  
 	Further filtering of the metadata items in AVTimedMetadataGroups according to language can be accomplished using +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:]; filtering of the metadata items according to locale can be accomplished using +[AVMetadataItem metadataItemsFromArray:withLocale:].
 */
-- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<NSString *> *)commonKeys NS_AVAILABLE(10_7, 4_3);
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys NS_AVAILABLE(10_7, 4_3);
 
 /*!
  @method		chapterMetadataGroupsBestMatchingPreferredLanguages:
@@ -293,7 +295,7 @@
 
 /* Provides an NSArray of NSStrings, each NSString indicating a media characteristic for which a media selection option is available.
 */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMediaCharacteristicsWithMediaSelectionOptions NS_AVAILABLE(10_8, 5_0);
+@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions NS_AVAILABLE(10_8, 5_0);
 
 /*!
   @method		mediaSelectionGroupForMediaCharacteristic:
@@ -312,7 +314,7 @@
 	
 	Filtering of the options in the returned AVMediaSelectionGroup according to playability, locale, and additional media characteristics can be accomplished using the category AVMediaSelectionOptionFiltering defined on AVMediaSelectionGroup.
 */
-- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(NSString *)mediaCharacteristic NS_AVAILABLE(10_8, 5_0);
+- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic NS_AVAILABLE(10_8, 5_0);
 
 /*!
   @property		preferredMediaSelection
@@ -320,6 +322,14 @@
 */
 @property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection NS_AVAILABLE(10_11, 9_0);
 
+/*!
+  @property		allMediaSelections
+  @abstract		Provides an array of all permutations of AVMediaSelection for this asset.
+  @discussion
+	Becomes callable without blocking when the key @"availableMediaCharacteristicsWithMediaSelectionOptions" has been loaded.
+*/
+@property (nonatomic, readonly) NSArray <AVMediaSelection *> *allMediaSelections NS_AVAILABLE(10_13, 11_0);
+
 @end
 
 
@@ -364,7 +374,10 @@
 
 @interface AVAsset (AVAssetUsability)
 
-/* indicates whether an AVPlayerItem can be initialized with the receiver or with its URL
+/*!
+ @property		playable
+ @abstract		Indicates whether an AVPlayer can play the contents of the asset in a manner that meets user expectations.
+ @discussion	A client can attempt playback when playable is NO, this however may lead to a substandard playback experience.
 */
 @property (nonatomic, readonly, getter=isPlayable) BOOL playable NS_AVAILABLE(10_7, 4_3);
 
@@ -477,7 +490,7 @@
   @abstract		Provides the file types the AVURLAsset class understands.
   @result		An NSArray of UTIs identifying the file types the AVURLAsset class understands.
 */
-+ (NSArray<NSString *> *)audiovisualTypes NS_AVAILABLE(10_7, 5_0);
++ (NSArray<AVFileType> *)audiovisualTypes NS_AVAILABLE(10_7, 5_0);
 
 /*!
   @method		audiovisualMIMETypes
@@ -632,7 +645,7 @@
 @interface AVFragmentedAsset : AVURLAsset <AVFragmentMinding>
 {
 @private
-	AVFragmentedAssetInternal	*_fragmentedAsset;
+	AVFragmentedAssetInternal	*_fragmentedAsset __attribute__((unused));
 }
 
 /*!
@@ -675,7 +688,7 @@
   @result		An NSArray of AVFragmentedAssetTracks; may be empty if no tracks of the specified media type are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
   @method		tracksWithMediaCharacteristic:
@@ -685,7 +698,7 @@
   @result		An NSArray of AVFragmentedAssetTracks; may be empty if no tracks with the specified characteristic are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
 
@@ -751,7 +764,7 @@
 	@property 		mayRequireContentKeysForMediaDataProcessing
 	@abstract		Allows AVURLAsset to be added as a content key recipient to an AVContentKeySession.
 */
-@property (nonatomic, readonly) BOOL mayRequireContentKeysForMediaDataProcessing API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+@property (nonatomic, readonly) BOOL mayRequireContentKeysForMediaDataProcessing;
 
 @end
 
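
Not from the SDK: a minimal sketch of the retyped AVAsset surface and the new `allMediaSelections` property, assuming an asset whose @"tracks" and @"availableMediaCharacteristicsWithMediaSelectionOptions" keys have already been loaded (otherwise these calls block).

```objc
#import <AVFoundation/AVFoundation.h>

static void InspectAsset(AVAsset *asset)
{
    // tracksWithMediaType: now takes AVMediaType rather than NSString *.
    NSArray<AVAssetTrack *> *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    NSLog(@"%lu video track(s)", (unsigned long)videoTracks.count);

    // New in this beta: every permutation of AVMediaSelection for the asset.
    for (AVMediaSelection *selection in asset.allMediaSelections) {
        NSLog(@"media selection: %@", selection);
    }
}
```
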
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h	2017-05-24 00:41:53.000000000 -0400
@@ -0,0 +1,127 @@
+/*
+	File:  AVAssetDownloadStorageManager.h
+ 
+	Framework:  AVFoundation
+ 
+	Copyright 2017 Apple Inc. All rights reserved.
+ 
+ */
+
+/*!
+	@class		AVAssetDownloadStorageManager
+ 
+	@abstract	An AVAssetDownloadStorageManager manages the policy for automatic purging of downloaded AVAssets. The policy is vended as  AVAssetDownloadStorageManagementPolicy object.
+
+	@discussion	When a storage management policy needs to be set on an asset, sharedDownloadStorageManager singleton needs to be fetched. 
+ 				The new policy can then be set by using setStorageManagementPolicy and the location of the downloaded asset.
+ */
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVAssetDownloadStorageManagementPolicy;
+/*!
+	@group		AVAssetDownloadedAssetEvictionPriority string constants
+	@brief		Used by AVAssetDownloadStorageManagementPolicy.
+*/
+typedef NSString *AVAssetDownloadedAssetEvictionPriority NS_STRING_ENUM API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+/*!
+	@enum		AVAssetDownloadedAssetEvictionPriority
+	@abstract	These constants represents the eviction priority of downloaded assets.
+
+	@constant	AVAssetDownloadedAssetEvictionPriorityImportant
+				Used to mark assets with the highest priority. They will be the last to be purged.
+	@constant	AVAssetDownloadedAssetEvictionPriorityDefault
+				Used to mark assets have the default priority. They will be the first to be purged.
+*/
+AVF_EXPORT AVAssetDownloadedAssetEvictionPriority const AVAssetDownloadedAssetEvictionPriorityImportant				API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+AVF_EXPORT AVAssetDownloadedAssetEvictionPriority const AVAssetDownloadedAssetEvictionPriorityDefault				API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVAssetDownloadStorageManager : NSObject
+
+/*!
+	@method		sharedDownloadStorageManager
+	@abstract	returns singleton instance.
+*/
++ (AVAssetDownloadStorageManager *)sharedDownloadStorageManager;
+
+/*!
+	@method		setStorageManagementPolicy: forURL
+	@abstract	Sets the policy for asset with disk backing at downloadStorageURL.
+	@param		downloadStorageURL
+ 				The location of downloaded asset.
+*/
+- (void)setStorageManagementPolicy:(AVAssetDownloadStorageManagementPolicy *)storageManagementPolicy forURL:(NSURL *)downloadStorageURL;
+
+/*!
+	@method		storageManagementPolicyForURL:downloadStorageURL
+	@abstract	Returns the storage management policy for asset downloaded at downloadStorageURL.
+                This may be nil if a storageManagementPolicy was never set on the downloaded asset.
+	@param		downloadStorageURL
+				The location of downloaded asset.
+*/
+- (nullable AVAssetDownloadStorageManagementPolicy *)storageManagementPolicyForURL:(NSURL *)downloadStorageURL;
+
+@end
+
+/*!
+	@class		AVAssetDownloadStorageManagementPolicy
+ 
+	@abstract	A class to inform the system of a policy for automatic purging of downloaded AVAssets.
+ 
+	@discussion	System will put in best-effort to evict all the assets based on expirationDate before evicting based on priority.
+ */
+@class AVAssetDownloadStorageManagementPolicyInternal;
+
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVAssetDownloadStorageManagementPolicy : NSObject <NSCopying, NSMutableCopying> {
+@private
+	AVAssetDownloadStorageManagementPolicyInternal    *_storageManagementPolicy;
+}
+
+/*
+ 	@property	priority
+ 	@abstract	Indicates the eviction priority of downloaded asset.
+ 	@discussion	Assets with default priority will be purged first before assets with higher priorities.
+				In case this is not set, default priority is used.
+ */
+@property (nonatomic, readonly, copy) AVAssetDownloadedAssetEvictionPriority priority;
+
+/*
+ 	@property	expirationDate
+ 	@abstract	Returns the expiration date of asset.
+ */
+@property (nonatomic, readonly, copy) NSDate *expirationDate;
+
+@end
+/*!
+	@class		AVMutableAssetDownloadStorageManagementPolicy
+ 
+	@abstract	A mutable subclass of AVAssetDownloadStorageManagementPolicy.
+ 
+	@discussion	System will put in best-effort to evict all the assets based on expirationDate before evicting based on priority.
+ */
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVMutableAssetDownloadStorageManagementPolicy : AVAssetDownloadStorageManagementPolicy
+
+/*
+	@property	priority
+	@abstract	Indicates the eviction priority of downloaded asset.
+	@discussion	Assets with default priority will be purged first before assets with higher priorities.
+ 				In case this is not set, default priority is used.
+ */
+@property (nonatomic, copy) AVAssetDownloadedAssetEvictionPriority priority;
+
+/*
+	@property	expirationDate
+ 	@abstract	Returns the expiration date of asset.
+ */
+@property (nonatomic, copy) NSDate *expirationDate;
+
+@end
+
+NS_ASSUME_NONNULL_END
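
Not from the SDK: a minimal sketch of the new storage-management API. Note the availability macros above mark everything `API_UNAVAILABLE` on tvOS, so despite appearing in this tvOS header diff the API is iOS 11-only. `downloadedAssetURL` is assumed to be the location previously reported for a finished download.

```objc
#import <AVFoundation/AVFoundation.h>

static void ProtectDownloadedAsset(NSURL *downloadedAssetURL)
{
    // Mark the asset important so it is purged last, with a best-effort
    // expiration one week out (values are illustrative).
    AVMutableAssetDownloadStorageManagementPolicy *policy =
        [[AVMutableAssetDownloadStorageManagementPolicy alloc] init];
    policy.priority = AVAssetDownloadedAssetEvictionPriorityImportant;
    policy.expirationDate = [NSDate dateWithTimeIntervalSinceNow:7 * 24 * 60 * 60];

    [[AVAssetDownloadStorageManager sharedDownloadStorageManager]
        setStorageManagementPolicy:policy forURL:downloadedAssetURL];
}
```
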
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadTask.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadTask.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadTask.h	2016-08-05 01:59:15.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadTask.h	2017-05-24 00:28:06.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
 
-	Copyright 2015 Apple Inc. All rights reserved.
+	Copyright 2015-2016 Apple Inc. All rights reserved.
 
 */
 
@@ -36,10 +36,10 @@
 /*!
  @class			AVAssetDownloadTask
  @abstract		A NSURLSessionTask that accepts remote AVURLAssets to download locally.
- @discussion	Should be created with -[AVAssetDownloadURLSession assetDownloadTaskWithURLAsset:destinationURL:options:]. To utilize local data for playback for downloads that are in-progress, re-use the URLAsset supplied in initialization. An AVAssetDownloadTask may be instantiated with a destinationURL pointing to an existing asset on disk, for the purpose of completing or augmenting a downloaded asset.
+ @discussion	Should be created with -[AVAssetDownloadURLSession assetDownloadTaskWithURLAsset:assetTitle:assetArtworkData:options:]. To utilize local data for playback for downloads that are in-progress, re-use the URLAsset supplied in initialization. An AVAssetDownloadTask may be instantiated with a destinationURL pointing to an existing asset on disk, for the purpose of completing or augmenting a downloaded asset.
 */
 
-NS_CLASS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
 @interface AVAssetDownloadTask : NSURLSessionTask
 
 /*!
@@ -76,13 +76,34 @@
 
 @end
 
+/*!
+ @class			AVAggregateAssetDownloadTask
+ @abstract		An AVAssetDownloadTask used for downloading mutliple AVMediaSelections for a single AVAsset, under the umbrella of a single download task.
+ @discussion	Should be created with -[AVAssetDownloadURLSession aggregateAssetDownloadTaskWithURLAsset:mediaSelections:assetTitle:assetArtworkData:options:. For progress tracking, monitor the delegate callbacks for each childAssetDownloadTask.
+*/
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVAggregateAssetDownloadTask : NSURLSessionTask
+
+/*!
+ @property		URLAsset
+ @abstract		The asset supplied to the download task upon initialization.
+*/
+@property (nonatomic, readonly) AVURLAsset *URLAsset;
+
+// NSURLRequest and NSURLResponse objects are not available for AVAggregateAssetDownloadTask
+AV_INIT_UNAVAILABLE
+@property (readonly, copy) NSURLRequest *originalRequest NS_UNAVAILABLE;
+@property (readonly, copy) NSURLRequest *currentRequest NS_UNAVAILABLE;
+@property (readonly, copy) NSURLResponse *response NS_UNAVAILABLE;
+
+@end
 
 /*!
  @protocol		AVAssetDownloadDelegate
- @abstract		Delegate method to implement when adopting AVAssetDownloadTask.
+ @abstract		Delegate methods to implement when adopting AVAssetDownloadTask.
 */
 
-__TVOS_PROHIBITED
+__TVOS_PROHIBITED __WATCHOS_PROHIBITED
 @protocol AVAssetDownloadDelegate <NSURLSessionTaskDelegate>
 @optional
 /*!
@@ -100,7 +121,7 @@
 
 /*!
  @method		URLSession:assetDownloadTask:didLoadTimeRange:totalTimeRangesLoaded:timeRangeExpectedToLoad:
- @abstract		Method to adopt to subscribe to progress updates of the AVAssetDownloadTask
+ @abstract		Method to adopt to subscribe to progress updates of an AVAssetDownloadTask.
  @param			session
 				The session the asset download task is on.
  @param			assetDownloadTask
@@ -126,6 +147,49 @@
 */
 - (void)URLSession:(NSURLSession *)session assetDownloadTask:(AVAssetDownloadTask *)assetDownloadTask didResolveMediaSelection:(AVMediaSelection *)resolvedMediaSelection NS_AVAILABLE_IOS(9_0);
 
+/*
+ @method		URLSession:aggregateAssetDownloadTask:willDownloadToURL:
+ @abstract		Method called when the an aggregate download task determines the location this asset will be downloaded to.
+ @discussion	This URL should be saved for future instantiations of AVAsset. While an AVAsset already exists for this content, it is advisable to re-use that instance.
+ @param			session
+				The session the aggregate asset download task is on.
+ @param			aggregateAssetDownloadTask
+				The AVAggregateAssetDownloadTask.
+ @param			location
+				The file URL this task will download media data to.
+*/
+- (void)URLSession:(NSURLSession *)session aggregateAssetDownloadTask:(AVAggregateAssetDownloadTask *)aggregateAssetDownloadTask willDownloadToURL:(NSURL *)location NS_AVAILABLE_IOS(11_0);
+
+/*
+ @method		URLSession:aggregateAssetDownloadTask:didCompleteForMediaSelection:
+ @abstract		Method called when a child AVAssetDownloadTask completes.
+ @param			session
+				The session the aggregate asset download task is on.
+ @param			aggregateAssetDownloadTask
+				The AVAggregateAssetDownloadTask.
+ @param			mediaSelection
+				The AVMediaSelection which is now fully available for offline use.
+*/
+- (void)URLSession:(NSURLSession *)session aggregateAssetDownloadTask:(AVAggregateAssetDownloadTask *)aggregateAssetDownloadTask didCompleteForMediaSelection:(AVMediaSelection *)mediaSelection NS_AVAILABLE_IOS(11_0);
+
+/*
+ @method		URLSession:aggregateAssetDownloadTask:didLoadTimeRange:totalTimeRangesLoaded:timeRangeExpectedToLoad:forMediaSelection:
+ @abstract		Method to adopt to subscribe to progress updates of an AVAggregateAssetDownloadTask
+ @param			session
+				The session the asset download task is on.
+ @param			aggregateAssetDownloadTask
+				The AVAggregateAssetDownloadTask.
+ @param			timeRange
+				A CMTimeRange indicating the time range loaded for the media selection being downloaded.
+ @param			loadedTimeRanges
+				A NSArray of NSValues of CMTimeRanges indicating all the time ranges loaded for the media selection being downloaded.
+ @param			timeRangeExpectedToLoad
+				A CMTimeRange indicating the single time range that is expected to be loaded when the download is complete for the media selection being downloaded.
+ @param			mediaSelection
+				The media selection which has additional media data loaded for offline use.
+*/
+- (void)URLSession:(NSURLSession *)session aggregateDownloadTask:(AVAggregateAssetDownloadTask *)aggregateAssetDownloadTask didLoadTimeRange:(CMTimeRange)timeRange totalTimeRangesLoaded:(NSArray<NSValue *> *)loadedTimeRanges timeRangeExpectedToLoad:(CMTimeRange)timeRangeExpectedToLoad forMediaSelection:(AVMediaSelection *)mediaSelection NS_AVAILABLE_IOS(11_0);
+
 @end
 
 
@@ -176,6 +240,23 @@
 */
 - (nullable AVAssetDownloadTask *)assetDownloadTaskWithURLAsset:(AVURLAsset *)URLAsset assetTitle:(NSString *)title assetArtworkData:(nullable NSData *)artworkData options:(nullable NSDictionary<NSString *, id> *)options NS_AVAILABLE_IOS(10_0);
 
+/*!
+ @method		aggregateAssetDownloadTaskWithURLAsset:mediaSelections:assetTitle:assetArtworkData:options:
+ @abstract		Creates and initializes an AVAggregateAssetDownloadTask to download multiple AVMediaSelections on an AVURLAsset.
+ @discussion	This method may return nil if the URLSession has been invalidated. The value of AVAssetDownloadTaskMediaSelectionKey will be ignored.
+ @param			URLAsset
+				The AVURLAsset to download locally.
+ @param			mediaSelections
+				A list of AVMediaSelections. Each AVMediaSelection will correspond to a childAssetDownloadTask. Use -[AVAsset allMediaSelections] to download all AVMediaSelections on this AVAsset.
+ @param			assetTitle
+				A human readable title for this asset, expected to be as suitable as possible for the user's preferred languages. Will show up in the usage pane of the settings app.
+ @param			assetArtworkData
+				Artwork data for this asset. Optional. Will show up in the usage pane of the settings app.
+ @param			options
+				See AVAssetDownloadTask*Key above. Configures non-default behavior for the download task.
+*/
+- (nullable AVAggregateAssetDownloadTask *)aggregateAssetDownloadTaskWithURLAsset:(AVURLAsset *)URLAsset mediaSelections:(NSArray <AVMediaSelection *> *)mediaSelections assetTitle:(NSString *)title assetArtworkData:(nullable NSData *)artworkData options:(nullable NSDictionary<NSString *, id> *)options NS_AVAILABLE_IOS(11_0);
+
 // only AVAssetDownloadTasks can be created with AVAssetDownloadURLSession
 AV_INIT_UNAVAILABLE
 + (NSURLSession *)sharedSession NS_UNAVAILABLE;
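
Not from the SDK: a minimal sketch of the new aggregate download API (still `__TVOS_PROHIBITED`, so iOS-only). The session identifier, asset title, and delegate are assumptions.

```objc
#import <AVFoundation/AVFoundation.h>

static void StartAggregateDownload(AVURLAsset *asset,
                                   id<AVAssetDownloadDelegate> delegate)
{
    NSURLSessionConfiguration *config = [NSURLSessionConfiguration
        backgroundSessionConfigurationWithIdentifier:@"com.example.asset-downloads"];
    AVAssetDownloadURLSession *session =
        [AVAssetDownloadURLSession sessionWithConfiguration:config
                                      assetDownloadDelegate:delegate
                                              delegateQueue:[NSOperationQueue mainQueue]];

    // -[AVAsset allMediaSelections] pairs with this API: one child task
    // per AVMediaSelection under a single umbrella task.
    AVAggregateAssetDownloadTask *task =
        [session aggregateAssetDownloadTaskWithURLAsset:asset
                                        mediaSelections:asset.allMediaSelections
                                             assetTitle:@"Example Movie"
                                       assetArtworkData:nil
                                                options:nil];
    [task resume];
}
```
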
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2016-05-03 18:21:25.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,12 +3,14 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMTimeRange.h>
@@ -53,21 +55,31 @@
 
 
 /* These export options can be used to produce movie files with video size appropriate to the device.
-	The export will not scale the video up from a smaller size. The video will be compressed using
-	H.264 and the audio will be compressed using AAC.  */
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   H.264 and the audio will be compressed using AAC.  */
+AVF_EXPORT NSString *const AVAssetExportPresetLowQuality         NS_AVAILABLE(10_11, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPresetMediumQuality      NS_AVAILABLE(10_11, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPresetHighestQuality     NS_AVAILABLE(10_11, 4_0);
 
-AVF_EXPORT NSString *const AVAssetExportPresetLowQuality        NS_AVAILABLE(10_11, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPresetMediumQuality     NS_AVAILABLE(10_11, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPresetHighestQuality    NS_AVAILABLE(10_11, 4_0);
+/* These export options can be used to produce movie files with video size appropriate to the device.
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   HEVC and the audio will be compressed using AAC.  */
+AVF_EXPORT NSString *const AVAssetExportPresetHEVCHighestQuality NS_AVAILABLE(10_13, 11_0);
+
+/* These export options can be used to produce movie files with the specified video size.
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   H.264 and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
+AVF_EXPORT NSString *const AVAssetExportPreset640x480           NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset960x540           NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset1280x720          NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset1920x1080         NS_AVAILABLE(10_7, 5_0);
+AVF_EXPORT NSString *const AVAssetExportPreset3840x2160         NS_AVAILABLE(10_10, 9_0);
 
 /* These export options can be used to produce movie files with the specified video size.
-	The export will not scale the video up from a smaller size. The video will be compressed using
-	H.264 and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
-AVF_EXPORT NSString *const AVAssetExportPreset640x480			NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset960x540   		NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset1280x720  		NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset1920x1080			NS_AVAILABLE(10_7, 5_0);
-AVF_EXPORT NSString *const AVAssetExportPreset3840x2160			NS_AVAILABLE(10_10, 9_0);
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   HEVC and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
+AVF_EXPORT NSString *const AVAssetExportPresetHEVC1920x1080     NS_AVAILABLE(10_13, 11_0);
+AVF_EXPORT NSString *const AVAssetExportPresetHEVC3840x2160     NS_AVAILABLE(10_13, 11_0);
 
 /*  This export option will produce an audio-only .m4a file with appropriate iTunes gapless playback data */
 AVF_EXPORT NSString *const AVAssetExportPresetAppleM4A			NS_AVAILABLE(10_7, 4_0);
@@ -152,7 +164,7 @@
 /* Indicates the type of file to be written by the session.
    The value of this property must be set before you invoke -exportAsynchronouslyWithCompletionHandler:; otherwise -exportAsynchronouslyWithCompletionHandler: will raise an NSInternalInconsistencyException.
    Setting the value of this property to a file type that's not among the session's supported file types will result in an NSInvalidArgumentException. See supportedFileTypes. */
-@property (nonatomic, copy, nullable) NSString *outputFileType;
+@property (nonatomic, copy, nullable) AVFileType outputFileType;
 
 /* Indicates the URL of the export session's output. You may use UTTypeCopyPreferredTagWithClass(outputFileType, kUTTagClassFilenameExtension) to obtain an appropriate path extension for the outputFileType you have specified. For more information about UTTypeCopyPreferredTagWithClass and kUTTagClassFilenameExtension, on iOS see <MobileCoreServices/UTType.h> and on Mac OS X see <LaunchServices/UTType.h>.  */
 @property (nonatomic, copy, nullable) NSURL *outputURL;
@@ -231,10 +243,10 @@
 								(such as adding or deleting tracks) should be made to the asset between retrieving compatible identifiers and performing the export operation.
 	@param presetName			An NSString specifying the name of the preset template for the export.
 	@param asset				An AVAsset object that is intended to be exported.
-	@param outputFileType		An NSString indicating a file type to check; or nil, to query whether there are any compatible types.
+	@param outputFileType		An AVFileType indicating a file type to check; or nil, to query whether there are any compatible types.
 	@param completionHandler	A block called with the compatibility result.
  */
-+ (void)determineCompatibilityOfExportPreset:(NSString *)presetName withAsset:(AVAsset *)asset outputFileType:(nullable NSString *)outputFileType completionHandler:(void (^)(BOOL compatible))handler NS_AVAILABLE(10_9, 6_0);
++ (void)determineCompatibilityOfExportPreset:(NSString *)presetName withAsset:(AVAsset *)asset outputFileType:(nullable AVFileType)outputFileType completionHandler:(void (^)(BOOL compatible))handler NS_AVAILABLE(10_9, 6_0);
 
 @end
 
@@ -242,7 +254,7 @@
 
 /* Indicates the types of files the target can write, according to the preset the target was initialized with.
    Does not perform an inspection of the AVAsset to determine whether its contents are compatible with the supported file types. If you need to make that determination before initiating the export, use - (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray *compatibleFileTypes))handler:. */
-@property (nonatomic, readonly) NSArray<NSString *> *supportedFileTypes;
+@property (nonatomic, readonly) NSArray<AVFileType> *supportedFileTypes;
 
 /*!
 	@method						determineCompatibleFileTypesWithCompletionHandler:
@@ -251,7 +263,7 @@
 								Called when the inspection completes with an array of file types the ExportSession can write.  Note that this may have a count of zero.
 	@discussion					This method is different than the supportedFileTypes property in that it performs an inspection of the AVAsset in order to determine its compatibility with each of the session's supported file types.
 */
-- (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray<NSString *> *compatibleFileTypes))handler NS_AVAILABLE(10_9, 6_0);
+- (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray<AVFileType> *compatibleFileTypes))handler NS_AVAILABLE(10_9, 6_0);
 
 @end
 
@@ -298,7 +310,7 @@
 /* Indicates the processing algorithm used to manage audio pitch for scaled audio edits.
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchAlgorithmSpectral, are defined in AVAudioProcessingSettings.h. An NSInvalidArgumentException will be raised if this property is set to a value other than the constants defined in that file.
    The default value is AVAudioTimePitchAlgorithmSpectral. */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 /* Indicates whether non-default audio mixing is enabled for export and supplies the parameters for audio mixing.  Ignored when export preset is AVAssetExportPresetPassthrough. */
 @property (nonatomic, copy, nullable) AVAudioMix *audioMix;
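
Not from the SDK: a minimal sketch using one of the new HEVC presets together with the retyped `AVFileType` outputFileType. HEVC encoding is device-dependent, so a real client should first call `+determineCompatibilityOfExportPreset:withAsset:outputFileType:completionHandler:`; the output URL is a placeholder.

```objc
#import <AVFoundation/AVFoundation.h>

static void ExportHEVC(AVAsset *asset, NSURL *outputURL)
{
    AVAssetExportSession *session =
        [AVAssetExportSession exportSessionWithAsset:asset
                                          presetName:AVAssetExportPresetHEVCHighestQuality];
    session.outputFileType = AVFileTypeQuickTimeMovie; // now typed AVFileType
    session.outputURL = outputURL;

    [session exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"export finished with status %ld", (long)session.status);
    }];
}
```
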
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h	2016-09-12 23:29:45.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -32,12 +32,19 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /*!
+ @typedef AVAssetImageGeneratorApertureMode
+ @abstract
+    The type of an aperture mode.
+*/
+typedef NSString * AVAssetImageGeneratorApertureMode NS_STRING_ENUM;
+
+/*!
 	@constant		AVAssetImageGeneratorApertureModeCleanAperture
 	@abstract		Both pixel aspect ratio and clean aperture will be applied.
 	@discussion
 		An image's clean aperture is a region of video free from transition artifacts caused by the encoding of the signal.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeCleanAperture NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeCleanAperture NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVAssetImageGeneratorApertureModeProductionAperture
@@ -45,7 +52,7 @@
 	@discussion
 		The image is not cropped to the clean aperture region, but it is scaled according to the pixel aspect ratio. Use this option when you want to see all the pixels in your video, including the edges.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeProductionAperture NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeProductionAperture NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVAssetImageGeneratorApertureModeEncodedPixels
@@ -53,7 +60,7 @@
 	@discussion
 		The image is not cropped to the clean aperture region and is not scaled according to the pixel aspect ratio. The encoded dimensions of the image description are displayed.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeEncodedPixels NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeEncodedPixels NS_AVAILABLE(10_7, 4_0);
 
 typedef NS_ENUM(NSInteger, AVAssetImageGeneratorResult)
 {
@@ -83,7 +90,7 @@
 @property (nonatomic) CGSize maximumSize;
 
 /* Specifies the aperture mode for the generated image.  Default is AVAssetImageGeneratorApertureModeCleanAperture. */
-@property (nonatomic, copy, nullable) NSString *apertureMode;
+@property (nonatomic, copy, nullable) AVAssetImageGeneratorApertureMode apertureMode;
 
 /* Specifies the video composition to use when extracting images from assets with multiple video tracks.
    If no videoComposition is specified, only the first enabled video track will be used.
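
Not from the SDK: a minimal sketch of the retyped aperture mode; usage is unchanged in Objective-C.

```objc
#import <AVFoundation/AVFoundation.h>

static CGImageRef CreateFrameImage(AVAsset *asset, CMTime time)
{
    AVAssetImageGenerator *generator =
        [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    // apertureMode is now typed AVAssetImageGeneratorApertureMode.
    generator.apertureMode = AVAssetImageGeneratorApertureModeCleanAperture;

    NSError *error = nil;
    // Caller owns the returned image and must CGImageRelease() it.
    return [generator copyCGImageAtTime:time actualTime:NULL error:&error];
}
```
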
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h	2016-09-12 23:29:45.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,12 +3,13 @@
 
 	Framework:  AVFoundation
  
-    Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 #import <AVFoundation/AVBase.h>
 #import <AVFoundation/AVVideoComposition.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMSampleBuffer.h>
 
@@ -27,6 +28,8 @@
  
  @discussion
 	Clients can read the media data of an asset by adding one or more concrete instances of AVAssetReaderOutput to an AVAssetReader using the -[AVAssetReader addOutput:] method.
+	
+	IMPORTANT PERFORMANCE NOTE: Make sure to set the alwaysCopiesSampleData property to NO if you do not need to modify the sample data in-place, to avoid unnecessary and inefficient copying.
  */
 NS_CLASS_AVAILABLE(10_7, 4_1)
 @interface AVAssetReaderOutput : NSObject
@@ -244,7 +247,7 @@
  
 	The default value is AVAudioTimePitchAlgorithmSpectral.
  */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 @end
 
@@ -347,7 +350,7 @@
  
 	The default value is AVAudioTimePitchAlgorithmSpectral.
  */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 @end
 
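
Not from the SDK: a minimal sketch of the performance note added above, disabling the per-buffer copy when sample data is only read.

```objc
#import <AVFoundation/AVFoundation.h>

static AVAssetReaderTrackOutput *MakeTrackOutput(AVAssetTrack *track)
{
    AVAssetReaderTrackOutput *output =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track
                                                   outputSettings:nil];
    // Safe only if the vended sample buffers are never modified in place.
    output.alwaysCopiesSampleData = NO;
    return output;
}
```
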
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h	2017-02-22 00:47:31.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h	2017-05-24 00:41:53.000000000 -0400
@@ -395,7 +395,7 @@
  @result		The persistable content key data that may be stored offline to answer future loading requests of the same content key.
  @discussion	The data returned from this method may be used to immediately satisfy an AVAssetResourceLoadingDataRequest, as well as any subsequent requests for the same key url. The value of AVAssetResourceLoadingContentInformationRequest.contentType must be set to AVStreamingKeyDeliveryPersistentContentKeyType when responding with data created with this method.
 */
-- (NSData *)persistentContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse options:(nullable NSDictionary<NSString *, id> *)options error:(NSError **)outError NS_AVAILABLE_IOS(9_0);
+- (nullable NSData *)persistentContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse options:(nullable NSDictionary<NSString *, id> *)options error:(NSError **)outError NS_AVAILABLE_IOS(9_0);
 
 @end
 
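
Not from the SDK: the only change here is that the persistable-key method is now formally nullable, so callers should handle a nil result. A sketch of the response path, with the surrounding resource-loader delegate wiring assumed:

```objc
#import <AVFoundation/AVFoundation.h>

static BOOL RespondWithPersistentKey(AVAssetResourceLoadingRequest *request,
                                     NSData *keyVendorResponse)
{
    NSError *error = nil;
    NSData *persistentKey =
        [request persistentContentKeyFromKeyVendorResponse:keyVendorResponse
                                                    options:nil
                                                      error:&error];
    if (persistentKey == nil) {          // now a formally possible outcome
        [request finishLoadingWithError:error];
        return NO;
    }
    // Per the header discussion, contentType must be set to
    // AVStreamingKeyDeliveryPersistentContentKeyType when responding.
    request.contentInformationRequest.contentType =
        AVStreamingKeyDeliveryPersistentContentKeyType;
    [request.dataRequest respondWithData:persistentKey];
    [request finishLoading];
    return YES;
}
```
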
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h	2016-09-23 21:02:15.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h	2017-05-23 21:01:43.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -20,6 +20,8 @@
 #import <AVFoundation/AVAsynchronousKeyValueLoading.h>
 #import <AVFoundation/AVAsset.h>
 #import <AVFoundation/AVAssetTrackSegment.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVMetadataFormat.h>
 #import <CoreMedia/CMTimeRange.h>
 
 NS_ASSUME_NONNULL_BEGIN
@@ -48,7 +50,7 @@
 @interface AVAssetTrack (AVAssetTrackBasicPropertiesAndCharacteristics)
 
 /* indicates the media type for this track, e.g. AVMediaTypeVideo, AVMediaTypeAudio, etc., as defined in AVMediaFormat.h. */
-@property (nonatomic, readonly) NSString *mediaType;
+@property (nonatomic, readonly) AVMediaType mediaType;
 
 /* provides an array of CMFormatDescriptions
    each of which indicates the format of media samples referenced by the track;
@@ -59,6 +61,9 @@
 /* Indicates whether the receiver is playable in the current environment; if YES, an AVPlayerItemTrack of an AVPlayerItem initialized with the receiver's asset can be enabled for playback.  */
 @property (nonatomic, readonly, getter=isPlayable) BOOL playable NS_AVAILABLE(10_8, 5_0);
 
+/* Indicates whether the receiver is decodable in the current environment; if YES, the track can be decoded even though decoding may be too slow for real time playback.  */
+@property (nonatomic, readonly, getter=isDecodable) BOOL decodable NS_AVAILABLE(10_13, 11_0);
+
 /* indicates whether the track is enabled according to state stored in its container or construct;
    note that its presentation state can be changed from this default via AVPlayerItemTrack */
 @property (nonatomic, readonly, getter=isEnabled) BOOL enabled;
@@ -77,7 +82,7 @@
 					as defined above.
 	@result			YES if the track references media with the specified characteristic, otherwise NO.
 */
-- (BOOL)hasMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
 
@@ -192,7 +197,7 @@
 
 /* provides an NSArray of NSStrings, each representing a format of metadata that's available for the track (e.g. QuickTime userdata, etc.)
    Metadata formats are defined in AVMetadataItem.h. */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
 
 /*!
 	@method			metadataForFormat:
@@ -202,13 +207,20 @@
 	@result			An NSArray containing AVMetadataItems.
 	@discussion		Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
 */
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(NSString *)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
 
 @end
 
 
 @interface AVAssetTrack (AVAssetTrackTrackAssociations)
 
+/*!
+ @typedef AVTrackAssociationType
+ @abstract
+    The type of a track association.
+*/
+typedef NSString * AVTrackAssociationType NS_STRING_ENUM;
+
 /*
  @constant		AVTrackAssociationTypeAudioFallback
  @abstract		Indicates an association between an audio track with another audio track that contains the same content but is typically encoded in a different format that's more widely supported, used to nominate a track that should be used in place of an unsupported track.
@@ -219,7 +231,7 @@
 	Example: Using AVTrackAssociationTypeAudioFallback, a stereo audio track with media subtype kAudioFormatMPEG4AAC could be nominated as the "fallback" for an audio track encoding the same source material but with media subtype kAudioFormatAC3 and a 5.1 channel layout.  This would ensure that all clients are capable of playing back some form of the audio.
 
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeAudioFallback NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeAudioFallback NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeChapterList
@@ -228,7 +240,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding track that has renderable content while the input parameter should be an instance of AVAssetWriterInput with a corresponding track that contains chapter metadata.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeChapterList NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeChapterList NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeForcedSubtitlesOnly
@@ -237,7 +249,7 @@
  @discussion
 	Associations of type AVTrackAssociationTypeForcedSubtitlesOnly are supported only between subtitle tracks.  This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding subtitle track that contains non-forced subtitles, and the input parameter should be an instance of AVAssetWriterInput with a corresponding subtitle track that contains forced subtitles only.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeForcedSubtitlesOnly NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeForcedSubtitlesOnly NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeSelectionFollower
@@ -246,7 +258,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the input parameter should be an instance of AVAssetWriterInput whose selection may depend on the selection of the receiver.  In the example above, the receiver would be the instance of AVAssetWriterInput corresponding with the audio track and the input parameter would be the instance of AVAssetWriterInput corresponding with the subtitle track.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeSelectionFollower NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeSelectionFollower NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeTimecode
@@ -255,7 +267,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding track that may be a video track or an audio track while the input parameter should be an instance of AVAssetWriterInput with a corresponding timecode track.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeTimecode NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeTimecode NS_AVAILABLE(10_9, 7_0);
 
 /*
 @constant		AVTrackAssociationTypeMetadataReferent
@@ -265,11 +277,11 @@
 	This track association is optional for AVAssetTracks with the mediaType AVMediaTypeMetadata. When a metadata track lacks this track association, its contents are assumed to describe or annotate the asset as a whole.
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with mediaType AVMediaTypeMetadata while the input parameter should be an instance of AVAssetWriterInput that's used to create the track to which the contents of the receiver's corresponding metadata track refer.
 */
-AVF_EXPORT NSString *const AVTrackAssociationTypeMetadataReferent NS_AVAILABLE(10_10, 8_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeMetadataReferent NS_AVAILABLE(10_10, 8_0);
 
 /* Provides an NSArray of NSStrings, each representing a type of track association that the receiver has with one or more of the other tracks of the asset (e.g. AVTrackAssociationTypeChapterList, AVTrackAssociationTypeTimecode, etc.).
    Track association types are defined immediately above. */
-@property (nonatomic, readonly) NSArray<NSString *> *availableTrackAssociationTypes NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes NS_AVAILABLE(10_9, 7_0);
 
 /*!
 	@method			associatedTracksOfType:
@@ -279,7 +291,7 @@
 	@result			An NSArray containing AVAssetTracks; may be empty if there is no associated tracks of the specified type.
 	@discussion		Becomes callable without blocking when the key @"availableTrackAssociationTypes" has been loaded.
 */
-- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(NSString *)trackAssociationType NS_AVAILABLE(10_9, 7_0);
+- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType NS_AVAILABLE(10_9, 7_0);
 
 @end
 
@@ -360,7 +372,7 @@
 @interface AVFragmentedAssetTrack : AVAssetTrack
 {
 @private
-	AVFragmentedAssetTrackInternal	*_fragmentedAssetTrack;
+	AVFragmentedAssetTrackInternal	*_fragmentedAssetTrack __attribute__((unused));
 }
 
 @end
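
Not from the SDK: a minimal sketch of the new `decodable` property and the retyped characteristic query.

```objc
#import <AVFoundation/AVFoundation.h>

static void DescribeTrack(AVAssetTrack *track)
{
    // New in this beta: decodable may be YES even when decoding is too
    // slow for real-time playback.
    NSLog(@"playable=%d decodable=%d", track.playable, track.decodable);

    // hasMediaCharacteristic: now takes AVMediaCharacteristic.
    if ([track hasMediaCharacteristic:AVMediaCharacteristicVisual]) {
        NSLog(@"track %d carries visual media", (int)track.trackID);
    }
}
```
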
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h	2016-08-05 01:59:16.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h	2017-05-24 00:28:26.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
 
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2016 Apple Inc. All rights reserved.
 
  */
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h	2016-08-05 01:30:06.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h	2017-05-24 00:41:53.000000000 -0400
@@ -8,6 +8,7 @@
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <AVFoundation/AVMediaSelectionGroup.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMBase.h>
@@ -84,7 +85,7 @@
 	
 	UTIs for container formats that can be written are declared in AVMediaFormat.h.
  */
-+ (nullable instancetype)assetWriterWithURL:(NSURL *)outputURL fileType:(NSString *)outputFileType error:(NSError * _Nullable * _Nullable)outError;
++ (nullable instancetype)assetWriterWithURL:(NSURL *)outputURL fileType:(AVFileType)outputFileType error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @method initWithURL:fileType:error:
@@ -105,7 +106,7 @@
 	
 	UTIs for container formats that can be written are declared in AVMediaFormat.h.
  */
-- (nullable instancetype)initWithURL:(NSURL *)outputURL fileType:(NSString *)outputFileType error:(NSError * _Nullable * _Nullable)outError NS_DESIGNATED_INITIALIZER;
+- (nullable instancetype)initWithURL:(NSURL *)outputURL fileType:(AVFileType)outputFileType error:(NSError * _Nullable * _Nullable)outError NS_DESIGNATED_INITIALIZER;
 
 /*!
  @property outputURL
@@ -121,7 +122,7 @@
  @abstract
 	The UTI of the file format of the file for which the instance of AVAssetWriter was initialized for writing.
  */
-@property (nonatomic, copy, readonly) NSString *outputFileType;
+@property (nonatomic, copy, readonly) AVFileType outputFileType;
 
 /*!
  @property availableMediaTypes
@@ -131,7 +132,7 @@
  @discussion
 	Some media types may not be accepted within the file format with which an AVAssetWriter was initialized.
  */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMediaTypes;
+@property (nonatomic, readonly) NSArray<AVMediaType> *availableMediaTypes;
 
 /*!
  @property status
@@ -217,7 +218,7 @@
  
 	Attempting to add an input with output settings and a media type for which this method returns NO will cause an exception to be thrown.
 */
-- (BOOL)canApplyOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forMediaType:(NSString *)mediaType;
+- (BOOL)canApplyOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forMediaType:(AVMediaType)mediaType;
 
 /*!
  @method canAddInput:
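
Not from the SDK: a minimal sketch of writer creation with the retyped `AVFileType` and media-type parameters; the output URL is a placeholder.

```objc
#import <AVFoundation/AVFoundation.h>

static AVAssetWriter *MakeWriter(NSURL *outputURL, NSError **outError)
{
    // fileType: is now typed AVFileType rather than NSString *.
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outputURL
                                                     fileType:AVFileTypeQuickTimeMovie
                                                        error:outError];
    // nil output settings ask whether samples of the given (now typed)
    // AVMediaType can be appended in passthrough.
    if (![writer canApplyOutputSettings:nil forMediaType:AVMediaTypeVideo]) {
        return nil;
    }
    return writer;
}
```
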
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h	2016-08-05 01:30:07.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h	2017-05-24 00:37:42.000000000 -0400
@@ -3,11 +3,12 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMTimeRange.h>
@@ -59,9 +60,9 @@
  
 	For AVMediaTypeAudio the following keys are not currently supported in the outputSettings dictionary: AVEncoderAudioQualityKey and AVSampleRateConverterAudioQualityKey.  When using this method to construct a new instance, an audio settings dictionary must be fully specified, meaning that it must contain AVFormatIDKey, AVSampleRateKey, and AVNumberOfChannelsKey.  If no other channel layout information is available, a value of 1 for AVNumberOfChannelsKey will result in mono output and a value of 2 will result in stereo output.  If AVNumberOfChannelsKey specifies a channel count greater than 2, the dictionary must also specify a value for AVChannelLayoutKey.  For kAudioFormatLinearPCM, all relevant AVLinearPCM*Key keys must be included, and for kAudioFormatAppleLossless, AVEncoderBitDepthHintKey keys must be included.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.
  
-	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this method to construct a new instance, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecH264 and AVVideoCodecJPEG.  AVVideoCodecH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
+	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this method to construct a new instance, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecTypeH264 and AVVideoCodecTypeJPEG.  AVVideoCodecTypeH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
  */
-+ (instancetype)assetWriterInputWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
++ (instancetype)assetWriterInputWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
 
 /*!
  @method assetWriterInputWithMediaType:outputSettings:sourceFormatHint:
@@ -82,7 +83,7 @@
  
 	An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
  */
-+ (instancetype)assetWriterInputWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0);
++ (instancetype)assetWriterInputWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0);
 
 /*!
  @method initWithMediaType:outputSettings:
@@ -103,9 +104,9 @@
  
 	For AVMediaTypeAudio the following keys are not currently supported in the outputSettings dictionary: AVEncoderAudioQualityKey and AVSampleRateConverterAudioQualityKey.  When using this initializer, an audio settings dictionary must be fully specified, meaning that it must contain AVFormatIDKey, AVSampleRateKey, and AVNumberOfChannelsKey.  If no other channel layout information is available, a value of 1 for AVNumberOfChannelsKey will result in mono output and a value of 2 will result in stereo output.  If AVNumberOfChannelsKey specifies a channel count greater than 2, the dictionary must also specify a value for AVChannelLayoutKey.  For kAudioFormatLinearPCM, all relevant AVLinearPCM*Key keys must be included, and for kAudioFormatAppleLossless, AVEncoderBitDepthHintKey keys must be included.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.
  
-	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this initializer, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecH264 and AVVideoCodecJPEG.  AVVideoCodecH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
+	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this initializer, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecTypeH264 and AVVideoCodecTypeJPEG.  AVVideoCodecTypeH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
  */
-- (instancetype)initWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
+- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
 
 /*!
  @method initWithMediaType:outputSettings:sourceFormatHint:
@@ -126,7 +127,7 @@
  
 	An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
  */
-- (instancetype)initWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0) NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0) NS_DESIGNATED_INITIALIZER;
 
 /*!
  @property mediaType
@@ -136,7 +137,7 @@
  @discussion
 	The value of this property is one of the media type strings defined in AVMediaFormat.h.
  */
-@property (nonatomic, readonly) NSString *mediaType;
+@property (nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property outputSettings
@@ -450,6 +451,39 @@
  */
 @property (nonatomic, copy, nullable) NSURL *sampleReferenceBaseURL NS_AVAILABLE(10_10, 8_0);
 
+typedef NSString *AVAssetWriterInputMediaDataLocation NS_STRING_ENUM NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @constant AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData
+	Indicates that the media data should be interleaved with the media data of all other inputs marked with this constant.
+ */
+AVF_EXPORT AVAssetWriterInputMediaDataLocation const AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @constant AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved
+	Indicates that the media data should be laid out before the media data of all inputs marked AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData, and not be interleaved.
+ */
+AVF_EXPORT AVAssetWriterInputMediaDataLocation const AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @property mediaDataLocation
+ @abstract
+	Specifies where the media data will be laid out and whether it will be interleaved with the main media data.
+
+ @discussion
+	If this value is set to AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved, AVAssetWriter tries to write the media data for this track before all the media data for AVAssetWriterInputs with this property set to AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData.
+
+	Use of this property is recommended for optimizing tracks that contain a small amount of data that is needed all at once, independent of playback time, such as chapter name tracks and chapter image tracks.
+	Keep it set to AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData for tracks whose media data is needed only as its presentation time approaches and which, when multiple inputs are present that supply media data that will be played concurrently, should be interleaved for optimal access.
+
+	For file types that support preloading media data, such as QuickTime movie files, if this value is set to AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved, AVAssetWriter will write an indication, such as a 'load' atom, that the whole media data should be preloaded.
+
+	The default value is AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData, which means that the receiver will not write the indication and that the media data will be interleaved.
+
+	This property cannot be set after -startWriting has been called on the receiver.
+ */
+@property (nonatomic, copy) AVAssetWriterInputMediaDataLocation mediaDataLocation NS_AVAILABLE(10_13, 11_0);
+
 @end
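
A hedged sketch of the new mediaDataLocation property in use, assuming a chapter-style text track whose small media data should be written ahead of the interleaved main media data; the surrounding AVAssetWriter setup is elided:

```objc
// Hypothetical input for a small, load-at-once track (e.g. chapter names).
AVAssetWriterInput *chapterInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeText
                                       outputSettings:nil];
// Must be set before -startWriting is called on the owning AVAssetWriter.
chapterInput.mediaDataLocation =
    AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved;
```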
 
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h	2016-09-12 23:29:46.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h	2017-05-24 00:37:43.000000000 -0400
@@ -3,11 +3,12 @@
  
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
  
  */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMBase.h>
 #import <CoreMedia/CMTime.h>
@@ -44,7 +45,7 @@
 NS_CLASS_AVAILABLE(10_7, 4_0)
 @interface AVMutableAudioMix : AVAudioMix {
 @private
-    AVMutableAudioMixInternal    *_mutableAudioMix;
+    AVMutableAudioMixInternal    *_mutableAudioMix __attribute__((unused));
 }
 
 /*  
@@ -104,7 +105,7 @@
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchSpectral, are defined in AVAudioProcessingSettings.h.
    Can be nil, in which case the audioTimePitchAlgorithm set on the AVPlayerItem, AVAssetExportSession, or AVAssetReaderAudioMixOutput on which the AVAudioMix is set will be used for the associated track.
 */
-@property (nonatomic, readonly, copy, nullable) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
+@property (nonatomic, readonly, copy, nullable) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
 
 /*!
  @property		audioTapProcessor
@@ -139,7 +140,7 @@
 NS_CLASS_AVAILABLE(10_7, 4_0)
 @interface AVMutableAudioMixInputParameters : AVAudioMixInputParameters {
 @private
-    AVMutableAudioMixInputParametersInternal    *_mutableInputParameters;
+    AVMutableAudioMixInputParametersInternal    *_mutableInputParameters __attribute__((unused));
 }
 
 /*  
@@ -169,7 +170,7 @@
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchSpectral, are defined in AVAudioProcessingSettings.h.
    Can be nil, in which case the audioTimePitchAlgorithm set on the AVPlayerItem, AVAssetExportSession, or AVAssetReaderAudioMixOutput on which the AVAudioMix is set will be used for the associated track.
 */
-@property (nonatomic, copy, nullable) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
+@property (nonatomic, copy, nullable) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
 
 /*!
  @property		audioTapProcessor
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h	2016-08-05 01:59:17.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h	2017-05-24 00:28:26.000000000 -0400
@@ -3,13 +3,26 @@
  
     Framework:  AVFoundation
  
-	Copyright 2013 Apple Inc. All rights reserved.
+	Copyright 2013-2017 Apple Inc. All rights reserved.
  
  */
 
 #import <AVFoundation/AVBase.h>
 #import <Foundation/Foundation.h>
 
+
+/*!
+ @typedef AVAudioTimePitchAlgorithm
+ @abstract
+    The type of a time pitch algorithm.
+ @discussion
+	On OS X, the default algorithm for all time pitch operations is AVAudioTimePitchAlgorithmSpectral.  On iOS, the default algorithm for playback is AVAudioTimePitchAlgorithmLowQualityZeroLatency and the default for export & other offline processing is AVAudioTimePitchAlgorithmSpectral.
+
+	For scaled audio edits, i.e. when the timeMapping of an AVAssetTrackSegment is between timeRanges of unequal duration, it is important to choose an algorithm that supports the full range of edit rates present in the source media.  AVAudioTimePitchAlgorithmSpectral is often the best choice due to the highly inclusive range of rates it supports, assuming that it is desirable to maintain a constant pitch regardless of the edit rate.  If it is instead desirable to allow the pitch to vary with the edit rate, AVAudioTimePitchAlgorithmVarispeed is the best choice.
+ 
+*/
+typedef NSString * AVAudioTimePitchAlgorithm NS_STRING_ENUM;
+
 /*!
  @abstract		Values for time pitch algorithm
  
@@ -29,13 +42,8 @@
 				High quality, no pitch correction. Pitch varies with rate.
                 Variable rate from 1/32 to 32.
  
- @discussion
-	On OS X, the default algorithm for all time pitch operations is AVAudioTimePitchAlgorithmSpectral.  On iOS, the default algorithm for playback is AVAudioTimePitchAlgorithmLowQualityZeroLatency and the default for export & other offline processing is AVAudioTimePitchAlgorithmSpectral.
-
-	For scaled audio edits, i.e. when the timeMapping of an AVAssetTrackSegment is between timeRanges of unequal duration, it is important to choose an algorithm that supports the full range of edit rates present in the source media.  AVAudioTimePitchAlgorithmSpectral is often the best choice due to the highly inclusive range of rates it supports, assuming that it is desirable to maintain a constant pitch regardless of the edit rate.  If it is instead desirable to allow the pitch to vary with the edit rate, AVAudioTimePitchAlgorithmVarispeed is the best choice.
- 
 */
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmLowQualityZeroLatency NS_AVAILABLE_IOS(7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmTimeDomain NS_AVAILABLE(10_9, 7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmSpectral NS_AVAILABLE(10_9, 7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmVarispeed NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmLowQualityZeroLatency NS_AVAILABLE_IOS(7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmTimeDomain NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmSpectral NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmVarispeed NS_AVAILABLE(10_9, 7_0);
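
With the NS_STRING_ENUM typedef above, properties such as AVPlayerItem's audioTimePitchAlgorithm now carry the AVAudioTimePitchAlgorithm type while the constants themselves are unchanged. A minimal sketch, assuming a hypothetical asset URL:

```objc
AVPlayerItem *item = [AVPlayerItem playerItemWithURL:assetURL]; // assetURL is hypothetical
// Spectral supports a wide range of rates while keeping pitch constant.
item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmSpectral;
AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
player.rate = 1.5f; // pitch stays constant under the Spectral algorithm
```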
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h	2017-02-22 01:08:53.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h	2017-05-24 00:28:27.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
  */
 
@@ -16,30 +16,20 @@
 	#define AVF_EXPORT extern
 #endif
 
-// Annotation for classes that inherit -init from NSObject but cannot be usefully initialized using -init
-#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE;
+// Annotation for classes that inherit -init and +new from NSObject but cannot be usefully initialized using -init or +new
+#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE; \
+                            + (instancetype)new  NS_UNAVAILABLE;
 
 #ifndef __has_feature
 	#define __has_feature(FEATURE) 0
 #endif
 
-// Generics
-
-// Use when declaring a variable of a generic type
-#if __has_feature(objc_generics)
-	#define AV_GENERIC(BASETYPE, ...) BASETYPE<__VA_ARGS__>
-#else
-	#define AV_GENERIC(BASETYPE, ...) BASETYPE
+#ifndef NS_STRING_ENUM
+	#define NS_STRING_ENUM
 #endif
 
-// Use when declaring a generic class interface
-#define AV_GENERIC_CLASS AV_GENERIC
-
-// Use to refer to generic types in a generic class
-#if __has_feature(objc_generics)
-	#define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPENAME
-#else
-	#define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPEBOUNDS
+#ifndef NS_EXTENSIBLE_STRING_ENUM
+	#define NS_EXTENSIBLE_STRING_ENUM
 #endif
 
 // Pre-10.12
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h	2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,171 @@
+/*
+    File:  AVCameraCalibrationData.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+#import <simd/matrix_types.h>
+#import <CoreGraphics/CGGeometry.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVCameraCalibrationDataInternal;
+
+/*!
+ @class AVCameraCalibrationData
+ @abstract
+    AVCameraCalibrationData is a model object describing a camera's calibration information.
+ 
+ @discussion
+    When rendering effects to images produced by cameras, or performing computer vision tasks such as correcting images for geometric distortions, it is necessary to characterize the camera's calibration information, such as its pixel focal length, principal point, lens distortion characteristics, etc. AVCameraCalibrationData provides this information.
+ */
+NS_CLASS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED
+@interface AVCameraCalibrationData : NSObject
+{
+@private
+    AVCameraCalibrationDataInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property intrinsicMatrix
+ @abstract
+    A camera's intrinsic (K) matrix describes its geometric properties.
+ 
+ @discussion
+    The intrinsic matrix allows one to transform 3D coordinates to 2D coordinates on an image plane using the pinhole camera model. All values are expressed in pixels. The elements in the matrix are:
+       /           \
+       | fx 0   ox |
+       | 0  fy  oy |
+       | 0  0   1  |
+       \           /
+    where fx and fy describe the focal length. For square pixels, their values are identical.
+    ox and oy are the offset of the principal point. The origin is the upper left of the frame.
+ */
+@property(nonatomic, readonly) matrix_float3x3 intrinsicMatrix;
+
+/*!
+ @property intrinsicMatrixReferenceDimensions
+ @abstract
+    The reference frame dimensions used in calculating a camera's principal point.
+ 
+ @discussion
+    A camera's intrinsic matrix expresses values in pixels with respect to a frame of this width and height.
+ */
+@property(nonatomic, readonly) CGSize intrinsicMatrixReferenceDimensions;
+
+/*!
+ @property extrinsicMatrix
+ @abstract
+    A camera's extrinsic matrix describes its pose (position and direction) in world coordinates.
+ 
+ @discussion
+    The extrinsic matrix consists of a unitless 3x3 rotation matrix (R) on the left and a translation (t) 3x1 column vector on the right. The translation vector's units are millimeters. The camera's pose is expressed with respect to a reference camera (camera-to-world view). If the rotation matrix is an identity matrix, then this camera is the reference camera. Note that a matrix_float4x3 matrix is column major with 3 rows and 4 columns.
+               /                       \
+       /   \   | r1,1  r1,2  r1,3 | t1 |
+       |R|t| = | r2,1  r2,2  r2,3 | t2 |
+       \   /   | r3,1  r3,2  r3,3 | t3 |
+               \                       /
+ */
+@property(nonatomic, readonly) matrix_float4x3 extrinsicMatrix;
+
+/*!
+ @property pixelSize
+ @abstract
+    The size of one pixel in millimeters
+ */
+@property(nonatomic, readonly) float pixelSize;
+
+/*!
+ @property lensDistortionLookupTable
+ @abstract
+    An NSData of floats describing the camera lens' radial distortions.
+ 
+ @discussion
+    Images captured by a camera are geometrically warped by radial distortions in the lens. In order to project from the 2D image plane back into the 3D world, the images must be distortion corrected, or made rectilinear. Lens distortion is modeled using a one-dimensional lookup table of 32-bit float values evenly distributed along a radius from the center of the distortion to the farthest corner, with each value representing an elongation or compression of the radius (1.0 for any given point indicates no elongation). This model assumes radially symmetric lens distortion. When dealing with AVDepthData, the disparity / depth map representations are geometrically distorted to align with images produced by the camera. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *lensDistortionLookupTable;
+
+/*!
+ @property inverseLensDistortionLookupTable
+ @abstract
+    An NSData of floats describing the inverse lookup table required to reapply the camera lens' radial distortions to a rectified image.
+ 
+ @discussion
+    See lensDistortionLookupTable. If you've rectified an image by removing the distortions characterized by the lensDistortionLookupTable, and now wish to go back to geometrically distorted, you may use the inverseLensDistortionLookupTable. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *inverseLensDistortionLookupTable;
+
+/*!
+ @property lensDistortionCenter
+ @abstract
+    A CGPoint describing the offset of the lens' distortion center from the top left.
+ 
+ @discussion
+    Due to geometric distortions in the image, the center of the distortion may not be equal to the optical center (principal point) of the lens. When making an image rectilinear, the distortion center should be used rather than the optical center of the image. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) CGPoint lensDistortionCenter;
+
+/*
+    The following reference implementation illustrates how to use the lensDistortionLookupTable, inverseLensDistortionLookupTable, and lensDistortionCenter properties to find points in the lens-distorted or undistorted (rectilinear, corrected) space. If you have a distorted image (such as a photo taken by a camera) and want to find a particular point in a corresponding undistorted image, you would call the sample method below using the inverseLensDistortionLookupTable. If you have an undistorted (aka distortion-corrected) image and want to find a point in the distorted image's space, you would call the sample method below using the lensDistortionLookupTable.
+ 
+    To apply distortion correction to an image, you'd begin with an empty destination buffer and iterate through it row by row, calling the sample implementation below for each point in the output image, passing the lensDistortionLookupTable to find the corresponding value in the distorted image, and write it to your output buffer. Please note that the "point", "opticalCenter", and "imageSize" parameters below must be in the same coordinate system, i.e. both at full resolution, or both scaled to a different resolution but with the same aspect ratio.
+ 
+- (CGPoint)lensDistortionPointForPoint:(CGPoint)point
+                           lookupTable:(NSData *)lookupTable
+               distortionOpticalCenter:(CGPoint)opticalCenter
+                             imageSize:(CGSize)imageSize
+{
+    // The lookup table holds the radial magnification for n linearly spaced radii.
+    // The first position corresponds to radius = 0
+    // The last position corresponds to the largest radius found in the image.
+ 
+    // Determine the maximum radius.
+    float delta_ocx_max = MAX( opticalCenter.x, imageSize.width  - opticalCenter.x );
+    float delta_ocy_max = MAX( opticalCenter.y, imageSize.height - opticalCenter.y );
+    float r_max = sqrtf( delta_ocx_max * delta_ocx_max + delta_ocy_max * delta_ocy_max );
+ 
+    // Determine the vector from the optical center to the given point.
+    float v_point_x = point.x - opticalCenter.x;
+    float v_point_y = point.y - opticalCenter.y;
+ 
+    // Determine the radius of the given point.
+    float r_point = sqrtf( v_point_x * v_point_x + v_point_y * v_point_y );
+ 
+    // Look up the radial magnification to apply in the provided lookup table
+    float magnification;
+    const float *lookupTableValues = lookupTable.bytes;
+    NSUInteger lookupTableCount = lookupTable.length / sizeof(float);
+ 
+    if ( r_point < r_max ) {
+        // Linear interpolation
+        float val   = r_point * ( lookupTableCount - 1 ) / r_max;
+        int   idx   = (int)val;
+        float frac  = val - idx;
+ 
+        float mag_1 = lookupTableValues[idx];
+        float mag_2 = lookupTableValues[idx + 1];
+ 
+        magnification = ( 1.0f - frac ) * mag_1 + frac * mag_2;
+    }
+    else {
+        magnification = lookupTableValues[lookupTableCount - 1];
+    }
+ 
+    // Apply radial magnification
+    float new_v_point_x = magnification * v_point_x;
+    float new_v_point_y = magnification * v_point_y;
+ 
+    // Construct output
+    return CGPointMake( opticalCenter.x + new_v_point_x, opticalCenter.y + new_v_point_y );
+}
+ */
+
+@end
+
+NS_ASSUME_NONNULL_END
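
A hedged sketch of the pinhole projection that the intrinsicMatrix documentation above describes, assuming `calibration` is an AVCameraCalibrationData instance obtained elsewhere (for example, alongside depth data):

```objc
#import <simd/simd.h>

matrix_float3x3 K = calibration.intrinsicMatrix;
vector_float3 pointInCamera = { 0.1f, -0.05f, 1.0f };        // x, y, z in camera space
vector_float3 projected = matrix_multiply(K, pointInCamera); // (fx*x + ox*z, fy*y + oy*z, z)
// Divide by z to land on the image plane, in pixels: (fx*x/z + ox, fy*y/z + oy).
CGPoint pixel = CGPointMake(projected.x / projected.z,
                            projected.y / projected.z);
```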
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	2016-09-23 21:02:18.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	2017-05-24 00:28:07.000000000 -0400
@@ -1,15 +1,18 @@
 /*
     File:  AVCaptureAudioDataOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <CoreMedia/CMSampleBuffer.h>
 
-#pragma mark - AVCaptureAudioDataOutput
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureAudioDataOutput
 
 @class AVCaptureAudioDataOutputInternal;
 @protocol AVCaptureAudioDataOutputSampleBufferDelegate;
@@ -26,92 +29,96 @@
 @interface AVCaptureAudioDataOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureAudioDataOutputInternal *_internal;
+    AVCaptureAudioDataOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @method setSampleBufferDelegate:queue:
  @abstract
     Sets the receiver's delegate that will accept captured buffers and dispatch queue on which the delegate will be called.
-
+ 
  @param sampleBufferDelegate
     An object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured.
  @param sampleBufferCallbackQueue
     A dispatch queue on which all sample buffer delegate methods will be called.
-
+ 
  @discussion
     When a new audio sample buffer is captured it will be vended to the sample buffer delegate using the captureOutput:didOutputSampleBuffer:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue. If the queue is blocked when new samples are captured, those samples will be automatically dropped when they become sufficiently late. This allows clients to process existing samples on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming samples.
-
+ 
     Clients that need to minimize the chances of samples being dropped should specify a queue on which a sufficiently small amount of processing is being done outside of receiving sample buffers. However, if such clients migrate extra processing to another queue, they are responsible for ensuring that memory usage does not grow without bound from samples that have not been processed.
-
+ 
     A serial dispatch queue must be used to guarantee that audio samples will be delivered in order. The sampleBufferCallbackQueue parameter may not be NULL, except when setting sampleBufferDelegate to nil.
  */
-- (void)setSampleBufferDelegate:(id<AVCaptureAudioDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue;
+- (void)setSampleBufferDelegate:(nullable id<AVCaptureAudioDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(nullable dispatch_queue_t)sampleBufferCallbackQueue;
 
 /*!
  @property sampleBufferDelegate
  @abstract
     The receiver's delegate.
-
+ 
  @discussion
     The value of this property is an object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured. The delegate is set using the setSampleBufferDelegate:queue: method.
  */
-@property(nonatomic, readonly) id<AVCaptureAudioDataOutputSampleBufferDelegate> sampleBufferDelegate;
+@property(nonatomic, readonly, nullable) id<AVCaptureAudioDataOutputSampleBufferDelegate> sampleBufferDelegate;
 
 /*!
  @property sampleBufferCallbackQueue
  @abstract
     The dispatch queue on which all sample buffer delegate methods will be called.
-
+ 
  @discussion
     The value of this property is a dispatch_queue_t. The queue is set using the setSampleBufferDelegate:queue: method.
  */
-@property(nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;
+@property(nonatomic, readonly, nullable) dispatch_queue_t sampleBufferCallbackQueue;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property audioSettings
  @abstract
     Specifies the settings used to decode or re-encode audio before it is output by the receiver.
-
+ 
  @discussion
-    The value of this property is an NSDictionary containing values for audio settings keys defined  in AVAudioSettings.h. When audioSettings is set to nil, the AVCaptureAudioDataOutput vends samples in their device native format.
+    The value of this property is an NSDictionary containing values for audio settings keys defined in AVAudioSettings.h. When audioSettings is set to nil, the AVCaptureAudioDataOutput vends samples in their device native format.
  */
-@property(nonatomic, copy) NSDictionary *audioSettings NS_AVAILABLE(10_7, NA);
+@property(nonatomic, copy, null_resettable) NSDictionary<NSString *, id> *audioSettings NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @method recommendedAudioSettingsForAssetWriterWithOutputFileType:
  @abstract
     Specifies the recommended settings for use with an AVAssetWriterInput.
-
+ 
  @param outputFileType
     Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
- 
- @return
+ @result
     A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
  
  @discussion
     The value of this property is an NSDictionary containing values for compression settings keys defined in AVAudioSettings.h. This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
-
+ 
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:outputSettings sourceFormatHint:hint];
-    
+ 
     The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, -initWithMediaType:outputSettings: for a more in depth discussion). For QuickTime movie and ISO files, the recommended audio settings will always produce output comparable to that of AVCaptureMovieFileOutput.
-
+ 
     Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession and its inputs. The settings dictionary may change if the session's configuration changes. As such, you should configure your session first, then query the recommended audio settings.
  */
-- (NSDictionary *)recommendedAudioSettingsForAssetWriterWithOutputFileType:(NSString *)outputFileType NS_AVAILABLE_IOS(7_0);
+- (nullable NSDictionary *)recommendedAudioSettingsForAssetWriterWithOutputFileType:(AVFileType)outputFileType NS_AVAILABLE_IOS(7_0);
 
 @end
 
+
 /*!
  @protocol AVCaptureAudioDataOutputSampleBufferDelegate
  @abstract
     Defines an interface for delegates of AVCaptureAudioDataOutput to receive captured audio sample buffers.
  */
-__TVOS_PROHIBITED
+NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @protocol AVCaptureAudioDataOutputSampleBufferDelegate <NSObject>
 
 @optional
@@ -120,19 +127,21 @@
  @method captureOutput:didOutputSampleBuffer:fromConnection:
  @abstract
     Called whenever an AVCaptureAudioDataOutput instance outputs a new audio sample buffer.
-
- @param captureOutput
+ 
+ @param output
     The AVCaptureAudioDataOutput instance that output the samples.
  @param sampleBuffer
     A CMSampleBuffer object containing the audio samples and additional information about them, such as their format and presentation time.
  @param connection
     The AVCaptureConnection from which the audio was received.
-
+ 
  @discussion
     Delegates receive this message whenever the output captures and outputs new audio samples, decoding or re-encoding as specified by the audioSettings property. Delegates can use the provided sample buffer in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's sampleBufferCallbackQueue property. This method is called periodically, so it must be efficient to prevent capture performance problems, including dropped audio samples.
-
+ 
     Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
  */
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
+- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
 
 @end
+
+NS_ASSUME_NONNULL_END
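
A minimal sketch tying the retyped recommendedAudioSettingsForAssetWriterWithOutputFileType: to AVAssetWriterInput, assuming `audioOutput` is an AVCaptureAudioDataOutput already attached to a fully configured session:

```objc
NSDictionary *settings =
    [audioOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie];
AVAssetWriterInput *audioInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                       outputSettings:settings];
audioInput.expectsMediaDataInRealTime = YES; // capture sources deliver in real time
```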
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h	2016-08-05 01:59:18.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,17 +1,16 @@
 /*
     File:  AVCaptureAudioPreviewOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
-
+#import <AVFoundation/AVCaptureOutputBase.h>
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+NS_ASSUME_NONNULL_BEGIN
 
-#pragma mark - AVCaptureAudioPreviewOutput
+#pragma mark AVCaptureAudioPreviewOutput
 
 @class AVCaptureAudioPreviewOutputInternal;
 
@@ -23,28 +22,32 @@
  @discussion
     Instances of AVCaptureAudioPreviewOutput have an associated Core Audio output device that can be used to play audio being captured by the capture session. The unique ID of a Core Audio device can be obtained from its kAudioDevicePropertyDeviceUID property.
  */
-NS_CLASS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureAudioPreviewOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureAudioPreviewOutputInternal *_internal;
+    AVCaptureAudioPreviewOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @property outputDeviceUniqueID
  @abstract
     Specifies the unique ID of the Core Audio output device being used to play preview audio.
-
+ 
  @discussion
     The value of this property is an NSString containing the unique ID of the Core Audio device to be used for output, or nil if the default system output should be used.
  */
-@property(nonatomic, copy) NSString *outputDeviceUniqueID;
+@property(nonatomic, copy, nullable) NSString *outputDeviceUniqueID;
 
 /*!
  @property volume
  @abstract
     Specifies the preview volume of the output.
-
+ 
  @discussion
     The value of this property is the preview volume of the receiver, where 1.0 is the maximum volume and 0.0 is muted. 
  */
@@ -52,4 +55,4 @@
 
 @end
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+NS_ASSUME_NONNULL_END
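
A hedged, macOS-only sketch of AVCaptureAudioPreviewOutput with the now-nullable outputDeviceUniqueID, assuming `session` is an existing AVCaptureSession:

```objc
AVCaptureAudioPreviewOutput *preview = [[AVCaptureAudioPreviewOutput alloc] init];
preview.outputDeviceUniqueID = nil; // nil selects the system default output device
preview.volume = 0.5f;
if ([session canAddOutput:preview]) {
    [session addOutput:preview];
}
```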
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h	2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,354 @@
+/*
+    File:  AVCaptureDataOutputSynchronizer.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutput.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDataOutputSynchronizer
+
+@class AVCaptureDataOutputSynchronizerInternal;
+@class AVCaptureSynchronizedDataCollection;
+@protocol AVCaptureDataOutputSynchronizerDelegate;
+
+/*!
+ @class AVCaptureDataOutputSynchronizer
+ @abstract
+    AVCaptureDataOutputSynchronizer synchronizes the delivery of data from multiple capture data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, AVCaptureAudioDataOutput) to a single delegate callback.
+
+ @discussion
+    AVCaptureDataOutputSynchronizer is initialized with an array of data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, or AVCaptureAudioDataOutput) from which you'd like to receive a single, synchronized delegate callback. The first output in the array acts as the master data output and determines when the synchronized callback is delivered. When data is received for the master data output, it is held until all other data outputs have received data with an equal or later presentation time stamp, or it has been determined that there is no data for a particular output at the master data output's pts. Once all other outputs are ready, a single delegate callback is sent with all the data aligned with the master data output's data. Separate delegate callbacks are sent for any other data received with presentation time stamps earlier than the next master data output time.
+
+    For instance, if you specify a video data output as your first (master) output and a metadata output for detected faces as your second output, your data callback will not be called until there is face data ready for a video frame, or it is assured that there is no face metadata for that particular video frame.
+ 
+    Note that the AVCaptureDataOutputSynchronizer overrides each data output's -setSampleBufferDelegate:queue:, -setDelegate:callbackQueue:, or -setMetadataObjectsDelegate:queue: method call. -[AVCaptureVideoDataOutput alwaysDiscardsLateVideoFrames] and -[AVCaptureDepthDataOutput alwaysDiscardsLateDepthData] properties are honored.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDataOutputSynchronizer : NSObject
+{
+@private
+    AVCaptureDataOutputSynchronizerInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method initWithDataOutputs:
+ @abstract
+    Instantiates an AVCaptureDataOutputSynchronizer from one or more capture data outputs.
+ 
+ @param dataOutputs
+    An array of capture data outputs where the first is the master.
+ @result
+    A newly initialized AVCaptureDataOutputSynchronizer instance.
+ */
+- (instancetype)initWithDataOutputs:(NSArray<AVCaptureOutput *> *)dataOutputs;
+
+/*!
+ @property dataOutputs
+ @abstract
+    The data outputs provided in the initializer method.
+ */
+@property(readonly, retain) NSArray<AVCaptureOutput *> *dataOutputs;
+
+/*!
+ @method setDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that will accept synchronized data and the dispatch queue on which the delegate will be called.
+ 
+ @param delegate
+    An object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data from the provided data outputs.
+ @param delegateCallbackQueue
+    A dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer gathers data from its dataOutputs, and when it determines that all data has been received for a given timestamp, it calls the specified delegate on the specified delegateCallbackQueue. AVCaptureDataOutputSynchronizer overrides all the data outputs' delegates and callbacks. Data outputs under the control of AVCaptureDataOutputSynchronizer do not fire delegate callbacks. Delegate callbacks are restored to individual data outputs when you call this method with nil as your delegate and NULL as your delegateCallbackQueue.
+ 
+    A serial dispatch queue must be used to guarantee that synchronized data will be delivered in order. The delegateCallbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(nullable id<AVCaptureDataOutputSynchronizerDelegate>)delegate queue:(nullable dispatch_queue_t)delegateCallbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data output. The delegate is set using the -setDelegate:queue: method. This property is key-value observable.
+ */
+@property(nullable, nonatomic, readonly) id<AVCaptureDataOutputSynchronizerDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+    The dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the -setDelegate:queue: method.
+ */
+@property(nullable, nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+@end
+
+
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDataOutputSynchronizerDelegate <NSObject>
+
+@required
+/*!
+ @method dataOutputSynchronizer:didOutputSynchronizedDataCollection:
+ @abstract
+    Called when an AVCaptureDataOutputSynchronizer instance outputs synchronized data from one or more data outputs.
+ 
+ @param synchronizer
+    The AVCaptureDataOutputSynchronizer instance delivering synchronized data.
+ @param synchronizedDataCollection
+    A collection of synchronized data objects indexed by data output.
+ 
+ @discussion
+    The synchronized data collection only contains synchronized data for capture outputs with synchronized data ready.
+ */
+- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDataCollection
+
+@class AVCaptureSynchronizedData;
+@class AVCaptureSynchronizedDataCollectionInternal;
+
+/*!
+ @class AVCaptureSynchronizedDataCollection
+ @abstract
+    A collection of AVCaptureSynchronizedData objects.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate method delivers a collection of AVCaptureSynchronizedData objects that can be looked up by AVCaptureOutput. AVCaptureSynchronizedDataCollection supports object subscripting and fast enumeration of the data outputs as keys.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDataCollection : NSObject <NSFastEnumeration>
+{
+@private
+    AVCaptureSynchronizedDataCollectionInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method synchronizedDataForCaptureOutput:
+ @abstract
+    Provides the synchronized data object for a given capture output.
+ 
+ @param captureOutput
+    The data output whose synchronized data you'd like to inspect.
+ @result
+    The synchronized data object associated with the provided output, or nil, if there is none.
+ */
+- (nullable AVCaptureSynchronizedData *)synchronizedDataForCaptureOutput:(AVCaptureOutput *)captureOutput;
+
+/*!
+ @method objectForKeyedSubscript:
+ @abstract
+    Method that provides support for object subscripting.
+ 
+ @param key
+    The data output whose synchronized data you'd like to inspect.
+ @result
+    The synchronized data object associated with the provided output, or nil, if there is none.
+ 
+ @discussion
+    AVCaptureSynchronizedDataCollection supports object subscripting. If you'd like to find the synchronized data for a given data output, simply:
+        AVCaptureSynchronizedData *synchronizedData = synchronizedDataCollection[dataOutput];
+ */
+- (nullable AVCaptureSynchronizedData *)objectForKeyedSubscript:(AVCaptureOutput *)key;
+
+/*!
+ @property count
+ @abstract
+    The number of items in the collection.
+ 
+ @discussion
+    Returns the number of data output / synchronized data pairs present in the collection.
+ */
+@property(readonly) NSUInteger count;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedData
+
+@class AVCaptureSynchronizedDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedData
+ @abstract
+    An abstract base class representing the data delivered by a data output through the AVCaptureDataOutputSynchronizer interface.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback delivers a collection of key/value pairs, with the keys being the AVCaptureOutput instances returning data, and the values being concrete subclasses of AVCaptureSynchronizedData.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedData : NSObject
+{
+@private
+    AVCaptureSynchronizedDataInternal *_synchronizedDataInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property timestamp
+ @abstract
+    The time at which this synchronized data was captured.
+ 
+ @discussion
+    Synchronized data is always clocked to the masterClock of the AVCaptureSession to which the data output is connected.
+ */
+@property(readonly) CMTime timestamp;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedSampleBufferData
+
+@class AVCaptureSynchronizedSampleBufferDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedSampleBufferData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureVideoDataOutput or AVCaptureAudioDataOutput.
+
+ @discussion
+    Synchronized sample buffer data is valid for the duration of AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback. To extend the sample buffer data beyond the callback, you must CFRetain it, and later call CFRelease when you're done with it.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedSampleBufferData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedSampleBufferDataInternal *_internal;
+}
+
+/*!
+ @property sampleBuffer
+ @abstract
+    A sample buffer containing video or audio data.
+ 
+ @discussion
+    If sampleBufferWasDropped is YES, the returned sampleBuffer was dropped before it could be delivered to you, and thus this sample buffer is a shell containing metadata and format information, but no actual pixel data. This property is never NULL. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) CMSampleBufferRef sampleBuffer;
+
+/*!
+ @property sampleBufferWasDropped
+ @abstract
+    YES if the sample buffer was dropped.
+ 
+ @discussion
+    AVCaptureVideoDataOutput has a delegate callback for dropped sample buffers. AVCaptureAudioDataOutput does not. Therefore, sampleBufferWasDropped may be YES for video, but never for audio.
+ */
+@property(readonly) BOOL sampleBufferWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+    If sampleBufferWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+ 
+ @discussion
+    AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedMetadataObjectData
+
+@class AVCaptureSynchronizedMetadataObjectDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedMetadataObjectData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureMetadataOutput.
+ 
+ @discussion
+    A single AVCaptureMetadataOutput may be configured to deliver multiple kinds of metadata objects (such as QRCodes and detected faces). AVCaptureSynchronizedMetadataObjectData's -metadataObjects array may contain multiple AVMetadataObject subclasses, depending on how the AVCaptureMetadataOutput was configured. All synchronized metadata objects share a common timestamp.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedMetadataObjectData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedMetadataObjectDataInternal *_internal;
+}
+
+/*!
+ @property metadataObjects
+ @abstract
+    An array of AVMetadataObject subclasses.
+ 
+ @discussion
+    -metadataObjects is never nil. If no metadata objects are present for a given time, an empty array is returned.
+ */
+@property(readonly) NSArray<AVMetadataObject *> *metadataObjects;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDepthData
+
+@class AVCaptureSynchronizedDepthDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedDepthData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureDepthDataOutput.
+ 
+ @discussion
+    Depth data, like video, may be dropped if not serviced in a timely fashion.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDepthData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedDepthDataInternal *_internal;
+}
+
+/*!
+ @property depthData
+ @abstract
+    An instance of AVDepthData.
+ 
+ @discussion
+    If depthDataWasDropped is YES, the returned depthData was dropped before it could be delivered to you, and thus this AVDepthData is a shell containing format information and calibration data, but no actual pixel map data. This property is never nil. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) AVDepthData *depthData;
+
+/*!
+ @property depthDataWasDropped
+ @abstract
+    YES if the depth data was dropped.
+ 
+ @discussion
+    If YES, inspect -droppedReason for the reason.
+ */
+@property(readonly) BOOL depthDataWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+    If depthDataWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+ 
+ @discussion
+    AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+NS_ASSUME_NONNULL_END
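
A hedged sketch of the synchronized-delivery flow described above, assuming a controller class that owns `videoOutput` (an AVCaptureVideoDataOutput, the master) and `depthOutput` (an AVCaptureDepthDataOutput), both attached to a running session:

```objc
- (void)startSynchronizedDelivery
{
    // The first output in the array acts as the master.
    self.outputSynchronizer = [[AVCaptureDataOutputSynchronizer alloc]
        initWithDataOutputs:@[ self.videoOutput, self.depthOutput ]];
    // A serial queue is required to guarantee in-order delivery.
    dispatch_queue_t queue = dispatch_queue_create("sync.queue", DISPATCH_QUEUE_SERIAL);
    [self.outputSynchronizer setDelegate:self queue:queue];
}

- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)collection
{
    // Per-output data is looked up via object subscripting.
    AVCaptureSynchronizedDepthData *syncedDepth =
        (AVCaptureSynchronizedDepthData *)collection[self.depthOutput];
    if (syncedDepth != nil && !syncedDepth.depthDataWasDropped) {
        // syncedDepth.depthData is valid for the duration of this callback.
        AVDepthData *depthData = syncedDepth.depthData;
        (void)depthData; // process, or retain if needed beyond the callback
    }
}
```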
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h	2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,160 @@
+/*
+    File:  AVCaptureDepthDataOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDepthDataOutput
+
+@class AVDepthData;
+
+@class AVCaptureDepthDataOutputInternal;
+@protocol AVCaptureDepthDataOutputDelegate;
+
+/*!
+ @class AVCaptureDepthDataOutput
+ @abstract
+    AVCaptureDepthDataOutput is a concrete subclass of AVCaptureOutput that can be used to process depth data in a streaming fashion.
+ 
+ @discussion
+    Instances of AVCaptureDepthDataOutput capture AVDepthData objects expressing disparity/depth. Applications can access the frames with the captureOutput:didOutputDepthData:fromConnection: delegate method.
+ 
+    AVCaptureDepthDataOutput always provides depth data in the format expressed by its source's -[AVCaptureDevice activeDepthDataFormat] property. If you wish to receive depth data in another format, you may choose from the -[AVCaptureDevice activeFormat]'s -[AVCaptureDeviceFormat supportedDepthDataFormats], and set it using -[AVCaptureDevice setActiveDepthDataFormat:].
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDepthDataOutput : AVCaptureOutput
+{
+@private
+    AVCaptureDepthDataOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method setDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that receives captured depth data and the dispatch queue on which the delegate is called.
+ 
+ @param delegate
+    An object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data in a streaming fashion.
+ @param callbackQueue
+    A dispatch queue on which all delegate methods are called.
+ 
+ @discussion
+    The depth data output vends captured depth data to its delegate using the methods specified in the AVCaptureDepthDataOutputDelegate protocol. All delegate methods are called on the specified dispatch queue. If the callback queue is blocked when new depth data is captured, that depth data is automatically dropped at a time determined by the value of the alwaysDiscardsLateDepthData property. This allows clients to process existing depth data on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming depth data.
+ 
+    Clients who need to minimize the chances of depth data being dropped should provide a dedicated queue and not share it with other data outputs. Processing of depth data may be deferred to another queue, but beware that the depth data pixel buffer maps may come from a finite buffer pool, which may be starved if your deferred processing fails to keep up.
+ 
+    A serial dispatch queue must be used to guarantee that depth data will be delivered in order. The callbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(id<AVCaptureDepthDataOutputDelegate>)delegate callbackQueue:(dispatch_queue_t)callbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data as it is captured. The delegate is set using the setDelegate:queue: method.
+ */
+@property(nonatomic, readonly) id<AVCaptureDepthDataOutputDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+    The dispatch queue on which all delegate methods are called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the setDelegate:queue: method.
+ */
+@property(nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+/*!
+ @property alwaysDiscardsLateDepthData
+ @abstract
+    Specifies whether the receiver should always discard any depth data that is not processed before the next depth data is captured.
+ 
+ @discussion
+    When the value of this property is YES, the receiver will immediately discard depth data that are captured while the delegateCallbackQueue is blocked. When the value of this property is NO, delegates will be allowed more time to process old depth data before new depth data are discarded, but application memory usage may increase as a result. The default value is YES.
+ */
+@property(nonatomic) BOOL alwaysDiscardsLateDepthData;
+
+/*!
+ @property filteringEnabled
+ @abstract
+    Specifies whether the depth data output should filter depth data to smooth out noise and fill invalid values.
+
+ @discussion
+    When the value of this property is YES, the receiver temporally filters the stream of AVDepthData objects to reduce noise, as well as fill invalid values. Invalid values (NaN) may be present in AVDepthData pixel buffer maps due to factors such as low light or lens occlusion. When filtering is enabled, the depth data output interpolates missing depth data values. Filtering should be disabled if you desire the raw depth data values. The default value is YES.
+ */
+@property(nonatomic, getter=isFilteringEnabled) BOOL filteringEnabled;
+
+@end
+
+
+/*!
+ @protocol AVCaptureDepthDataOutputDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureDepthDataOutput to receive captured depth data and be notified of late depth data that were dropped.
+ */
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDepthDataOutputDelegate <NSObject>
+
+@optional
+
+/*!
+ @method depthDataOutput:didOutputDepthData:timestamp:connection:
+ @abstract
+    Called whenever an AVCaptureDepthDataOutput instance outputs a new depth data object.
+ 
+ @param output
+    The AVCaptureDepthDataOutput instance vending the depth data.
+ @param depthData
+    An AVDepthData object containing the depth/disparity data.
+ @param timestamp
+    A CMTime indicating when the depth data was captured.
+ @param connection
+    The AVCaptureConnection through which the depth data is received.
+ 
+ @discussion
+    The delegate receives this message whenever the depth data output captures and outputs a new depth data object. This method is called on the dispatch queue specified by the output's delegateCallbackQueue property. This method is called frequently. Care must be taken to process the depth data quickly in order to prevent dropped depth data.
+ 
+    Clients that need to reference the AVDepthData object outside of the scope of this method must retain it and then release it when they are finished with it (in an MRR app).
+ 
+    Note that to maintain optimal performance, AVDepthData pixel buffer maps may be backed by a finite memory pool. If AVDepthData objects are held onto for too long, capture inputs will no longer be able to copy new depth data into memory, resulting in dropped depth data. If your application is causing depth data drops by holding on to provided depth data objects for too long, consider copying the pixel buffer map data into a new pixel buffer so that the AVDepthData backing memory can be reused more quickly.
+ */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didOutputDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection;
+
+/*!
+ @method depthDataOutput:didDropDepthData:timestamp:connection:reason:
+ @abstract
+    Called once for each depth data object that is discarded.
+ 
+ @param output
+    The AVCaptureDepthDataOutput instance that dropped the depth data.
+ @param depthData
+    A depth data object containing information about the dropped depth, such as its native depth type. This depth data object produces nil CVPixelBuffers for depth / disparity as it has no backing depth map.
+ @param timestamp
+    A CMTime indicating when the depth data was captured.
+ @param connection
+    The AVCaptureConnection from which the dropped depth data object was received.
+ @param reason
+    The reason the depth data object was dropped.
+ 
+ @discussion
+    Delegates receive this message whenever a depth data object is dropped. This method is called once for each dropped depth data. The object passed to this delegate method will contain a shell of an AVDepthData that contains no actual depth data backing pixel buffer, as well as a presentation time stamp and a reason for the drop. This method will be called on the dispatch queue specified by the output's delegateCallbackQueue property. Because this method is called on the same dispatch queue that outputs depth data, it must be efficient to prevent further capture performance problems, such as additional drops.
+ */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didDropDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection reason:(AVCaptureOutputDataDroppedReason)reason;
+
+@end
+
+NS_ASSUME_NONNULL_END
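
To make the new API concrete, here is a hedged sketch of attaching a depth data output to a session; the DepthReceiver class and queue label are invented for illustration, and the session is assumed to already contain a depth-capable camera input.

```objc
#import <AVFoundation/AVFoundation.h>

// Illustrative receiver class (name is hypothetical).
@interface DepthReceiver : NSObject <AVCaptureDepthDataOutputDelegate>
- (void)attachDepthOutputToSession:(AVCaptureSession *)session;
@end

@implementation DepthReceiver

- (void)attachDepthOutputToSession:(AVCaptureSession *)session
{
    AVCaptureDepthDataOutput *output = [[AVCaptureDepthDataOutput alloc] init];
    output.filteringEnabled = YES;            // smooth noise, interpolate invalid (NaN) values
    output.alwaysDiscardsLateDepthData = YES; // drop late depth data rather than queue it
    // Per the header, a dedicated serial queue guarantees in-order delivery
    // and minimizes the chance of dropped depth data.
    dispatch_queue_t queue = dispatch_queue_create("depth.delegate.queue", DISPATCH_QUEUE_SERIAL);
    [output setDelegate:self callbackQueue:queue];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
    }
}

- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didOutputDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection
{
    // Called frequently; process quickly, since depth pixel buffers may be
    // backed by a finite memory pool.
}

- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didDropDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection reason:(AVCaptureOutputDataDroppedReason)reason
{
    // depthData here is a shell with no backing depth map.
}

@end
```
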
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2016-11-04 20:52:43.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,27 +1,26 @@
 /*
-	File:  AVCaptureDevice.h
+    File:  AVCaptureDevice.h
  
-	Framework:  AVFoundation
+    Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVCaptureSessionPreset.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMFormatDescription.h>
-#if (TARGET_OS_EMBEDDED || TARGET_OS_IPHONE || TARGET_OS_WIN32)
-	#include <CoreGraphics/CGBase.h>
-	#include <CoreGraphics/CGGeometry.h>
-#elif TARGET_OS_MAC
-	#include <ApplicationServices/../Frameworks/CoreGraphics.framework/Headers/CGBase.h>
-	#include <ApplicationServices/../Frameworks/CoreGraphics.framework/Headers/CGGeometry.h>
-#endif
+#import <CoreGraphics/CGBase.h>
+#import <CoreGraphics/CGGeometry.h>
+
+NS_ASSUME_NONNULL_BEGIN
 
 /*!
  @constant AVCaptureDeviceWasConnectedNotification
  @abstract
     Posted when a device becomes available on the system.
-
+ 
  @discussion
     The notification object is an AVCaptureDevice instance representing the device that became available.
  */
@@ -31,14 +30,14 @@
  @constant AVCaptureDeviceWasDisconnectedNotification
  @abstract
     Posted when a device becomes unavailable on the system.
-
+ 
  @discussion
     The notification object is an AVCaptureDevice instance representing the device that became unavailable.
  */
 AVF_EXPORT NSString *const AVCaptureDeviceWasDisconnectedNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
 /*!
- @constant  AVCaptureDeviceSubjectAreaDidChangeNotification
+ @constant AVCaptureDeviceSubjectAreaDidChangeNotification
  @abstract
     Posted when the instance of AVCaptureDevice has detected a substantial change to the video subject area.
  
@@ -47,20 +46,21 @@
   */
 AVF_EXPORT NSString *const AVCaptureDeviceSubjectAreaDidChangeNotification NS_AVAILABLE_IOS(5_0) __TVOS_PROHIBITED;
 
+
+#pragma mark - AVCaptureDevice
+
 @class AVCaptureDeviceFormat;
-#if TARGET_OS_MAC && ! (TARGET_OS_EMBEDDED || TARGET_OS_IPHONE || TARGET_OS_WIN32)
 @class AVCaptureDeviceInputSource;
-#endif
 @class AVCaptureDeviceInternal;
 
 /*!
  @class AVCaptureDevice
  @abstract
     An AVCaptureDevice represents a physical device that provides realtime input media data, such as video and audio.
-
+ 
  @discussion
     Each instance of AVCaptureDevice corresponds to a device, such as a camera or microphone. Instances of AVCaptureDevice cannot be created directly. An array of all currently available devices can be obtained using AVCaptureDeviceDiscoverySession. Devices can provide one or more streams of a given media type. Applications can search for devices matching desired criteria by using AVCaptureDeviceDiscoverySession, or may obtain a reference to the default device matching desired criteria by using +[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
-
+ 
     Instances of AVCaptureDevice can be used to provide media data to an AVCaptureSession by creating an AVCaptureDeviceInput with the device and adding that to the capture session.
  */
 NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
@@ -70,6 +70,8 @@
     AVCaptureDeviceInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @method devices
  @abstract
@@ -77,62 +79,62 @@
  
  @result
     An NSArray of AVCaptureDevice instances for each available device.
-
+ 
  @discussion
     This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
  */
-+ (NSArray *)devices NS_DEPRECATED_IOS(4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
++ (NSArray<AVCaptureDevice *> *)devices NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
 
 /*!
  @method devicesWithMediaType:
  @abstract
     Returns an array of devices currently available for use as sources of media with the given media type.
-
+ 
  @param mediaType
     The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by each returned device.
  @result
     An NSArray of AVCaptureDevice instances for each available device.
-
+ 
  @discussion
     This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture that provide media of the given type. Media type constants are defined in AVMediaFormat.h. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
  */
-+ (NSArray *)devicesWithMediaType:(NSString *)mediaType NS_DEPRECATED_IOS(4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
++ (NSArray<AVCaptureDevice *> *)devicesWithMediaType:(AVMediaType)mediaType NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
 
 /*!
  @method defaultDeviceWithMediaType:
  @abstract
     Returns an AVCaptureDevice instance for the default device of the given media type.
-
+ 
  @param mediaType
     The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by the returned device.
  @result
     The default device with the given media type, or nil if no device with that media type exists.
-
+ 
  @discussion
     This method returns the default device of the given media type currently available on the system. For example, for AVMediaTypeVideo, this method will return the built in camera that is primarily used for capture and recording. Media type constants are defined in AVMediaFormat.h.
  */
-+ (AVCaptureDevice *)defaultDeviceWithMediaType:(NSString *)mediaType;
++ (nullable AVCaptureDevice *)defaultDeviceWithMediaType:(AVMediaType)mediaType;
 
 /*!
  @method deviceWithUniqueID:
  @abstract
     Returns an AVCaptureDevice instance with the given unique ID.
-
+ 
  @param deviceUniqueID
     The unique ID of the device instance to be returned.
  @result
     An AVCaptureDevice instance with the given unique ID, or nil if no device with that unique ID is available.
-
+ 
  @discussion
     Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. This method can be used to recall or track the status of a specific device whose unique ID has previously been saved.
  */
-+ (AVCaptureDevice *)deviceWithUniqueID:(NSString *)deviceUniqueID;
++ (nullable AVCaptureDevice *)deviceWithUniqueID:(NSString *)deviceUniqueID;
 
 /*!
  @property uniqueID
  @abstract
     An ID unique to the model of device corresponding to the receiver.
-
+ 
  @discussion
     Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. Applications can store the value returned by this property to recall or track the status of a specific device in the future.
  */
@@ -142,7 +144,7 @@
  @property modelID
  @abstract
     The model ID of the receiver.
-
+ 
  @discussion
     The value of this property is an identifier unique to all devices of the same model. The value is persistent across device connections and disconnections, and across different systems. For example, the model ID of the camera built in to two identical iPhone models will be the same even though they are different physical devices.
  */
@@ -158,35 +160,35 @@
  */
 @property(nonatomic, readonly) NSString *localizedName;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property manufacturer
  @abstract
     The human-readable manufacturer name for the receiver.
-
+ 
  @discussion
     This property can be used to identify capture devices from a particular manufacturer. All Apple devices return "Apple Inc.". Devices from third party manufacturers may return an empty string.
  */
-@property(nonatomic, readonly) NSString *manufacturer NS_AVAILABLE(10_9, NA);
+@property(nonatomic, readonly) NSString *manufacturer NS_AVAILABLE_MAC(10_9);
 
 /*!
  @property transportType
  @abstract
     The transport type of the receiver (e.g. USB, PCI, etc).
-
+ 
  @discussion
     This property can be used to discover the transport type of a capture device. Transport types are defined in <IOKit/audio/IOAudioTypes.h> as kIOAudioDeviceTransportType*.
  */
-@property(nonatomic, readonly) int32_t transportType NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) int32_t transportType NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @method hasMediaType:
  @abstract
     Returns whether the receiver provides media with the given media type.
-
+ 
  @param mediaType
     A media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed.
  @result
@@ -195,28 +197,28 @@
  @discussion
     Media type constants are defined in AVMediaFormat.h.
  */
-- (BOOL)hasMediaType:(NSString *)mediaType;
+- (BOOL)hasMediaType:(AVMediaType)mediaType;
 
 /*!
  @method lockForConfiguration:
  @abstract
     Requests exclusive access to configure device hardware properties.
-
+ 
  @param outError
     On return, if the device could not be locked, points to an NSError describing why the failure occurred.
  @result
     A BOOL indicating whether the device was successfully locked for configuration.
-
+ 
  @discussion
     In order to set hardware properties on an AVCaptureDevice, such as focusMode and exposureMode, clients must first acquire a lock on the device. Clients should only hold the device lock if they require settable device properties to remain unchanged. Holding the device lock unnecessarily may degrade capture quality in other applications sharing the device.
  */
-- (BOOL)lockForConfiguration:(NSError **)outError;
+- (BOOL)lockForConfiguration:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @method unlockForConfiguration
  @abstract
     Release exclusive control over device hardware properties.
-
+ 
  @discussion
     This method should be called to match an invocation of lockForConfiguration: when an application no longer needs to keep device hardware properties from changing automatically.
  */
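
The lock/unlock contract described in this hunk is easy to get wrong, so a minimal sketch of the intended pattern follows; "device" stands for an AVCaptureDevice obtained elsewhere.

```objc
// Sketch of the lockForConfiguration:/unlockForConfiguration pattern.
// "device" is assumed to be an AVCaptureDevice obtained elsewhere.
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    // ... set hardware properties (focusMode, exposureMode, etc.) here ...
    [device unlockForConfiguration]; // release the lock as soon as configuration is done
} else {
    NSLog(@"Could not lock device for configuration: %@", error);
}
```
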
@@ -226,81 +228,87 @@
  @method supportsAVCaptureSessionPreset:
  @abstract
     Returns whether the receiver can be used in an AVCaptureSession configured with the given preset.
-
+ 
  @param preset
     An AVCaptureSession preset.
  @result
     YES if the receiver can be used with the given preset, NO otherwise.
-
+ 
  @discussion
     An AVCaptureSession instance can be associated with a preset that configures its inputs and outputs to fulfill common use cases. This method can be used to determine if the receiver can be used in a capture session with the given preset. Presets are defined in AVCaptureSession.h.
  */
-- (BOOL)supportsAVCaptureSessionPreset:(NSString *)preset;
+- (BOOL)supportsAVCaptureSessionPreset:(AVCaptureSessionPreset)preset;
 
 /*!
  @property connected
  @abstract
     Indicates whether the device is connected and available to the system.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the device represented by the receiver is connected and available for use as a capture device. Clients can key value observe the value of this property to be notified when a device is no longer available. When the value of this property becomes NO for a given instance, it will not become YES again. If the same physical device again becomes available to the system, it will be represented using a new instance of AVCaptureDevice.
  */
 @property(nonatomic, readonly, getter=isConnected) BOOL connected;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property inUseByAnotherApplication
  @abstract
     Indicates whether the device is in use by another application.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the device represented by the receiver is in use by another application. Clients can key value observe the value of this property to be notified when another app starts or stops using this device.
  */
-@property(nonatomic, readonly, getter=isInUseByAnotherApplication) BOOL inUseByAnotherApplication NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly, getter=isInUseByAnotherApplication) BOOL inUseByAnotherApplication NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property suspended
  @abstract
     Indicates whether the device is suspended.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the device represented by the receiver is currently suspended. Some devices disallow data capture due to a feature on the device. For example, isSuspended returns YES for the external iSight when its privacy iris is closed, or for the internal iSight on a notebook when the notebook's display is closed. Clients can key value observe the value of this property to be notified when the device becomes suspended or unsuspended.
  */
-@property(nonatomic, readonly, getter=isSuspended) BOOL suspended NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly, getter=isSuspended) BOOL suspended NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property linkedDevices
  @abstract
     An array of AVCaptureDevice objects physically linked to the receiver.
-
+ 
  @discussion
     The value of this property is an array of AVCaptureDevice objects that are a part of the same physical device as the receiver. For example, for the external iSight camera, linkedDevices returns an array containing an AVCaptureDevice for the external iSight microphone.
  */
-@property(nonatomic, readonly) NSArray *linkedDevices NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) NSArray<AVCaptureDevice *> *linkedDevices NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @property formats
  @abstract
     An array of AVCaptureDeviceFormat objects supported by the receiver.
-
+ 
  @discussion
     This property can be used to enumerate the formats natively supported by the receiver. The capture device's activeFormat property may be set to one of the formats in this array. Clients can observe automatic changes to the receiver's formats by key value observing this property.
  */
-@property(nonatomic, readonly) NSArray *formats NS_AVAILABLE(10_7, 7_0);
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceFormat *> *formats NS_AVAILABLE(10_7, 7_0);
 
 /*!
  @property activeFormat
  @abstract
     The currently active format of the receiver.
-
+ 
  @discussion
-    This property can be used to get or set the currently active device format. -setActiveFormat: throws an NSInvalidArgumentException if set to a format not present in the formats array. -setActiveFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeFormat by key value observing this property.
+    This property can be used to get or set the currently active device format.
+ 
+    -setActiveFormat: throws an NSInvalidArgumentException if set to a format not present in the formats array.
+ 
+    -setActiveFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeFormat by key value observing this property.
  
     On iOS, use of AVCaptureDevice's setActiveFormat: and AVCaptureSession's setSessionPreset: are mutually exclusive. If you set a capture device's active format, the session to which it is attached changes its preset to AVCaptureSessionPresetInputPriority. Likewise if you set the AVCaptureSession's sessionPreset property, the session assumes control of its input devices, and configures their activeFormat appropriately. Note that audio devices do not expose any user-configurable formats on iOS. To configure audio input on iOS, you should use the AVAudioSession APIs instead (see AVAudioSession.h).
-    
+ 
     The activeFormat, activeVideoMinFrameDuration, and activeVideoMaxFrameDuration properties may be set simultaneously by using AVCaptureSession's begin/commitConfiguration methods:
  
     [session beginConfiguration]; // the session to which the receiver's AVCaptureDeviceInput is added.
@@ -320,9 +328,15 @@
  @property activeVideoMinFrameDuration
  @abstract
     A property indicating the receiver's current active minimum frame duration (the reciprocal of its max frame rate).
-
+ 
  @discussion
-    An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active maximum frame rate. To limit the max frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMinFrameDuration to its default value for the given activeFormat. -setActiveVideoMinFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value. -setActiveVideoMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeVideoMinFrameDuration by key value observing this property.
+    An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active maximum frame rate. To limit the max frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMinFrameDuration to its default value for the given activeFormat.
+ 
+    -setActiveVideoMinFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
+ 
+    -setActiveVideoMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeVideoMinFrameDuration by key value observing this property.
  
     On iOS, the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:
         - The receiver's activeFormat changes
@@ -337,9 +351,15 @@
  @property activeVideoMaxFrameDuration
  @abstract
     A property indicating the receiver's current active maximum frame duration (the reciprocal of its min frame rate).
-
+ 
  @discussion
-    An AVCaptureDevice's activeVideoMaxFrameDuration property is the reciprocal of its active minimum frame rate. To limit the min frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMaxFrameDuration to its default value for the given activeFormat. -setActiveVideoMaxFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value. -setActiveVideoMaxFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeVideoMaxFrameDuration by key value observing this property.
+    An AVCaptureDevice's activeVideoMaxFrameDuration property is the reciprocal of its active minimum frame rate. To limit the min frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMaxFrameDuration to its default value for the given activeFormat.
+ 
+    -setActiveVideoMaxFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
+ 
+    -setActiveVideoMaxFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeVideoMaxFrameDuration by key value observing this property.
  
     On iOS, the receiver's activeVideoMaxFrameDuration resets to its default value under the following conditions:
         - The receiver's activeFormat changes
@@ -350,7 +370,7 @@
  */
 @property(nonatomic) CMTime activeVideoMaxFrameDuration NS_AVAILABLE(10_9, 7_0);
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property inputSources
@@ -360,19 +380,19 @@
  @discussion
     Some devices can capture data from one of multiple data sources (different input jacks on the same audio device, for example). For devices with multiple possible data sources, inputSources can be used to enumerate the possible choices. Clients can observe automatic changes to the receiver's inputSources by key value observing this property.
  */
-@property(nonatomic, readonly) NSArray *inputSources NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceInputSource *> *inputSources NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property activeInputSource
  @abstract
     The currently active input source of the receiver.
-
+ 
  @discussion
     This property can be used to get or set the currently active device input source. -setActiveInputSource: throws an NSInvalidArgumentException if set to a value not present in the inputSources array. -setActiveInputSource: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeInputSource by key value observing this property.
  */
-@property(nonatomic, retain) AVCaptureDeviceInputSource *activeInputSource NS_AVAILABLE(10_7, NA);
+@property(nonatomic, retain, nullable) AVCaptureDeviceInputSource *activeInputSource NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 @end
 
@@ -381,7 +401,7 @@
  @enum AVCaptureDevicePosition
  @abstract
     Constants indicating the physical position of an AVCaptureDevice's hardware on the system.
-
+ 
  @constant AVCaptureDevicePositionUnspecified
     Indicates that the device's position relative to the system hardware is unspecified.
  @constant AVCaptureDevicePositionBack
@@ -390,18 +410,19 @@
     Indicates that the device is physically located on the front of the system hardware.
  */
 typedef NS_ENUM(NSInteger, AVCaptureDevicePosition) {
-	AVCaptureDevicePositionUnspecified         = 0,
-	AVCaptureDevicePositionBack                = 1,
-	AVCaptureDevicePositionFront               = 2
+    AVCaptureDevicePositionUnspecified = 0,
+    AVCaptureDevicePositionBack        = 1,
+    AVCaptureDevicePositionFront       = 2,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDevicePosition)
 
 /*!
  @property position
  @abstract
     Indicates the physical position of an AVCaptureDevice's hardware on the system.
-
+ 
  @discussion
     The value of this property is an AVCaptureDevicePosition indicating where the receiver's device is physically located on the system hardware.
  */
@@ -439,33 +460,34 @@
 /*!
  @constant AVCaptureDeviceTypeBuiltInDualCamera
     A device that consists of two fixed focal length cameras, one wide and one telephoto. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
-
+ 
     A device of this device type supports the following new features:
     - Auto switching from one camera to the other when zoom factor, light level, and focus position allow this.
     - Higher quality zoom for still captures by fusing images from both cameras.
-
+ 
     A device of this device type does not support the following features:
     - AVCaptureExposureModeCustom and manual exposure bracketing.
     - Locking focus with a lens position other than AVCaptureLensPositionCurrent.
     - Locking auto white balance with device white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
-
+ 
     Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
  */
 AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDualCamera NS_AVAILABLE_IOS(10_2) __TVOS_PROHIBITED;
 
 /*!
  @constant AVCaptureDeviceTypeBuiltInDuoCamera
-    A deprecated synonym for AVCaptureDeviceTypeBuiltIntDualCamera. Please use AVCaptureDeviceTypeBuiltInDualCamera instead.
+    A deprecated synonym for AVCaptureDeviceTypeBuiltInDualCamera. Please use AVCaptureDeviceTypeBuiltInDualCamera instead.
  */
 AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDuoCamera NS_DEPRECATED_IOS(10_0, 10_2, "Use AVCaptureDeviceTypeBuiltInDualCamera instead") __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceType)
 
 /*!
  @property deviceType
  @abstract
     The type of the capture device.
-
+ 
  @discussion
     A capture device's type never changes.
  */
@@ -475,7 +497,7 @@
  @method defaultDeviceWithDeviceType:
  @abstract
     Returns an AVCaptureDevice instance for the default device of the given device type, media type, and position.
-
+ 
  @param deviceType
     The device type supported by the returned device. It must be a valid AVCaptureDeviceType.
  @param mediaType
@@ -484,11 +506,11 @@
     The position supported by the returned device. Pass AVCaptureDevicePositionUnspecified to consider devices with any position.
  @result
     The default device with the given device type, media type, and position, or nil if no such device exists.
-
+ 
  @discussion
     This method returns the default device of the given combination of device type, media type, and position currently available on the system.
  */
-+ (AVCaptureDevice *)defaultDeviceWithDeviceType:(AVCaptureDeviceType)deviceType mediaType:(NSString *)mediaType position:(AVCaptureDevicePosition)position NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
++ (nullable AVCaptureDevice *)defaultDeviceWithDeviceType:(AVCaptureDeviceType)deviceType mediaType:(nullable AVMediaType)mediaType position:(AVCaptureDevicePosition)position NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
 
 @end
 
@@ -497,7 +519,7 @@
  @enum AVCaptureFlashMode
  @abstract
     Constants indicating the mode of the flash on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureFlashModeOff
     Indicates that the flash should always be off.
  @constant AVCaptureFlashModeOn
@@ -506,18 +528,19 @@
     Indicates that the flash should be used automatically depending on ambient light conditions.
  */
 typedef NS_ENUM(NSInteger, AVCaptureFlashMode) {
-	AVCaptureFlashModeOff  = 0,
-	AVCaptureFlashModeOn   = 1,
-	AVCaptureFlashModeAuto = 2
+    AVCaptureFlashModeOff  = 0,
+    AVCaptureFlashModeOn   = 1,
+    AVCaptureFlashModeAuto = 2,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceFlash)
 
 /*!
  @property hasFlash
  @abstract
     Indicates whether the receiver has a flash.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the receiver has a flash. The receiver's flashMode property can only be set when this property returns YES.
  */
@@ -547,12 +570,12 @@
  @method isFlashModeSupported:
  @abstract
     Returns whether the receiver supports the given flash mode.
-
+ 
  @param flashMode
     An AVCaptureFlashMode to be checked.
  @result
     YES if the receiver supports the given flash mode, NO otherwise.
-
+ 
  @discussion
     The receiver's flashMode property can only be set to a certain mode if this method returns YES for that mode.
  */
@@ -562,7 +585,7 @@
  @property flashMode
  @abstract
     Indicates current mode of the receiver's flash, if it has one.
-
+ 
  @discussion
     The value of this property is an AVCaptureFlashMode that determines the mode of the receiver's flash, if it has one. -setFlashMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFlashModeSupported:). -setFlashMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's flashMode by key value observing this property.
  
@@ -577,7 +600,7 @@
  @enum AVCaptureTorchMode
  @abstract
     Constants indicating the mode of the torch on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureTorchModeOff
     Indicates that the torch should always be off.
  @constant AVCaptureTorchModeOn
@@ -586,16 +609,18 @@
     Indicates that the torch should be used automatically depending on ambient light conditions.
  */
 typedef NS_ENUM(NSInteger, AVCaptureTorchMode) {
-	AVCaptureTorchModeOff  = 0,
-	AVCaptureTorchModeOn   = 1,
-	AVCaptureTorchModeAuto = 2,
+    AVCaptureTorchModeOff  = 0,
+    AVCaptureTorchModeOn   = 1,
+    AVCaptureTorchModeAuto = 2,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 /*!
  @constant AVCaptureMaxAvailableTorchLevel
     A special value that may be passed to -setTorchModeWithLevel:error: to set the torch to the maximum level currently available. Under thermal duress, the maximum available torch level may be less than 1.0.
  */
-extern const float AVCaptureMaxAvailableTorchLevel;
+AVF_EXPORT const float AVCaptureMaxAvailableTorchLevel NS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceTorch)
 
@@ -603,7 +628,7 @@
  @property hasTorch
  @abstract
     Indicates whether the receiver has a torch.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the receiver has a torch. The receiver's torchMode property can only be set when this property returns YES.
  */
@@ -633,7 +658,7 @@
  @property torchLevel
  @abstract
     Indicates the receiver's current torch brightness level as a floating point value.
-
+ 
  @discussion
     The value of this property is a float indicating the receiver's torch level from 0.0 (off) -> 1.0 (full). This property is key-value observable.
  */
@@ -643,12 +668,12 @@
  @method isTorchModeSupported:
  @abstract
     Returns whether the receiver supports the given torch mode.
-
+ 
  @param torchMode
     An AVCaptureTorchMode to be checked.
  @result
     YES if the receiver supports the given torch mode, NO otherwise.
-
+ 
  @discussion
     The receiver's torchMode property can only be set to a certain mode if this method returns YES for that mode.
  */
@@ -658,7 +683,7 @@
  @property torchMode
  @abstract
     Indicates current mode of the receiver's torch, if it has one.
-
+ 
  @discussion
     The value of this property is an AVCaptureTorchMode that determines the mode of the receiver's torch, if it has one. -setTorchMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isTorchModeSupported:). -setTorchMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing this property.
  */
@@ -668,11 +693,11 @@
  @method setTorchModeOnWithLevel:error:
  @abstract
     Sets the current mode of the receiver's torch to AVCaptureTorchModeOn at the specified level.
-
+ 
  @discussion
     This method sets the torch mode to AVCaptureTorchModeOn at a specified level. torchLevel must be a value between 0 and 1, or the special value AVCaptureMaxAvailableTorchLevel. The specified value may not be available if the iOS device is too hot. This method throws an NSInvalidArgumentException if set to an unsupported level. If the specified level is valid, but unavailable, the method returns NO with AVErrorTorchLevelUnavailable. -setTorchModeOnWithLevel:error: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing the torchMode property.
  */
-- (BOOL)setTorchModeOnWithLevel:(float)torchLevel error:(NSError **)outError NS_AVAILABLE_IOS(6_0);
+- (BOOL)setTorchModeOnWithLevel:(float)torchLevel error:(NSError * _Nullable * _Nullable)outError NS_AVAILABLE_IOS(6_0);
 
 @end
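
A short, hedged sketch of the torch-level API in the hunk above; it assumes the device is already locked for configuration, and 0.5 is an arbitrary illustrative level.

```objc
// Sketch: turn the torch on at half brightness.
// Assumes "device" is an AVCaptureDevice already locked for configuration.
NSError *error = nil;
if (device.hasTorch && [device isTorchModeSupported:AVCaptureTorchModeOn]) {
    if (![device setTorchModeOnWithLevel:0.5 error:&error]) {
        // A valid but currently unavailable level (e.g. under thermal duress)
        // fails with AVErrorTorchLevelUnavailable.
        NSLog(@"Torch level unavailable: %@", error);
    }
}
```
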
 
@@ -681,7 +706,7 @@
  @enum AVCaptureFocusMode
  @abstract
     Constants indicating the mode of the focus on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureFocusModeLocked
     Indicates that the focus should be locked at the lens' current position.
  @constant AVCaptureFocusModeAutoFocus
@@ -690,11 +715,12 @@
     Indicates that the device should automatically focus when needed.
  */
 typedef NS_ENUM(NSInteger, AVCaptureFocusMode) {
-	AVCaptureFocusModeLocked              = 0,
-	AVCaptureFocusModeAutoFocus           = 1,
-	AVCaptureFocusModeContinuousAutoFocus = 2,
+    AVCaptureFocusModeLocked              = 0,
+    AVCaptureFocusModeAutoFocus           = 1,
+    AVCaptureFocusModeContinuousAutoFocus = 2,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 /*!
  @enum AVCaptureAutoFocusRangeRestriction
  @abstract
@@ -708,23 +734,24 @@
     Indicates that the autofocus system should restrict the focus range for subject matter that is far from the camera.
  */
 typedef NS_ENUM(NSInteger, AVCaptureAutoFocusRangeRestriction) {
-	AVCaptureAutoFocusRangeRestrictionNone = 0,
-	AVCaptureAutoFocusRangeRestrictionNear = 1,
-	AVCaptureAutoFocusRangeRestrictionFar  = 2,
+    AVCaptureAutoFocusRangeRestrictionNone = 0,
+    AVCaptureAutoFocusRangeRestrictionNear = 1,
+    AVCaptureAutoFocusRangeRestrictionFar  = 2,
 } NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceFocus)
 
 /*!
  @method isFocusModeSupported:
  @abstract
     Returns whether the receiver supports the given focus mode.
-
+ 
  @param focusMode
     An AVCaptureFocusMode to be checked.
  @result
     YES if the receiver supports the given focus mode, NO otherwise.
-
+ 
  @discussion
     The receiver's focusMode property can only be set to a certain mode if this method returns YES for that mode.
  */
@@ -734,7 +761,7 @@
  @property lockingFocusWithCustomLensPositionSupported
  @abstract
     Indicates whether the receiver supports a lens position other than AVCaptureLensPositionCurrent.
-
+ 
  @discussion
     If lockingFocusWithCustomLensPositionSupported returns NO, setFocusModeLockedWithLensPosition: may only be called with AVCaptureLensPositionCurrent. Passing any other lens position will result in an exception.
  */
@@ -744,7 +771,7 @@
  @property focusMode
  @abstract
     Indicates current focus mode of the receiver, if it has one.
-
+ 
  @discussion
     The value of this property is an AVCaptureFocusMode that determines the receiver's focus mode, if it has one. -setFocusMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFocusModeSupported:). -setFocusMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusMode by key value observing this property.
  */
@@ -754,7 +781,7 @@
  @property focusPointOfInterestSupported
  @abstract
     Indicates whether the receiver supports focus points of interest.
-
+ 
  @discussion
     The receiver's focusPointOfInterest property can only be set if this property returns YES.
  */
@@ -764,7 +791,7 @@
  @property focusPointOfInterest
  @abstract
     Indicates current focus point of interest of the receiver, if it has one.
-
+ 
  @discussion
     The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1) indicates that it should focus on the bottom right. The default value is (0.5,0.5). -setFocusPointOfInterest: throws an NSInvalidArgumentException if isFocusPointOfInterestSupported returns NO. -setFocusPointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusPointOfInterest by key value observing this property. Note that setting focusPointOfInterest alone does not initiate a focus operation. After setting focusPointOfInterest, call -setFocusMode: to apply the new point of interest.
  */
@@ -774,7 +801,7 @@
  @property adjustingFocus
  @abstract
     Indicates whether the receiver is currently performing a focus scan to adjust focus.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the receiver's camera focus is being automatically adjusted by means of a focus scan, because its focus mode is AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Clients can observe the value of this property to determine whether the camera's focus is stable.
  @seealso lensPosition
@@ -836,7 +863,7 @@
  @constant AVCaptureLensPositionCurrent
     A special value that may be passed as the lensPosition parameter of setFocusModeLockedWithLensPosition:completionHandler: to indicate that the caller does not wish to specify a value for the lensPosition property, and that it should instead be set to its current value. Note that the device may be adjusting lensPosition at the time of the call, in which case the value at which lensPosition is locked may differ from the value obtained by querying the lensPosition property.
  */
-AVF_EXPORT const float AVCaptureLensPositionCurrent NS_AVAILABLE_IOS(8_0);
+AVF_EXPORT const float AVCaptureLensPositionCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setFocusModeLockedWithLensPosition:completionHandler:
@@ -851,7 +878,7 @@
  @discussion
     This is the only way of setting lensPosition. This method throws an NSRangeException if lensPosition is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  */
-- (void)setFocusModeLockedWithLensPosition:(float)lensPosition completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+- (void)setFocusModeLockedWithLensPosition:(float)lensPosition completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
 
 @end
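
As a hedged illustration of the focus-lock call above (the device is assumed to be locked for configuration, as the header requires):

```objc
// Sketch: lock focus at the current lens position.
// Assumes "device" is locked for configuration via lockForConfiguration:.
if ([device isFocusModeSupported:AVCaptureFocusModeLocked]) {
    [device setFocusModeLockedWithLensPosition:AVCaptureLensPositionCurrent
                             completionHandler:^(CMTime syncTime) {
        // syncTime identifies the first buffer to which the setting was applied.
    }];
}
```
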
 
@@ -860,7 +887,7 @@
  @enum AVCaptureExposureMode
  @abstract
     Constants indicating the mode of the exposure on the receiver's device, if it has adjustable exposure.
-
+ 
  @constant AVCaptureExposureModeLocked
     Indicates that the exposure should be locked at its current value.
  @constant AVCaptureExposureModeAutoExpose
@@ -871,24 +898,25 @@
     Indicates that the device should only adjust exposure according to user provided ISO, exposureDuration values.
  */
 typedef NS_ENUM(NSInteger, AVCaptureExposureMode) {
-	AVCaptureExposureModeLocked                            = 0,
-	AVCaptureExposureModeAutoExpose                        = 1,
-	AVCaptureExposureModeContinuousAutoExposure	           = 2,
-	AVCaptureExposureModeCustom NS_ENUM_AVAILABLE_IOS(8_0) = 3,
+    AVCaptureExposureModeLocked                            = 0,
+    AVCaptureExposureModeAutoExpose                        = 1,
+    AVCaptureExposureModeContinuousAutoExposure            = 2,
+    AVCaptureExposureModeCustom NS_ENUM_AVAILABLE_IOS(8_0) = 3,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceExposure)
 
 /*!
  @method isExposureModeSupported:
  @abstract
     Returns whether the receiver supports the given exposure mode.
-
+ 
  @param exposureMode
     An AVCaptureExposureMode to be checked.
  @result
     YES if the receiver supports the given exposure mode, NO otherwise.
-
+ 
  @discussion
     The receiver's exposureMode property can only be set to a certain mode if this method returns YES for that mode.
  */
@@ -898,7 +926,7 @@
  @property exposureMode
  @abstract
     Indicates current exposure mode of the receiver, if it has adjustable exposure.
-
+ 
  @discussion
     The value of this property is an AVCaptureExposureMode that determines the receiver's exposure mode, if it has adjustable exposure. -setExposureMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isExposureModeSupported:). -setExposureMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCaptureStillImageOutput with automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior), the receiver's ISO and exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark enough to warrant still image stabilization. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property to NO. Clients can observe automatic changes to the receiver's exposureMode by key value observing this property.
  */
@@ -918,7 +946,7 @@
  @property exposurePointOfInterest
  @abstract
     Indicates current exposure point of interest of the receiver, if it has one.
-
+ 
  @discussion
     The value of this property is a CGPoint that determines the receiver's exposure point of interest, if it has adjustable exposure. A value of (0,0) indicates that the camera should adjust exposure based on the top left corner of the image, while a value of (1,1) indicates that it should adjust exposure based on the bottom right corner. The default value is (0.5,0.5). -setExposurePointOfInterest: throws an NSInvalidArgumentException if isExposurePointOfInterestSupported returns NO. -setExposurePointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Note that setting exposurePointOfInterest alone does not initiate an exposure operation. After setting exposurePointOfInterest, call -setExposureMode: to apply the new point of interest.
  */
@@ -928,7 +956,7 @@
  @property adjustingExposure
  @abstract
     Indicates whether the receiver is currently adjusting camera exposure.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the receiver's camera exposure is being automatically adjusted because its exposure mode is AVCaptureExposureModeAutoExpose or AVCaptureExposureModeContinuousAutoExposure. Clients can observe the value of this property to determine whether the camera exposure is stable or is being automatically adjusted.
  */
@@ -974,7 +1002,7 @@
  @constant AVCaptureISOCurrent
     A special value that may be passed as the ISO parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the ISO property, and that it should instead be set to its current value. Note that the device may be adjusting ISO at the time of the call, in which case the value to which ISO is set may differ from the value obtained by querying the ISO property.
  */
-AVF_EXPORT const float AVCaptureISOCurrent NS_AVAILABLE_IOS(8_0);
+AVF_EXPORT const float AVCaptureISOCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setExposureModeCustomWithDuration:ISO:completionHandler:
@@ -991,7 +1019,7 @@
  @discussion
     This is the only way of setting exposureDuration and ISO. This method throws an NSRangeException if either exposureDuration or ISO is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCaptureStillImageOutput with automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior), the receiver's ISO and exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark enough to warrant still image stabilization. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property to NO.
  */
-- (void)setExposureModeCustomWithDuration:(CMTime)duration ISO:(float)ISO completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+- (void)setExposureModeCustomWithDuration:(CMTime)duration ISO:(float)ISO completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
 
 /*!
  @property exposureTargetOffset
@@ -1037,7 +1065,7 @@
  @constant AVCaptureExposureTargetBiasCurrent
     A special value that may be passed as the bias parameter of setExposureTargetBias:completionHandler: to indicate that the caller does not wish to specify a value for the exposureTargetBias property, and that it should instead be set to its current value.
  */
-AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent NS_AVAILABLE_IOS(8_0);
+AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setExposureTargetBias:completionHandler:
@@ -1052,7 +1080,7 @@
  @discussion
     This is the only way of setting exposureTargetBias. This method throws an NSRangeException if exposureTargetBias is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  */
-- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+- (void)setExposureTargetBias:(float)bias completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
 
 @end
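
And a sketch of the custom exposure call from this hunk; the 1/60 s duration is a placeholder that would need validation against the active format's supported range, and the device is assumed to be locked for configuration.

```objc
// Sketch: custom exposure with an explicit duration, keeping the current ISO.
// Assumes "device" is locked for configuration; the duration is illustrative.
if ([device isExposureModeSupported:AVCaptureExposureModeCustom]) {
    [device setExposureModeCustomWithDuration:CMTimeMake(1, 60) // 1/60 s
                                          ISO:AVCaptureISOCurrent
                            completionHandler:^(CMTime syncTime) {
        // syncTime matches the first buffer to which the settings were applied.
    }];
}
```
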
 
@@ -1061,7 +1089,7 @@
  @enum AVCaptureWhiteBalanceMode
  @abstract
     Constants indicating the mode of the white balance on the receiver's device, if it has adjustable white balance.
-
+ 
  @constant AVCaptureWhiteBalanceModeLocked
     Indicates that the white balance should be locked at its current value.
  @constant AVCaptureWhiteBalanceModeAutoWhiteBalance
@@ -1070,11 +1098,12 @@
     Indicates that the device should automatically adjust white balance when needed.
  */
 typedef NS_ENUM(NSInteger, AVCaptureWhiteBalanceMode) {
-	AVCaptureWhiteBalanceModeLocked				        = 0,
-	AVCaptureWhiteBalanceModeAutoWhiteBalance	        = 1,
+    AVCaptureWhiteBalanceModeLocked                     = 0,
+    AVCaptureWhiteBalanceModeAutoWhiteBalance           = 1,
     AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
 /*!
  @typedef AVCaptureWhiteBalanceGains
  @abstract
@@ -1086,6 +1115,7 @@
     float blueGain;
 } AVCaptureWhiteBalanceGains NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
+
 /*!
  @typedef AVCaptureWhiteBalanceChromaticityValues
  @abstract
@@ -1096,28 +1126,30 @@
     float y;
 } AVCaptureWhiteBalanceChromaticityValues NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
+
 /*!
  @typedef AVCaptureWhiteBalanceTemperatureAndTintValues
  @abstract
     Structure containing a white balance color correlated temperature in kelvin, plus a tint value in the range of [-150 - +150].
  */
 typedef struct {
-	float temperature;
-	float tint;
+    float temperature;
+    float tint;
 } AVCaptureWhiteBalanceTemperatureAndTintValues NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceWhiteBalance)
 
 /*!
  @method isWhiteBalanceModeSupported:
  @abstract
     Returns whether the receiver supports the given white balance mode.
-
+ 
  @param whiteBalanceMode
     An AVCaptureWhiteBalanceMode to be checked.
  @result
     YES if the receiver supports the given white balance mode, NO otherwise.
-
+ 
  @discussion
     The receiver's whiteBalanceMode property can only be set to a certain mode if this method returns YES for that mode.
  */
@@ -1127,7 +1159,7 @@
  @property lockingWhiteBalanceWithCustomDeviceGainsSupported
  @abstract
     Indicates whether the receiver supports white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
-
+ 
  @discussion
     If lockingWhiteBalanceWithCustomDeviceGainsSupported returns NO, setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains: may only be called with AVCaptureWhiteBalanceGainsCurrent. Passing any other white balance gains will result in an exception.
  */
@@ -1137,7 +1169,7 @@
  @property whiteBalanceMode
  @abstract
     Indicates current white balance mode of the receiver, if it has adjustable white balance.
-
+ 
  @discussion
     The value of this property is an AVCaptureWhiteBalanceMode that determines the receiver's white balance mode, if it has adjustable white balance. -setWhiteBalanceMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isWhiteBalanceModeSupported:). -setWhiteBalanceMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's whiteBalanceMode by key value observing this property.
  */
@@ -1147,7 +1179,7 @@
  @property adjustingWhiteBalance
  @abstract
     Indicates whether the receiver is currently adjusting camera white balance.
-
+ 
  @discussion
     The value of this property is a BOOL indicating whether the receiver's camera white balance is being automatically adjusted because its white balance mode is AVCaptureWhiteBalanceModeAutoWhiteBalance or AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance. Clients can observe the value of this property to determine whether the camera white balance is stable or is being automatically adjusted.
  */
@@ -1197,12 +1229,12 @@
  @param whiteBalanceGains
     The white balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent can be used to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains.
  @param handler
-    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison  with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
+    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
  
  @discussion
     For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. Gain values are normalized to the minimum channel value to avoid brightness changes (e.g. R:2 G:2 B:4 will be normalized to R:1 G:1 B:2). This method throws an NSRangeException if any of the whiteBalanceGains are set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  */
-- (void)setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+- (void)setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
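
Since each gain must lie between 1.0 and maxWhiteBalanceGain, a sketch that locks white balance at the device's current gains, clamped into that range (device is assumed, as before):

    // Lock white balance at the current gains, kept within the legal range.
    AVCaptureWhiteBalanceGains gains = device.deviceWhiteBalanceGains;
    gains.redGain   = MAX(1.0f, MIN(gains.redGain,   device.maxWhiteBalanceGain));
    gains.greenGain = MAX(1.0f, MIN(gains.greenGain, device.maxWhiteBalanceGain));
    gains.blueGain  = MAX(1.0f, MIN(gains.blueGain,  device.maxWhiteBalanceGain));
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:gains completionHandler:nil];
        [device unlockForConfiguration];
    }
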
 
 /*!
  @method chromaticityValuesForDeviceWhiteBalanceGains:
@@ -1211,7 +1243,7 @@
  
  @param whiteBalanceGains
     White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceChromaticityValues structure containing device-independent values.
  
  @discussion
@@ -1226,8 +1258,7 @@
  
  @param chromaticityValues
     Little x, little y chromaticity values as described in the documentation for AVCaptureWhiteBalanceChromaticityValues.
- 
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
  
  @discussion
@@ -1242,7 +1273,7 @@
  
  @param whiteBalanceGains
     White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceTemperatureAndTintValues structure containing device-independent values.
  
  @discussion
@@ -1257,8 +1288,7 @@
  
  @param tempAndTintValues
     Temperature and tint values as described in the documentation for AVCaptureWhiteBalanceTemperatureAndTintValues.
- 
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
  
 @discussion
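
A sketch of the temperature/tint-to-gains conversion; the returned gains are device-specific and may still need clamping before being passed to the lock method above:

    // Convert a 5000 K, neutral-tint target into device-specific RGB gains.
    AVCaptureWhiteBalanceTemperatureAndTintValues tempAndTint = { .temperature = 5000.0f, .tint = 0.0f };
    AVCaptureWhiteBalanceGains gains = [device deviceWhiteBalanceGainsForTemperatureAndTintValues:tempAndTint];
    // Clamp each channel to [1.0, maxWhiteBalanceGain] before locking.
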
@@ -1331,7 +1361,7 @@
  
     -setVideoZoomFactor: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  
- @seealso AVCaptureDeviceFormat AVCaptureDeviceFormat - videoMaxZoomFactor and videoZoomFactorUpscaleThreshold
+ @seealso -[AVCaptureDeviceFormat videoMaxZoomFactor] and -[AVCaptureDeviceFormat videoZoomFactorUpscaleThreshold]
  */
 @property(nonatomic) CGFloat videoZoomFactor NS_AVAILABLE_IOS(7_0);
 
@@ -1359,7 +1389,7 @@
  @discussion
     Clients can observe this value to determine when a ramp begins or completes.
  */
-@property(nonatomic,readonly,getter=isRampingVideoZoom) BOOL rampingVideoZoom NS_AVAILABLE_IOS(7_0);
+@property(nonatomic, readonly, getter=isRampingVideoZoom) BOOL rampingVideoZoom NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method cancelVideoZoomRamp
@@ -1373,6 +1403,16 @@
  */
 - (void)cancelVideoZoomRamp NS_AVAILABLE_IOS(7_0);
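
A sketch of a smooth zoom using rampToVideoZoomFactor:withRate: (declared earlier in this header, outside this hunk), with the target clamped to the active format's maximum:

    // Ramp to 2x zoom, or to the format's maximum if that is lower.
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        CGFloat target = MIN(2.0, device.activeFormat.videoMaxZoomFactor);
        [device rampToVideoZoomFactor:target withRate:4.0f]; // rate is in zoom-factor doublings per second
        [device unlockForConfiguration];
    }
    // Later, [device cancelVideoZoomRamp] (again under lockForConfiguration:) abandons the ramp in place.
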
 
+/*!
+ @property dualCameraSwitchOverVideoZoomFactor
+ @abstract
+    The video zoom factor at or above which a DualCamera can select between its wide angle camera and its telephoto camera.
+ 
+ @discussion
+    This is the zoom factor at which the wide angle camera's field of view matches the telephoto camera's full field of view. On non-DualCamera devices this will return 1.0.
+ */
+@property(atomic, readonly) CGFloat dualCameraSwitchOverVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
 @end
 
 
@@ -1391,12 +1431,13 @@
     The client is authorized to access the hardware supporting a media type.
  */
 typedef NS_ENUM(NSInteger, AVAuthorizationStatus) {
-	AVAuthorizationStatusNotDetermined = 0,
-	AVAuthorizationStatusRestricted,
-	AVAuthorizationStatusDenied,
-	AVAuthorizationStatusAuthorized
+    AVAuthorizationStatusNotDetermined = 0,
+    AVAuthorizationStatusRestricted    = 1,
+    AVAuthorizationStatusDenied        = 2,
+    AVAuthorizationStatusAuthorized    = 3,
 } NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED;
-		
+
+
 @interface AVCaptureDevice (AVCaptureDeviceAuthorization)
 
 /*!
@@ -1406,14 +1447,13 @@
  
  @param mediaType
     The media type, either AVMediaTypeVideo or AVMediaTypeAudio
- 
  @result
     The authorization status of the client
  
  @discussion
     This method returns the AVAuthorizationStatus of the client for accessing the underlying hardware supporting the media type. Media type constants are defined in AVMediaFormat.h. If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown. If the status is AVAuthorizationStatusNotDetermined, you may use the +requestAccessForMediaType:completionHandler: method to request access by prompting the user.
  */
-+ (AVAuthorizationStatus)authorizationStatusForMediaType:(NSString *)mediaType NS_AVAILABLE_IOS(7_0);
++ (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method requestAccessForMediaType:completionHandler:
@@ -1436,28 +1476,34 @@
 
     The completion handler is called on an arbitrary dispatch queue. It is the client's responsibility to ensure that any UIKit-related updates are called on the main queue or main thread as a result.
  */
-+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler NS_AVAILABLE_IOS(7_0);
++ (void)requestAccessForMediaType:(AVMediaType)mediaType completionHandler:(void (^)(BOOL granted))handler NS_AVAILABLE_IOS(7_0);
 
 @end
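
A sketch of the authorization dance for video capture; since the handler runs on an arbitrary queue, UI work is dispatched back to the main queue as the discussion above requires:

    switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) {
        case AVAuthorizationStatusAuthorized:
            // Safe to configure and start the capture session.
            break;
        case AVAuthorizationStatusNotDetermined:
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    // Update UI and start the session here if granted.
                });
            }];
            break;
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted:
            // Capture is unavailable; direct the user to Settings.
            break;
    }
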
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
 
-typedef float AVCaptureDeviceTransportControlsSpeed; 
+/*!
+ @typedef
+ @abstract
+    A constant that is used to specify the transport controls' speed.
+ */
+typedef float AVCaptureDeviceTransportControlsSpeed NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED;
+
 
 /*!
  @enum AVCaptureDeviceTransportControlsPlaybackMode
  @abstract
     Constants indicating the transport controls' current mode of play back, if it has one.
-
+ 
  @constant AVCaptureDeviceTransportControlsNotPlayingMode
     Indicates that the tape transport is not threaded through the play head.
  @constant AVCaptureDeviceTransportControlsPlayingMode
     Indicates that the tape transport is threaded through the play head.
  */
 typedef NS_ENUM(NSInteger, AVCaptureDeviceTransportControlsPlaybackMode) {
-	AVCaptureDeviceTransportControlsNotPlayingMode      = 0,
-	AVCaptureDeviceTransportControlsPlayingMode         = 1
-} NS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED;
+    AVCaptureDeviceTransportControlsNotPlayingMode = 0,
+    AVCaptureDeviceTransportControlsPlayingMode    = 1,
+} NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceTransportControls)
 
@@ -1465,56 +1511,54 @@
  @property transportControlsSupported
  @abstract
     Returns whether the receiver supports transport control commands.
-
+ 
  @discussion
     For devices with transport controls, such as AVC tape-based camcorders or pro capture devices with RS422 deck control, the value of this property is YES. If transport controls are not supported, none of the associated transport control methods and properties are available on the receiver.
  */
-@property(nonatomic, readonly) BOOL transportControlsSupported NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) BOOL transportControlsSupported NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property transportControlsPlaybackMode
  @abstract
     Returns the receiver's current playback mode.
-
+ 
  @discussion
     For devices that support transport control, this property may be queried to discover the current playback mode.
  */
-@property(nonatomic, readonly) AVCaptureDeviceTransportControlsPlaybackMode transportControlsPlaybackMode NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) AVCaptureDeviceTransportControlsPlaybackMode transportControlsPlaybackMode NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property transportControlsSpeed
  @abstract
     Returns the receiver's current playback speed as a floating point value.
-
+ 
  @discussion
-    For devices that support transport control, this property may be queried to discover the  current playback speed of the deck.
+    For devices that support transport control, this property may be queried to discover the current playback speed of the deck.
     0.0 -> stopped.
     1.0 -> forward at normal speed.
     -1.0 -> reverse at normal speed.
     2.0 -> forward at 2x normal speed.
     etc.
  */
-@property(nonatomic, readonly) AVCaptureDeviceTransportControlsSpeed transportControlsSpeed NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly) AVCaptureDeviceTransportControlsSpeed transportControlsSpeed NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method setTransportControlsPlaybackMode:speed:
  @abstract
     Sets both the transport controls playback mode and speed in a single method.
-
+ 
  @param mode
     An AVCaptureDeviceTransportControlsPlaybackMode indicating whether the deck should be put into play mode.
-@param speed
+ @param speed
     An AVCaptureDeviceTransportControlsSpeed indicating the speed at which to wind or play the tape.
-
+ 
  @discussion
     A method for setting the receiver's transport controls playback mode and speed. The receiver must be locked for configuration using lockForConfiguration: before clients can call this method, otherwise an NSGenericException is thrown.
  */
-- (void)setTransportControlsPlaybackMode:(AVCaptureDeviceTransportControlsPlaybackMode)mode speed:(AVCaptureDeviceTransportControlsSpeed)speed NS_AVAILABLE(10_7, NA);
+- (void)setTransportControlsPlaybackMode:(AVCaptureDeviceTransportControlsPlaybackMode)mode speed:(AVCaptureDeviceTransportControlsSpeed)speed NS_AVAILABLE_MAC(10_7);
 
 @end
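
A macOS-only sketch, assuming a deck-style device that reports transportControlsSupported:

    // Wind the deck forward at 2x normal speed.
    if (device.transportControlsSupported) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            [device setTransportControlsPlaybackMode:AVCaptureDeviceTransportControlsPlayingMode speed:2.0f];
            [device unlockForConfiguration];
        }
    }
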
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
 
 @interface AVCaptureDevice (AVCaptureDeviceHighDynamicRangeSupport)
 
@@ -1554,10 +1598,11 @@
     The P3 D65 wide color space which uses Illuminant D65 as the white point.
  */
 typedef NS_ENUM(NSInteger, AVCaptureColorSpace) {
-	AVCaptureColorSpace_sRGB   NS_SWIFT_NAME(sRGB)   = 0,
-	AVCaptureColorSpace_P3_D65 NS_SWIFT_NAME(P3_D65) = 1,
+    AVCaptureColorSpace_sRGB   = 0,
+    AVCaptureColorSpace_P3_D65 = 1,
 } NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
 
+
 @interface AVCaptureDevice (AVCaptureDeviceColorSpaceSupport)
 
 /*!
@@ -1573,22 +1618,65 @@
 @end
 
 
+@interface AVCaptureDevice (AVCaptureDeviceDepthSupport)
+
+/*!
+ @property activeDepthDataFormat
+ @abstract
+    The currently active depth data format of the receiver.
+
+ @discussion
+    This property can be used to get or set the device's currently active depth data format. -setActiveDepthDataFormat: throws an NSInvalidArgumentException if set to a format not present in the activeFormat's -supportedDepthDataFormats array. -setActiveDepthDataFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeDepthDataFormat by key value observing this property. On devices where depth data is not supported, this property returns nil.
+ 
+    The frame rate of depth data may not be set directly. Depth data frame rate is synchronized to the device's activeMin/MaxFrameDurations. It may match the device's current frame rate, or lower, if depth data cannot be produced fast enough for the active video frame rate.
+ 
+    Delivery of depth data to an AVCaptureDepthDataOutput may increase the system load, resulting in a reduced video frame rate for thermal sustainability.
+ */
+@property(nonatomic, retain, nullable) AVCaptureDeviceFormat *activeDepthDataFormat NS_AVAILABLE_IOS(11_0);
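
A sketch of opting in to depth delivery: pick the first companion depth format of the active video format, if one exists:

    AVCaptureDeviceFormat *depthFormat = device.activeFormat.supportedDepthDataFormats.firstObject;
    if (depthFormat) {
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            device.activeDepthDataFormat = depthFormat;
            [device unlockForConfiguration];
        }
    }
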
+
+/*!
+ @property minAvailableVideoZoomFactor
+ @abstract
+    Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
+ 
+ @discussion
+    On non-dual camera devices the minAvailableVideoZoomFactor is always 1.0. On a dual camera device the minAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMinZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value smaller than 1.0, an NSRangeException is thrown. Setting the videoZoomFactor to a value greater than or equal to 1.0, but lower than minAvailableVideoZoomFactor results in the value being clamped to the minAvailableVideoZoomFactor. Clients can key value observe the value of this property.
+ */
+@property(nonatomic, readonly) CGFloat minAvailableVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property maxAvailableVideoZoomFactor
+ @abstract
+    Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
+ 
+ @discussion
+    On non-dual camera devices the maxAvailableVideoZoomFactor is always equal to the activeFormat.videoMaxZoomFactor. On a dual camera device the maxAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMaxZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value greater than activeFormat.videoMaxZoomFactor, an NSRangeException is thrown. Setting the videoZoomFactor to a value less than or equal to activeFormat.videoMaxZoomFactor, but greater than maxAvailableVideoZoomFactor results in the value being clamped to the maxAvailableVideoZoomFactor. Clients can key value observe the value of this property.
+ */
+@property(nonatomic, readonly) CGFloat maxAvailableVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
+@end
+
+
+#pragma mark - AVCaptureDeviceDiscoverySession
+
 /*!
  @class AVCaptureDeviceDiscoverySession
  @abstract
     The AVCaptureDeviceDiscoverySession allows clients to search for devices by certain criteria.
-
+ 
  @discussion
     This class allows clients to discover devices by providing certain search criteria. The objective of this class is to help find devices by device type and optionally by media type or position and allow you to key-value observe changes to the returned devices list.
  */
 NS_CLASS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
 @interface AVCaptureDeviceDiscoverySession : NSObject
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @method discoverySessionWithDeviceTypes:
  @abstract
     Returns an AVCaptureDeviceDiscoverySession instance for the given device types, media type, and position.
-
+ 
  @param deviceTypes
     An array specifying the device types to include in the list of discovered devices.
  @param mediaType
@@ -1597,34 +1685,34 @@
     The position to include in the list of discovered devices. Pass AVCaptureDevicePositionUnspecified to search for devices with any position.
  @result
     The AVCaptureDeviceDiscoverySession from which the list of devices can be obtained.
-
+ 
  @discussion
     The list of device types is mandatory. This is used to make sure that clients only get access to devices of types they expect. This prevents new device types from automatically being included in the list of devices.
  */
-+ (instancetype)discoverySessionWithDeviceTypes:(NSArray<AVCaptureDeviceType> *)deviceTypes mediaType:(NSString *)mediaType position:(AVCaptureDevicePosition)position;
-
-- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)discoverySessionWithDeviceTypes:(NSArray<AVCaptureDeviceType> *)deviceTypes mediaType:(nullable AVMediaType)mediaType position:(AVCaptureDevicePosition)position;
 
 /*!
  @property devices
  @abstract
     The list of devices that comply to the search criteria specified on the discovery session.
-
+ 
  @discussion
-    The returned array contains only devices that are available at the time the method is called. Applications can key-value observe this property to be notified when the list of available devices has changed.
+    The returned array contains only devices that are available at the time the method is called. Applications can key-value observe this property to be notified when the list of available devices has changed. For apps linked against iOS 10, the devices returned are unsorted. For apps linked against iOS 11 or later, the devices are sorted by AVCaptureDeviceType, matching the order specified in the deviceTypes parameter of +[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:mediaType:position:]. If a position of AVCaptureDevicePositionUnspecified is specified, the results are further ordered by position in the AVCaptureDevicePosition enum.
  */
 @property(nonatomic, readonly) NSArray<AVCaptureDevice *> *devices;
 
 @end
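
A sketch of a discovery session for back-facing cameras; on iOS 11 the devices array comes back ordered to match the deviceTypes list, per the revised discussion above:

    NSArray<AVCaptureDeviceType> *types = @[ AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                             AVCaptureDeviceTypeBuiltInTelephotoCamera ];
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:types
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionBack];
    AVCaptureDevice *camera = discovery.devices.firstObject; // nil if nothing matched
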
 
 
+#pragma mark - AVFrameRateRange
+
 @class AVFrameRateRangeInternal;
 
 /*!
  @class AVFrameRateRange
  @abstract
     An AVFrameRateRange expresses a range of valid frame rates as min and max rate and min and max duration.
-
+ 
  @discussion
     An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat wraps a CMFormatDescription and expresses a range of valid video frame rates as an NSArray of AVFrameRateRange objects. AVFrameRateRange expresses min and max frame rate as a rate in frames per second and duration (CMTime). An AVFrameRateRange object is immutable. Its values do not change for the life of the object.
  */
@@ -1635,11 +1723,13 @@
     AVFrameRateRangeInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property minFrameRate
  @abstract
     A Float64 indicating the minimum frame rate supported by this range.
-
+ 
  @discussion
     This read-only property indicates the minimum frame rate supported by this range in frames per second.
  */
@@ -1649,7 +1739,7 @@
  @property maxFrameRate
  @abstract
     A Float64 indicating the maximum frame rate supported by this range.
-
+ 
  @discussion
     This read-only property indicates the maximum frame rate supported by this range in frames per second.
  */
@@ -1659,7 +1749,7 @@
  @property maxFrameDuration
  @abstract
     A CMTime indicating the maximum frame duration supported by this range.
-
+ 
  @discussion
     This read-only property indicates the maximum frame duration supported by this range. It is the reciprocal of minFrameRate, and expresses minFrameRate as a duration.
  */
@@ -1669,7 +1759,7 @@
  @property minFrameDuration
  @abstract
     A CMTime indicating the minimum frame duration supported by this range.
-
+ 
  @discussion
     This read-only property indicates the minimum frame duration supported by this range. It is the reciprocal of maxFrameRate, and expresses maxFrameRate as a duration.
  */
@@ -1694,7 +1784,7 @@
  */
 typedef NS_ENUM(NSInteger, AVCaptureVideoStabilizationMode) {
     AVCaptureVideoStabilizationModeOff       = 0,
-    AVCaptureVideoStabilizationModeStandard	 = 1,
+    AVCaptureVideoStabilizationModeStandard  = 1,
     AVCaptureVideoStabilizationModeCinematic = 2,
     AVCaptureVideoStabilizationModeAuto      = -1,
 } NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
@@ -1713,19 +1803,22 @@
     Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus.
  */
 typedef NS_ENUM(NSInteger, AVCaptureAutoFocusSystem) {
-	AVCaptureAutoFocusSystemNone              = 0,
-	AVCaptureAutoFocusSystemContrastDetection = 1,
-	AVCaptureAutoFocusSystemPhaseDetection    = 2,
+    AVCaptureAutoFocusSystemNone              = 0,
+    AVCaptureAutoFocusSystemContrastDetection = 1,
+    AVCaptureAutoFocusSystemPhaseDetection    = 2,
 } NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 
+#pragma mark - AVCaptureDeviceFormat
+
+@class AVCaptureOutput;
 @class AVCaptureDeviceFormatInternal;
 
 /*!
  @class AVCaptureDeviceFormat
  @abstract
     An AVCaptureDeviceFormat wraps a CMFormatDescription and other format-related information, such as min and max framerate.
-
+ 
  @discussion
     An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat is a thin wrapper around a CMFormatDescription, and can carry associated device format information that doesn't go in a CMFormatDescription, such as min and max frame rate. An AVCaptureDeviceFormat object is immutable. Its values do not change for the life of the object.
  */
@@ -1736,21 +1829,23 @@
     AVCaptureDeviceFormatInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property mediaType
  @abstract
     An NSString describing the media type of an AVCaptureDevice active or supported format.
-
+ 
  @discussion
     Supported mediaTypes are listed in AVMediaFormat.h. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
  */
-@property(nonatomic, readonly) NSString *mediaType;
+@property(nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property formatDescription
  @abstract
     A CMFormatDescription describing an AVCaptureDevice active or supported format.
-
+ 
  @discussion
     A CMFormatDescription describing an AVCaptureDevice active or supported format. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
  */
@@ -1760,11 +1855,11 @@
  @property videoSupportedFrameRateRanges
  @abstract
     A property indicating the format's supported frame rate ranges.
-
+ 
  @discussion
     videoSupportedFrameRateRanges is an array of AVFrameRateRange objects, one for each of the format's supported video frame rate ranges.
  */
-@property(nonatomic, readonly) NSArray *videoSupportedFrameRateRanges;
+@property(nonatomic, readonly) NSArray<AVFrameRateRange *> *videoSupportedFrameRateRanges;
 
 #if TARGET_OS_IPHONE
 
@@ -1772,7 +1867,7 @@
  @property videoFieldOfView
  @abstract
     A property indicating the format's field of view.
-
+ 
  @discussion
     videoFieldOfView is a float value indicating the receiver's field of view in degrees. If field of view is unknown, a value of 0 is returned.
  */
@@ -1782,7 +1877,7 @@
  @property videoBinned
  @abstract
     A property indicating whether the format is binned.
-
+ 
  @discussion
     videoBinned is a BOOL indicating whether the format is a binned format. Binning is a pixel-combining process which can result in greater low light sensitivity at the cost of reduced resolution.
  */
@@ -1805,7 +1900,7 @@
  @property videoStabilizationSupported
  @abstract
     A property indicating whether the format supports video stabilization.
-
+ 
  @discussion
     videoStabilizationSupported is a BOOL indicating whether the format can be stabilized using AVCaptureConnection -setEnablesVideoStabilizationWhenAvailable. This property is deprecated. Use isVideoStabilizationModeSupported: instead.
  */
@@ -1875,7 +1970,7 @@
  @property videoHDRSupported
  @abstract
     A property indicating whether the format supports high dynamic range streaming.
-
+ 
  @discussion
     videoHDRSupported is a BOOL indicating whether the format supports high dynamic range streaming. See AVCaptureDevice's videoHDREnabled property.
  */
@@ -1887,7 +1982,11 @@
     CMVideoDimensions indicating the highest resolution still image that can be produced by this format.
  
  @discussion
-    Normally, AVCaptureStillImageOutput emits images with the same dimensions as its source AVCaptureDevice's activeFormat. However, if you set highResolutionStillImageOutputEnabled to YES, AVCaptureStillImageOutput emits still images with its source AVCaptureDevice's activeFormat.highResolutionStillImageDimensions.
+    By default, AVCapturePhotoOutput and AVCaptureStillImageOutput emit images with the same dimensions as their source AVCaptureDevice's activeFormat.formatDescription property. Some device formats support high resolution photo output. That is, they can stream video to an AVCaptureVideoDataOutput or AVCaptureMovieFileOutput at one resolution while outputting photos to AVCapturePhotoOutput at a higher resolution. You may query this property to discover a video format's supported high resolution still image dimensions. See -[AVCapturePhotoOutput highResolutionPhotoEnabled], -[AVCapturePhotoSettings highResolutionPhotoEnabled], and -[AVCaptureStillImageOutput highResolutionStillImageOutputEnabled].
+ 
+    AVCaptureDeviceFormats of type AVMediaTypeDepthData may also support the delivery of a higher resolution depth data map to an AVCapturePhotoOutput. Chief differences are:
+       - Depth data accompanying still images is not supported by AVCaptureStillImageOutput. You must use AVCapturePhotoOutput.
+       - By opting in for depth data ( -[AVCapturePhotoSettings setDepthDataDeliveryEnabled:YES] ), you implicitly opt in for high resolution depth data if it's available. You may query the -[AVCaptureDevice activeDepthDataFormat]'s highResolutionStillImageDimensions to discover the depth data resolution that will be delivered with captured photos.
  */
 @property(nonatomic, readonly) CMVideoDimensions highResolutionStillImageDimensions NS_AVAILABLE_IOS(8_0);
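
A sketch of opting in to those high resolution stills via AVCapturePhotoOutput; photoOutput and photoCaptureDelegate are assumed to be set up elsewhere:

    photoOutput.highResolutionCaptureEnabled = YES; // must be set before the first capture
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
    settings.highResolutionPhotoEnabled = YES;
    [photoOutput capturePhotoWithSettings:settings delegate:photoCaptureDelegate];
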
 
@@ -1911,11 +2010,52 @@
  */
 @property(nonatomic, readonly) NSArray<NSNumber *> *supportedColorSpaces NS_AVAILABLE_IOS(10_0);
 
+/*!
+ @property videoMinZoomFactorForDepthDataDelivery
+ @abstract
+    Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
+ 
+ @discussion
+    Dual camera devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns 1.0.
+ */
+@property(nonatomic, readonly) CGFloat videoMinZoomFactorForDepthDataDelivery NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property videoMaxZoomFactorForDepthDataDelivery
+ @abstract
+    Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
+ 
+ @discussion
+    Dual camera devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns videoMaxZoomFactor.
+ */
+@property(nonatomic, readonly) CGFloat videoMaxZoomFactorForDepthDataDelivery NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property supportedDepthDataFormats
+ @abstract
+    Indicates this format's companion depth data formats.
+ 
+ @discussion
+    If no depth data formats are supported by the receiver, an empty array is returned. On dual camera devices, the supportedDepthDataFormats list items always match the aspect ratio of their paired video format. When the receiver is set as the device's activeFormat, you may set the device's activeDepthDataFormat to one of these supported depth data formats.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceFormat *> *supportedDepthDataFormats NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property unsupportedCaptureOutputClasses
+ @abstract
+    A property indicating AVCaptureOutput subclasses the receiver does not support.
+ 
+ @discussion
+    As a rule, AVCaptureDeviceFormats of a given mediaType are available for use with all AVCaptureOutputs that accept that media type, but there are exceptions. For instance, on iOS, the photo resolution video formats may not be used as sources for AVCaptureMovieFileOutput due to bandwidth limitations. On DualCamera devices, AVCaptureDepthDataOutput is not supported by the 12 MP device formats when using the -[AVCaptureDevice setActiveFormat:] API due to bandwidth limitations, though their use with -[AVCaptureSession setSessionPreset:AVCaptureSessionPresetPhoto] is supported. When using the photo preset, video is streamed at preview resolution rather than full sensor resolution.  
+ */
+@property(nonatomic, readonly) NSArray<Class> *unsupportedCaptureOutputClasses NS_AVAILABLE_IOS(11_0);
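
A sketch of the corresponding capability check when choosing a format (format is an assumed AVCaptureDeviceFormat):

    // Skip formats that cannot feed a movie file output.
    BOOL canRecordMovie = ![format.unsupportedCaptureOutputClasses containsObject:[AVCaptureMovieFileOutput class]];
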
+
 #endif // TARGET_OS_IPHONE
 
 @end
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+
+#pragma mark - AVCaptureDeviceInputSource
 
 @class AVCaptureDeviceInputSourceInternal;
 
@@ -1923,22 +2063,24 @@
  @class AVCaptureDeviceInputSource
  @abstract
     An AVCaptureDeviceInputSource represents a distinct input source on an AVCaptureDevice object.
-
+ 
  @discussion
     An AVCaptureDevice may optionally present an array of inputSources, representing distinct mutually exclusive inputs to the device, for example, an audio AVCaptureDevice might have ADAT optical and analog input sources. A video AVCaptureDevice might have an HDMI input source, or a component input source.
  */
-NS_CLASS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureDeviceInputSource : NSObject
 {
 @private
     AVCaptureDeviceInputSourceInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property inputSourceID
  @abstract
     An ID unique among the inputSources exposed by a given AVCaptureDevice.
-
+ 
  @discussion
     An AVCaptureDevice's inputSources array must contain AVCaptureInputSource objects with unique inputSourceIDs.
  */
@@ -1948,7 +2090,7 @@
  @property localizedName
  @abstract
     A localized human-readable name for the receiver.
-
+ 
  @discussion
     This property can be used for displaying the name of the capture device input source in a user interface.
  */
@@ -1956,4 +2098,4 @@
 
 @end
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	2016-09-23 21:02:18.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	2017-05-24 00:37:44.000000000 -0400
@@ -1,21 +1,23 @@
 /*
-    File:  AVCaptureOutputBase.h
- 	
- 	Framework:  AVFoundation
+    File:  AVCaptureFileOutput.h
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
+#import <CoreMedia/CMSampleBuffer.h>
 
-#pragma mark - AVCaptureFileOutput
+NS_ASSUME_NONNULL_BEGIN
 
-@class AVCaptureFileOutputInternal;
-@protocol AVCaptureFileOutputRecordingDelegate;
+#pragma mark AVCaptureFileOutput
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+@class AVCaptureFileOutputInternal;
+@class AVMetadataItem;
 @protocol AVCaptureFileOutputDelegate;
-#endif
+@protocol AVCaptureFileOutputRecordingDelegate;
 
 /*!
  @class AVCaptureFileOutput
@@ -24,41 +26,41 @@
  
  @discussion
     This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on Mac OS X, the output file can be changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
-
+ 
     On Mac OS X, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
-
+ 
     The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
  */
 NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureFileOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureFileOutputInternal *_fileOutputInternal;
+    AVCaptureFileOutputInternal *_fileOutputInternal;
 }
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property delegate
  @abstract
     The receiver's delegate.
-
+ 
  @discussion
     The value of this property is an object conforming to the AVCaptureFileOutputDelegate protocol that will be able to monitor and control recording along exact sample boundaries.
  */
-@property(nonatomic, assign) id<AVCaptureFileOutputDelegate> delegate NS_AVAILABLE(10_7, NA);
+@property(nonatomic, assign, nullable) id<AVCaptureFileOutputDelegate> delegate NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @property outputFileURL
  @abstract
     The file URL of the file to which the receiver is currently recording incoming buffers.
-
+ 
  @discussion
     The value of this property is an NSURL object containing the file URL of the file currently being written by the receiver. Returns nil if the receiver is not recording to any file.
  */
-@property(nonatomic, readonly) NSURL *outputFileURL;
+@property(nonatomic, readonly, nullable) NSURL *outputFileURL;
 
 /*!
  @method startRecordingToOutputFileURL:recordingDelegate:
@@ -69,30 +71,30 @@
     An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
  @param delegate
     An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
-
+ 
  @discussion
     The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
-
+ 
     Clients need not call stopRecording before calling this method while another recording is in progress. On Mac OS X, if this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
-
+ 
     When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
-
+ 
     On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
-
+ 
     Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:. Use -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
  */
-- (void)startRecordingToOutputFileURL:(NSURL*)outputFileURL recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
+- (void)startRecordingToOutputFileURL:(NSURL *)outputFileURL recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
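
A sketch of kicking off a recording, assuming a configured movieOutput (AVCaptureMovieFileOutput) on a running session and a delegate conforming to AVCaptureFileOutputRecordingDelegate:

    NSURL *url = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mov"]];
    [[NSFileManager defaultManager] removeItemAtURL:url error:NULL]; // recording fails if the file already exists
    [movieOutput startRecordingToOutputFileURL:url recordingDelegate:self];
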
 
 /*!
  @method stopRecording
  @abstract
     Tells the receiver to stop recording to the current file.
-
+ 
  @discussion
     Clients can call this method when they want to stop recording new samples to the current file, and do not want to continue recording to another file. Clients that want to switch from one file to another should not call this method. Instead they should simply call startRecordingToOutputFileURL:recordingDelegate: with the new file URL.
-
+ 
     When recording is stopped either by calling this method, by changing files using startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, before using the file, clients must wait until the delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
-
+ 
     On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
  */
 - (void)stopRecording;
@@ -101,55 +103,55 @@
  @property recording
  @abstract
     Indicates whether the receiver is currently recording.
-
+ 
  @discussion
     The value of this property is YES when the receiver currently has a file to which it is writing new samples, NO otherwise.
  */
 @property(nonatomic, readonly, getter=isRecording) BOOL recording;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property recordingPaused
  @abstract
     Indicates whether recording to the current output file is paused.
-
+ 
  @discussion
     This property indicates recording to the file returned by outputFileURL has been previously paused using the pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new samples can be written to the same file in the future by calling resumeRecording.
  */
-@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method pauseRecording
  @abstract
     Pauses recording to the current output file.
-
+ 
  @discussion
     This method causes the receiver to stop writing captured samples to the current output file returned by outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
-
+ 
     On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
  */
-- (void)pauseRecording NS_AVAILABLE(10_7, NA);
+- (void)pauseRecording NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method resumeRecording
  @abstract
     Resumes recording to the current output file after it was previously paused using pauseRecording.
-
+ 
  @discussion
     This method causes the receiver to resume writing captured samples to the current output file returned by outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple media segments that are not contiguous in time to a single file.
-
+ 
     On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
  */
-- (void)resumeRecording NS_AVAILABLE(10_7, NA);
+- (void)resumeRecording NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @property recordedDuration
  @abstract
     Indicates the duration of the media recorded to the current output file.
-
+ 
  @discussion
     If recording is in progress, this property returns the total time recorded so far.
  */
@@ -159,17 +161,17 @@
  @property recordedFileSize
  @abstract
     Indicates the size, in bytes, of the data recorded to the current output file.
-
+ 
  @discussion
     If a recording is in progress, this property returns the size in bytes of the data recorded so far.
  */
-@property(nonatomic, readonly) int64_t recordedFileSize;	
+@property(nonatomic, readonly) int64_t recordedFileSize;
 
 /*!
  @property maxRecordedDuration
  @abstract
     Specifies the maximum duration of the media that should be recorded by the receiver.
-
+ 
  @discussion
     This property specifies a hard limit on the duration of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is kCMTimeInvalid, which indicates no limit.
  */
@@ -189,7 +191,7 @@
  @property minFreeDiskSpaceLimit
  @abstract
     Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
-
+ 
  @discussion
     This property specifies a hard lower limit on the amount of free space that must remain on a target volume for recording to continue. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error.
  */
@@ -197,12 +199,13 @@
 
 @end
 
+
 /*!
  @protocol AVCaptureFileOutputRecordingDelegate
  @abstract
     Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file.
  */
-__TVOS_PROHIBITED
+NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @protocol AVCaptureFileOutputRecordingDelegate <NSObject>
 
 @optional
@@ -211,67 +214,65 @@
  @method captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:
  @abstract
     Informs the delegate when the output has started writing to a file.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that started writing the file.
  @param fileURL
     The file URL of the file that is being written.
  @param connections
     An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
-
+ 
  @discussion
     This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
-
+ 
     Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+- (void)captureOutput:(AVCaptureFileOutput *)output didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections;
 
 /*!
  @method captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:
  @abstract
     Called whenever the output is recording to a file and successfully pauses the recording at the request of the client.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that has paused its file recording.
  @param fileURL
     The file URL of the file that is being written.
  @param connections
     An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
-
+ 
  @discussion
     Delegates can use this method to be informed when a request to pause recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to pauseRecording was made.
-
+ 
     Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didPauseRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections NS_AVAILABLE(10_7, NA);
+- (void)captureOutput:(AVCaptureFileOutput *)output didPauseRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:
  @abstract
     Called whenever the output, at the request of the client, successfully resumes a file recording that was paused.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that has resumed its paused file recording.
  @param fileURL
     The file URL of the file that is being written.
  @param connections
     An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
-
+ 
  @discussion
     Delegates can use this method to be informed when a request to resume recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to resumeRecording was made.
-
+ 
     Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didResumeRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections NS_AVAILABLE(10_7, NA);
+- (void)captureOutput:(AVCaptureFileOutput *)output didResumeRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:
  @abstract
     Informs the delegate when the output will stop writing new samples to a file.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that will finish writing the file.
  @param fileURL
     The file URL of the file that is being written.
@@ -279,15 +280,13 @@
     An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
  @param error
     An error describing what caused the file to stop recording, or nil if there was no error.
-
+ 
  @discussion
     This method is called when the file output will stop recording new samples to the file at outputFileURL, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
-
+ 
     Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput willFinishRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections error:(NSError *)error NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+- (void)captureOutput:(AVCaptureFileOutput *)output willFinishRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(nullable NSError *)error NS_AVAILABLE_MAC(10_7);
 
 @required
 
@@ -295,34 +294,34 @@
  @method captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
  @abstract
     Informs the delegate when all pending data has been written to an output file.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that has finished writing the file.
- @param fileURL
+ @param outputFileURL
     The file URL of the file that has been written.
  @param connections
     An array of AVCaptureConnection objects attached to the file output that provided the data that was written to the file.
  @param error
     An error describing what caused the file to stop recording, or nil if there was no error.
-
+ 
  @discussion
     This method is called when the file output has finished writing all data to a file whose recording was stopped, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
-
+ 
     Clients should not assume that this method will be called on a specific thread.
-
+ 
     Delegates are required to implement this method.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error;
+- (void)captureOutput:(AVCaptureFileOutput *)output didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(nullable NSError *)error;
 
 @end
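
A sketch of the one required delegate method; the error parameter is nullable per the signature above:

    - (void)captureOutput:(AVCaptureFileOutput *)output
    didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
          fromConnections:(NSArray<AVCaptureConnection *> *)connections
                    error:(NSError *)error
    {
        if (error != nil) {
            NSLog(@"Recording ended with error: %@", error);
        }
        // All pending data has been written; outputFileURL is now safe to use.
    }
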
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
 
 /*!
  @protocol AVCaptureFileOutputDelegate
  @abstract
     Defines an interface for delegates of AVCaptureFileOutput to monitor and control recordings along exact sample boundaries.
  */
+NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @protocol AVCaptureFileOutputDelegate <NSObject>
 
 @required
@@ -331,14 +330,14 @@
  @method captureOutputShouldProvideSampleAccurateRecordingStart:
  @abstract
     Allows a client to opt in to frame accurate record-start in captureOutput:didOutputSampleBuffer:fromConnection:
-
- @param captureOutput
+ 
+ @param output
     The AVCaptureFileOutput instance with which the delegate is associated.
-
+ 
  @discussion
     In apps linked before Mac OS X 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after Mac OS X 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
  */
-- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)captureOutput NS_AVAILABLE(10_8, NA);
+- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)output NS_AVAILABLE_MAC(10_8);
 
 @optional
 
@@ -346,31 +345,29 @@
  @method captureOutput:didOutputSampleBuffer:fromConnection:
  @abstract
     Gives the delegate the opportunity to inspect samples as they are received by the output and optionally start and stop recording at exact times.
-
- @param captureOutput
+ 
+ @param output
     The capture file output that is receiving the media data.
  @param sampleBuffer
     A CMSampleBuffer object containing the sample data and additional information about the sample, such as its format and presentation time.
  @param connection
     The AVCaptureConnection object attached to the file output from which the sample data was received.
-
+ 
  @discussion
     This method is called whenever the file output receives a single sample buffer (a single video frame or audio buffer, for example) from the given connection. This gives delegates an opportunity to start and stop recording or change output files at an exact sample boundary if -captureOutputShouldProvideSampleAccurateRecordingStart: returns YES. If called from within this method, the file output's startRecordingToOutputFileURL:recordingDelegate: and resumeRecording methods are guaranteed to include the received sample buffer in the new file, whereas calls to stopRecording and pauseRecording are guaranteed to include all samples leading up to those in the current sample buffer in the existing file.
-
+ 
     Delegates can gather information particular to the samples by inspecting the CMSampleBuffer object. Sample buffers always contain a single frame of video if called from this method but may also contain multiple samples of audio. For B-frame video formats, samples are always delivered in presentation order.
-
+ 
     Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
-
+ 
     Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be reused by the device system and other capture inputs. This is frequently the case for uncompressed device native capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped. If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long, but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be reused.
  
     Clients should not assume that this method will be called on a specific thread. In addition, this method is called periodically, so it must be efficient to prevent capture performance problems.
  */
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, NA);
+- (void)captureOutput:(AVCaptureFileOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
 
 @end
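
A minimal sketch of the sample-boundary delegate as now declared (Mac-only, per the new `NS_AVAILABLE_MAC(10_7)` annotation). Returning YES from the required method opts into frame-accurate start/stop at the cost of keeping compression settings applied while the session runs:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

@interface MyFrameAccurateDelegate : NSObject <AVCaptureFileOutputDelegate>
@end

@implementation MyFrameAccurateDelegate

// Required: called once, as soon as the delegate is added to the output.
- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)output
{
    return YES; // opt in to frame-accurate record start/stop
}

// Optional: inspect every sample buffer; must stay cheap to avoid dropped frames.
- (void)captureOutput:(AVCaptureFileOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    // Starting/stopping a recording here includes/excludes this exact buffer.
    (void)pts;
}

@end
```
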
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
 
 #pragma mark - AVCaptureMovieFileOutput
 
@@ -380,7 +377,7 @@
  @class AVCaptureMovieFileOutput
  @abstract
     AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie files.
-
+ 
  @discussion
     AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (Mac OS X), and specify an interval at which movie fragments should be written.
  */
@@ -388,17 +385,21 @@
 @interface AVCaptureMovieFileOutput : AVCaptureFileOutput
 {
 @private
-	AVCaptureMovieFileOutputInternal *_internal;
+    AVCaptureMovieFileOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @property movieFragmentInterval
  @abstract
     Specifies the frequency with which movie fragments should be written.
-
+ 
  @discussion
     When movie fragments are used, a partially written QuickTime movie file whose writing is unexpectedly interrupted can be successfully opened and played up to multiples of the specified time interval. A value of kCMTimeInvalid indicates that movie fragments should not be used, but that only a movie atom describing all of the media in the file should be written. The default value of this property is ten seconds.
-
+ 
     Changing the value of this property will not affect the movie fragment interval of the file currently being written, if there is one.
  */
 @property(nonatomic) CMTime movieFragmentInterval;
@@ -407,11 +408,11 @@
  @property metadata
  @abstract
     A collection of metadata to be written to the receiver's output files.
-
+ 
  @discussion
     The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file.
  */
-@property(nonatomic, copy) NSArray *metadata;
+@property(nonatomic, copy, nullable) NSArray<AVMetadataItem *> *metadata;
 
 #if TARGET_OS_IPHONE
 
@@ -419,56 +420,54 @@
  @property availableVideoCodecTypes
  @abstract
     Indicates the supported video codec formats that can be specified in setOutputSettingsForConnection:.
-
+ 
  @discussion
-    The value of this property is an NSArray of NSStrings that can be used as values for the AVVideoCodecKey in the receiver's setOutputSettingsForConnection: dictionary. The array of available video codecs may change depending on the current session preset.
+    The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's setOutputSettingsForConnection: dictionary. The array of available video codecs may change depending on the current session preset. The first codec in the array is used by default when recording a file.
  */
-@property(nonatomic, readonly) NSArray *availableVideoCodecTypes NS_AVAILABLE_IOS(10_0);
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableVideoCodecTypes NS_AVAILABLE_IOS(10_0);
 
 #endif // TARGET_OS_IPHONE
 
 /*!
  @method outputSettingsForConnection:
  @abstract
-    Returns the options the receiver uses to re-encode media from the given connection as it is being recorded.
-
+    Returns the options the receiver uses to encode media from the given connection as it is being recorded.
+ 
  @param connection
-    The connection delivering the media to be re-encoded.
+    The connection delivering the media to be encoded.
  @result
     An NSDictionary of output settings.
-
+ 
  @discussion
-    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. If the returned value is an empty dictionary (i.e. [NSDictionary dictionary], the format of the media from the connection will not be changed before being written to the file. If -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
+    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on the structure of an output settings dictionary. If the returned value is an empty dictionary (i.e. [NSDictionary dictionary]), the format of the media from the connection will not be changed before being written to the file. If -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
  */
-- (NSDictionary *)outputSettingsForConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
+- (NSDictionary<NSString *, id> *)outputSettingsForConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
 
 /*!
  @method setOutputSettings:forConnection:
  @abstract
     Sets the options the receiver uses to encode media from the given connection as it is being recorded.
-
+ 
  @param outputSettings
     An NSDictionary of output settings.
  @param connection
     The connection delivering the media to be encoded.
-
+ 
  @discussion
     See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. [NSDictionary dictionary]) means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
  
     On iOS, you may only specify the AVVideoCodecKey in the outputSettings. If you specify any other key, an NSInvalidArgumentException will be thrown. See the availableVideoCodecTypes property.
  */
-- (void)setOutputSettings:(NSDictionary *)outputSettings forConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
-
-#if TARGET_OS_IPHONE
+- (void)setOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
 
 /*!
  @method recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:
  @abstract
     Returns YES if the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
-
+ 
  @param connection
     A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
-
+ 
  @discussion
     See setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection: for details on the behavior controlled by this value. The default value returned is NO.
  */
@@ -481,32 +480,27 @@
  
  @param doRecordChanges
     If YES, the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
-
  @param connection
     A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
-
+ 
  @discussion
     When a recording is started the current state of a video capture connection's videoOrientation and videoMirrored properties are used to build the display matrix for the created video track. The movie file format allows only one display matrix per track, which means that any changes made during a recording to the videoOrientation and videoMirrored properties are not captured. For example, a user starts a recording with their device in the portrait orientation, and then partway through the recording changes the device to a landscape orientation. The landscape orientation requires a different display matrix, but only the initial display matrix (the portrait display matrix) is recorded for the video track.
-
+ 
     By invoking this method the client application directs the movie file output to create an additional track in the captured movie. This track is a timed metadata track that is associated with the video track, and contains one or more samples that contain a Video Orientation value (as defined by EXIF and TIFF specifications, which is enumerated by CGImagePropertyOrientation in <ImageIO/CGImageProperties.h>). The value represents the display matrix corresponding to the AVCaptureConnection's videoOrientation and videoMirrored properties when applied to the input source. The initial sample written to the timed metadata track represents the video track's display matrix. During recording additional samples will be written to the timed metadata track whenever the client application changes the video connection's videoOrientation or videoMirrored properties. Using the above example, when the client application detects the user changing the device from portrait to landscape orientation, it updates the video connection's videoOrientation property, thus causing the movie file output to add a new sample to the timed metadata track.
-	
+ 
     After capture, playback and editing applications can use the timed metadata track to enhance their users' experience. For example, when playing back the captured movie, a playback engine can use the samples to adjust the display of the video samples to keep the video properly oriented. Another example is an editing application that uses the sample times to suggest cut points for breaking the captured movie into separate clips, where each clip is properly oriented.
-	
+ 
     The default behavior is to not create the timed metadata track.
-	
+ 
     The doRecordChanges value is only observed at the start of recording. Changes to the value will not have any effect until the next recording is started.
  */
 - (void)setRecordsVideoOrientationAndMirroringChanges:(BOOL)doRecordChanges asMetadataTrackForConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(9_0);
 
-#endif // TARGET_OS_IPHONE
-
 @end
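
Taken together, a sketch of how the newly annotated movie file output API might be used; `session` is a hypothetical, already-configured AVCaptureSession:

```objc
AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
// Write a movie fragment every 5 seconds so an interrupted file stays playable.
movieOutput.movieFragmentInterval = CMTimeMakeWithSeconds(5.0, 600);

if ([session canAddOutput:movieOutput]) {
    [session addOutput:movieOutput];
}

AVCaptureConnection *videoConnection =
    [movieOutput connectionWithMediaType:AVMediaTypeVideo]; // nullable
if (videoConnection) {
    // iOS only: availableVideoCodecTypes is compiled under TARGET_OS_IPHONE,
    // and only AVVideoCodecKey may be specified in the output settings.
    AVVideoCodecType codec = movieOutput.availableVideoCodecTypes.firstObject;
    if (codec) {
        [movieOutput setOutputSettings:@{ AVVideoCodecKey : codec }
                         forConnection:videoConnection];
    }
}
```
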
 
 
 #pragma mark - AVCaptureAudioFileOutput
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
 @class AVCaptureAudioFileOutputInternal;
 
 /*!
@@ -517,68 +511,72 @@
  @discussion
     AVCaptureAudioFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to audio files. In addition, instances of AVCaptureAudioFileOutput allow clients to configure options specific to the audio file formats, including allowing them to write metadata collections to each file and specify audio encoding options.
  */
-NS_CLASS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureAudioFileOutput : AVCaptureFileOutput
 {
 @private
-	AVCaptureAudioFileOutputInternal *_internal;
+    AVCaptureAudioFileOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @method availableOutputFileTypes
- @abstract		
+ @abstract
     Provides the file types AVCaptureAudioFileOutput can write.
  
  @result
     An NSArray of UTIs identifying the file types the AVCaptureAudioFileOutput class can write.
  */
-+ (NSArray *)availableOutputFileTypes;
++ (NSArray<AVFileType> *)availableOutputFileTypes;
 
 /*!
  @method startRecordingToOutputFileURL:outputFileType:recordingDelegate:
  @abstract
     Tells the receiver to start recording to a new file of the specified format, and specifies a delegate that will be notified when recording is finished.
-
+ 
  @param outputFileURL
     An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
  @param fileType
     A UTI indicating the format of the file to be written.
  @param delegate
     An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
-
+ 
  @discussion
     The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
-
+ 
     The fileType argument is a UTI corresponding to the audio file format that should be written. UTIs for common audio file types are declared in AVMediaFormat.h.
-
+ 
     Clients need not call stopRecording before calling this method while another recording is in progress. If this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
-
+ 
     When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included to the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
-
+ 
     On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
  */
-- (void)startRecordingToOutputFileURL:(NSURL*)outputFileURL outputFileType:(NSString *)fileType recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
+- (void)startRecordingToOutputFileURL:(NSURL *)outputFileURL outputFileType:(AVFileType)fileType recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
 
 /*!
  @property metadata
  @abstract
     A collection of metadata to be written to the receiver's output files.
-
+ 
  @discussion
     The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file. Only ID3 v2.2, v2.3, or v2.4 style metadata items are supported.
  */
-@property(nonatomic, copy) NSArray *metadata; 
+@property(nonatomic, copy) NSArray<AVMetadataItem *> *metadata;
 
 /*!
  @property audioSettings
  @abstract
     Specifies the options the receiver uses to re-encode audio as it is being recorded.
-
+ 
  @discussion
     The output settings dictionary can contain values for keys from AVAudioSettings.h. A value of nil indicates that the format of the audio should not be changed before being written to the file.
  */
-@property(nonatomic, copy) NSDictionary *audioSettings;
+@property(nonatomic, copy, nullable) NSDictionary<NSString *, id> *audioSettings;
 
 @end
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+NS_ASSUME_NONNULL_END
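
A sketch of driving the Mac-only audio file output with the newly typed API (`AVFileType` for the UTI, generics on `availableOutputFileTypes`); `delegate` is a hypothetical object adopting AVCaptureFileOutputRecordingDelegate (see the sketch earlier) and the output path is illustrative:

```objc
AVCaptureAudioFileOutput *audioOutput = [[AVCaptureAudioFileOutput alloc] init];

// The writable file types are now vended as AVFileType UTIs.
NSArray<AVFileType> *fileTypes = [AVCaptureAudioFileOutput availableOutputFileTypes];
NSLog(@"Writable audio file types: %@", fileTypes);

NSURL *outputURL = [NSURL fileURLWithPath:@"/tmp/capture.m4a"]; // illustrative path
[audioOutput startRecordingToOutputFileURL:outputURL
                            outputFileType:AVFileTypeAppleM4A
                         recordingDelegate:delegate];
```
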
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2016-09-12 23:29:49.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2017-05-24 00:41:54.000000000 -0400
@@ -1,19 +1,24 @@
 /*
-	File:  AVCaptureInput.h
-
-	Framework:  AVFoundation
+    File:  AVCaptureInput.h
+ 
+    Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMFormatDescription.h>
 #import <CoreMedia/CMSync.h>
-#if TARGET_OS_MAC && ! (TARGET_OS_EMBEDDED || TARGET_OS_IPHONE || TARGET_OS_WIN32)
-	#import <ApplicationServices/../Frameworks/CoreGraphics.framework/Headers/CGDirectDisplay.h>
+#if TARGET_OS_OSX
+    #import <CoreGraphics/CGDirectDisplay.h>
 #endif
 
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureInput
+
 @class AVCaptureInputPort;
 @class AVCaptureInputInternal;
 @class AVTimedMetadataGroup;
@@ -22,7 +27,7 @@
  @class AVCaptureInput
  @abstract
     AVCaptureInput is an abstract class that provides an interface for connecting capture input sources to an AVCaptureSession.
-
+ 
  @discussion
     Concrete instances of AVCaptureInput representing input sources such as cameras can be added to instances of AVCaptureSession using the -[AVCaptureSession addInput:] method. An AVCaptureInput vends one or more streams of media data. For example, input devices can provide both audio and video data. Each media stream provided by an input is represented by an AVCaptureInputPort object. Within a capture session, connections are made between AVCaptureInput instances and AVCaptureOutput instances via AVCaptureConnection objects that define the mapping between a set of AVCaptureInputPort objects and a single AVCaptureOutput.
  */
@@ -30,18 +35,20 @@
 @interface AVCaptureInput : NSObject 
 {
 @private
-	AVCaptureInputInternal *_inputInternal;
+    AVCaptureInputInternal *_inputInternal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property ports
  @abstract
     The ports owned by the receiver.
-
+ 
  @discussion
     The value of this property is an array of AVCaptureInputPort objects, each exposing an interface to a single stream of media data provided by an input.
  */
-@property(nonatomic, readonly) NSArray *ports;
+@property(nonatomic, readonly) NSArray<AVCaptureInputPort *> *ports;
 
 @end
 
@@ -50,19 +57,22 @@
  @constant AVCaptureInputPortFormatDescriptionDidChangeNotification
  @abstract
     This notification is posted when the value of an AVCaptureInputPort instance's formatDescription property changes.
-
+ 
  @discussion
     The notification object is the AVCaptureInputPort instance whose format description changed.
  */
 AVF_EXPORT NSString *const AVCaptureInputPortFormatDescriptionDidChangeNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
+
+#pragma mark - AVCaptureInputPort
+
 @class AVCaptureInputPortInternal;
 
 /*!
  @class AVCaptureInputPort
  @abstract
     An AVCaptureInputPort describes a single stream of media data provided by an AVCaptureInput and provides an interface for connecting that stream to AVCaptureOutput instances via AVCaptureConnection.
-
+ 
  @discussion
     Instances of AVCaptureInputPort cannot be created directly. An AVCaptureInput exposes its input ports via its ports property. Input ports provide information about the format of their media data via the mediaType and formatDescription properties, and allow clients to control the flow of data via the enabled property. Input ports are used by an AVCaptureConnection to define the mapping between inputs and outputs in an AVCaptureSession.
  */
@@ -73,11 +83,13 @@
     AVCaptureInputPortInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property input
  @abstract
     The input that owns the receiver.
-
+ 
  @discussion
     The value of this property is an AVCaptureInput instance that owns the receiver.
  */
@@ -87,27 +99,27 @@
  @property mediaType
  @abstract
     The media type of the data provided by the receiver.
-
+ 
  @discussion
     The value of this property is a constant describing the type of media, such as AVMediaTypeVideo or AVMediaTypeAudio, provided by the receiver. Media type constants are defined in AVMediaFormat.h.
  */
-@property(nonatomic, readonly) NSString *mediaType;
+@property(nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property formatDescription
  @abstract
     The format of the data provided by the receiver.
-
+ 
  @discussion
     The value of this property is a CMFormatDescription that describes the format of the media data currently provided by the receiver. Clients can be notified of changes to the format by observing the AVCaptureInputPortFormatDescriptionDidChangeNotification.
  */
-@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription;
+@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMFormatDescriptionRef formatDescription;
 
 /*!
  @property enabled
  @abstract
     Whether the receiver should provide data.
-
+ 
  @discussion
     The value of this property is a BOOL that determines whether the receiver should provide data to outputs when a session is running. Clients can set this property to fine tune which media streams from a given input will be used during capture. The default value is YES.
  */
@@ -120,11 +132,14 @@
  
  @discussion
     The clock is read-only.
-  */
-@property(nonatomic, readonly) __attribute__((NSObject)) CMClockRef clock NS_AVAILABLE(10_9, 7_0);
+ */
+@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMClockRef clock NS_AVAILABLE(10_9, 7_0);
 
 @end
 
+
+#pragma mark - AVCaptureDeviceInput
+
 @class AVCaptureDevice;
 @class AVCaptureDeviceInputInternal;
 
@@ -132,7 +147,7 @@
  @class AVCaptureDeviceInput
  @abstract
     AVCaptureDeviceInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from an AVCaptureDevice.
-
+ 
  @discussion
     Instances of AVCaptureDeviceInput are input sources for AVCaptureSession that provide media data from devices connected to the system, represented by instances of AVCaptureDevice.
  */
@@ -140,48 +155,48 @@
 @interface AVCaptureDeviceInput : AVCaptureInput 
 {
 @private
-	AVCaptureDeviceInputInternal *_internal;
+    AVCaptureDeviceInputInternal *_internal;
 }
 
 /*!
  @method deviceInputWithDevice:error:
  @abstract
     Returns an AVCaptureDeviceInput instance that provides media data from the given device.
-
+ 
  @param device
     An AVCaptureDevice instance to be used for capture.
  @param outError
     On return, if the given device cannot be used for capture, points to an NSError describing the problem.
  @result
     An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
-
+ 
  @discussion
     This method returns an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
  */
-+ (instancetype)deviceInputWithDevice:(AVCaptureDevice *)device error:(NSError **)outError;
++ (nullable instancetype)deviceInputWithDevice:(AVCaptureDevice *)device error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @method initWithDevice:error:
  @abstract
     Creates an AVCaptureDeviceInput instance that provides media data from the given device.
-
+ 
  @param device
     An AVCaptureDevice instance to be used for capture.
  @param outError
     On return, if the given device cannot be used for capture, points to an NSError describing the problem.
  @result
     An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
-
+ 
  @discussion
     This method creates an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
  */
-- (instancetype)initWithDevice:(AVCaptureDevice *)device error:(NSError **)outError;
+- (nullable instancetype)initWithDevice:(AVCaptureDevice *)device error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @property device
  @abstract
     The device from which the receiver provides data.
-
+ 
  @discussion
     The value of this property is the AVCaptureDevice instance that was used to create the receiver.
  */
@@ -190,7 +205,7 @@
 @end
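
With the new nullability annotations, both the factory and the initializer can return nil and take a doubly-nullable error out-parameter, so callers should check both; a sketch, where `session` is a hypothetical AVCaptureSession:

```objc
AVCaptureDevice *device =
    [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input =
    [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    NSLog(@"Could not open capture device: %@", error);
} else if ([session canAddInput:input]) {
    [session addInput:input];
}
```
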
 
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#pragma mark - AVCaptureScreenInput
 
 @class AVCaptureScreenInputInternal;
 
@@ -198,18 +213,32 @@
  @class AVCaptureScreenInput
  @abstract
     AVCaptureScreenInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from a screen or portion thereof.
-
+ 
  @discussion
     Instances of AVCaptureScreenInput are input sources for AVCaptureSession that provide media data from one of the screens connected to the system, represented by CGDirectDisplayIDs.
  */
-NS_CLASS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureScreenInput : AVCaptureInput 
 {
 @private
-	AVCaptureScreenInputInternal *_internal;
+    AVCaptureScreenInputInternal *_internal;
 }
 
 /*!
+ @method init
+ @abstract
+    Creates an AVCaptureScreenInput instance that provides media data from the main display.
+ 
+ @discussion
+    This method creates an instance of AVCaptureScreenInput using the main display whose id is returned from CGMainDisplayID().
+ */
+- (instancetype)init;
+
++ (instancetype)new;
+
+#if TARGET_OS_OSX
+
+/*!
  @method initWithDisplayID:
  @abstract
     Creates an AVCaptureScreenInput instance that provides media data from the given display.
@@ -218,17 +247,19 @@
     The id of the display from which to capture video. CGDirectDisplayID is defined in <CoreGraphics/CGDirectDisplay.h>
  @result
     An AVCaptureScreenInput instance that provides data from the given screen, or nil, if the screen could not be used for capture.
-
+ 
  @discussion
     This method creates an instance of AVCaptureScreenInput that can be used to capture data from a display in an AVCaptureSession. This method validates the displayID. If the display cannot be used because it is not available on the system, for example, this method returns nil.
  */
 - (instancetype)initWithDisplayID:(CGDirectDisplayID)displayID;
 
+#endif // TARGET_OS_OSX
+
 /*!
  @property minFrameDuration
  @abstract
     A property indicating the screen input's minimum frame duration.
-
+ 
  @discussion
     An AVCaptureScreenInput's minFrameDuration is the reciprocal of its maximum frame rate. This property may be used to request a maximum frame rate at which the input produces video frames. The requested rate may not be achievable due to overall bandwidth, so actual frame rates may be lower.
  */
@@ -238,7 +269,7 @@
  @property cropRect
  @abstract
     A property indicating the bounding rectangle of the screen area to be captured in pixels.
-
+ 
  @discussion
     By default, AVCaptureScreenInput captures the entire area of the displayID with which it is associated. To limit the capture rectangle to a subsection of the screen, set the cropRect property, which defines a smaller section of the screen in the screen's coordinate system. The origin (0,0) is the bottom-left corner of the screen.
  */
@@ -248,7 +279,7 @@
  @property scaleFactor
  @abstract
     A property indicating the factor by which video buffers captured from the screen are to be scaled.
-
+ 
  @discussion
     By default, AVCaptureScreenInput captures the video buffers from the display at a scale factor of 1.0 (no scaling). Set this property to scale the buffers by a given factor. For instance, a 320x240 capture area with a scaleFactor of 2.0f produces video buffers at 640x480.
  */
@@ -258,7 +289,7 @@
  @property capturesMouseClicks
  @abstract
     A property indicating whether mouse clicks should be highlighted in the captured output.
-
+ 
  @discussion
     By default, AVCaptureScreenInput does not highlight mouse clicks in its captured output. If this property is set to YES, mouse clicks are highlighted (a circle is drawn around the mouse for the duration of the click) in the captured output.
  */
@@ -268,28 +299,28 @@
  @property capturesCursor
  @abstract
     A property indicating whether the cursor should be rendered to the captured output.
-
+ 
  @discussion
     By default, AVCaptureScreenInput draws the cursor in its captured output. If this property is set to NO, the captured output contains only the windows on the screen. The cursor is omitted. Note that cursor position and mouse button state at the time of capture are preserved in CMSampleBuffers emitted from AVCaptureScreenInput. See the inline documentation for kCMIOSampleBufferAttachmentKey_MouseAndKeyboardModifiers in <CoreMediaIO/CMIOSampleBuffer.h>.
  */
-@property(nonatomic) BOOL capturesCursor NS_AVAILABLE(10_8, NA);
+@property(nonatomic) BOOL capturesCursor NS_AVAILABLE_MAC(10_8);
 
 /*!
  @property removesDuplicateFrames
  @abstract
     A property indicating whether duplicate frames should be removed by the input.
-
+ 
  @discussion
     If this property is set to YES, AVCaptureScreenInput performs frame differencing and when it detects duplicate frames, it drops them. If set to NO, the captured output receives all frames from the input. Prior to 10.9 this value defaulted to YES. In 10.9 and later, it defaults to NO, as modern platforms support frame differencing in hardware-based encoders.
-	
+ 
     As of 10.10, this property has been deprecated and is ignored. Clients wishing to re-create this functionality can use an AVCaptureVideoDataOutput and compare frame contents in their own code. If they wish to write a movie file, they can then pass the unique frames to an AVAssetWriterInput.
  */
-@property(nonatomic) BOOL removesDuplicateFrames NS_DEPRECATED(10_8, 10_10, NA, NA);
+@property(nonatomic) BOOL removesDuplicateFrames NS_DEPRECATED_MAC(10_8, 10_10);
 
 @end
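
A macOS-only sketch of the screen input as now declared: plain `-init` captures the main display, while `-initWithDisplayID:` is compiled only under TARGET_OS_OSX. The values below are illustrative:

```objc
// Captures CGMainDisplayID() by default.
AVCaptureScreenInput *screenInput = [[AVCaptureScreenInput alloc] init];
screenInput.minFrameDuration = CMTimeMake(1, 30);   // request at most 30 fps
screenInput.cropRect = CGRectMake(0, 0, 1280, 720); // origin is bottom-left
screenInput.scaleFactor = 1.0;                      // no scaling
screenInput.capturesCursor = YES;
screenInput.capturesMouseClicks = YES;
```
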
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
 
+#pragma mark - AVCaptureMetadataInput
 
 @class AVCaptureMetadataInputInternal;
 
@@ -297,11 +328,11 @@
  @class AVCaptureMetadataInput
  @abstract
     AVCaptureMetadataInput is a concrete subclass of AVCaptureInput that provides a way for clients to supply AVMetadataItems to an AVCaptureSession.
-
+ 
  @discussion
     Instances of AVCaptureMetadataInput are input sources for AVCaptureSession that provide AVMetadataItems to an AVCaptureSession. AVCaptureMetadataInputs present one and only one AVCaptureInputPort, which currently may only be connected to an AVCaptureMovieFileOutput. The metadata supplied over the input port is provided by the client, and must conform to a client-supplied CMFormatDescription. The AVMetadataItems are supplied in an AVTimedMetadataGroup.
  */
-NS_CLASS_AVAILABLE(NA, 9_0) __TVOS_PROHIBITED
+NS_CLASS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED
 @interface AVCaptureMetadataInput : AVCaptureInput 
 {
 @private
@@ -312,14 +343,14 @@
  @method metadataInputWithFormatDescription:clock:
  @abstract
     Returns an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
-
+ 
  @param desc
     A CMFormatDescription that defines the metadata to be supplied by the client. Throws an NSInvalidArgumentException if NULL is passed.
  @param clock
    A CMClock that provides the timebase for the supplied samples. Throws an NSInvalidArgumentException if NULL is passed.
  @result
     An AVCaptureMetadataInput instance.
-
+ 
  @discussion
     This method returns an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
  */
@@ -329,14 +360,14 @@
  @method initWithFormatDescription:clock:
  @abstract
     Creates an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
-
+ 
  @param desc
     A CMFormatDescription that defines the metadata to be supplied by the client. Throws NSInvalidArgumentException if NULL is passed.
  @param clock
    A CMClock that provides the timebase for the supplied samples. Throws an NSInvalidArgumentException if NULL is passed.
  @result
     An AVCaptureMetadataInput instance, or nil, if the device could not be used for capture.
-
+ 
  @discussion
     This method creates an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
  */
@@ -346,13 +377,15 @@
  @method appendTimedMetadataGroup:
  @abstract
     Provides metadata to the AVCaptureSession.
-
+ 
  @param metadata
     An AVTimedMetadataGroup of metadata. Will throw an exception if nil. In order to denote a period of no metadata, an empty AVTimedMetadataGroup should be passed.
-
+ 
  @discussion
-    The provided AVTimedMetadataGroup will be provided to the AVCaptureSession. The group's presentation timestamp is expressed in the context of the clock supplied to the initializer. It is not required that the AVTimedMetadataGroup have a duration;  an empty AVTimedMetadataGroup can be supplied to denote a period of no metadata.
+    The provided AVTimedMetadataGroup will be provided to the AVCaptureSession. The group's presentation timestamp is expressed in the context of the clock supplied to the initializer. It is not required that the AVTimedMetadataGroup have a duration; an empty AVTimedMetadataGroup can be supplied to denote a period of no metadata.
  */
-- (BOOL)appendTimedMetadataGroup:(AVTimedMetadataGroup *)metadata error:(NSError **)outError;
+- (BOOL)appendTimedMetadataGroup:(AVTimedMetadataGroup *)metadata error:(NSError * _Nullable * _Nullable)outError;
 
 @end
+
+NS_ASSUME_NONNULL_END
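
A sketch of supplying timed metadata through the input; `desc`, `clock`, `items`, and `pts` are assumed to exist elsewhere (a CMMetadataFormatDescription, a CMClock, an NSArray of AVMetadataItems, and a presentation timestamp on that clock):

```objc
AVCaptureMetadataInput *metadataInput =
    [AVCaptureMetadataInput metadataInputWithFormatDescription:desc clock:clock];

// Per the header above, a duration is not required for the group.
AVTimedMetadataGroup *group =
    [[AVTimedMetadataGroup alloc] initWithItems:items
                                      timeRange:CMTimeRangeMake(pts, kCMTimeInvalid)];
NSError *error = nil;
if (![metadataInput appendTimedMetadataGroup:group error:&error]) {
    NSLog(@"Could not append metadata: %@", error);
}
```
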
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	2016-09-23 20:45:16.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,14 +1,17 @@
 /*
     File:  AVCaptureMetadataOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVMetadataObject.h>
+
+NS_ASSUME_NONNULL_BEGIN
 
-#pragma mark - AVCaptureMetadataOutput
+#pragma mark AVCaptureMetadataOutput
 
 @class AVCaptureMetadataOutputInternal;
 @protocol AVCaptureMetadataOutputObjectsDelegate;
@@ -17,35 +20,39 @@
  @class AVCaptureMetadataOutput
  @abstract
     AVCaptureMetadataOutput is a concrete subclass of AVCaptureOutput that can be used to process metadata objects from an attached connection.
-
+ 
  @discussion
     Instances of AVCaptureMetadataOutput emit arrays of AVMetadataObject instances (see AVMetadataObject.h), such as detected faces. Applications can access the metadata objects with the captureOutput:didOutputMetadataObjects:fromConnection: delegate method.
-*/
-NS_CLASS_AVAILABLE(NA, 6_0) __TVOS_PROHIBITED
+ */
+NS_CLASS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED
 @interface AVCaptureMetadataOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureMetadataOutputInternal *_internal;
+    AVCaptureMetadataOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @method setMetadataObjectsDelegate:queue:
  @abstract
     Sets the receiver's delegate that will accept metadata objects and dispatch queue on which the delegate will be called.
-
+ 
  @param objectsDelegate
     An object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured.
  @param objectsCallbackQueue
     A dispatch queue on which all delegate methods will be called.
-
+ 
  @discussion
     When new metadata objects are captured in the receiver's connection, they will be vended to the delegate using the captureOutput:didOutputMetadataObjects:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue.
-
+ 
     Clients that need to minimize the chances of metadata being dropped should specify a queue on which a sufficiently small amount of processing is performed along with receiving metadata objects.
-
+ 
     A serial dispatch queue must be used to guarantee that metadata objects will be delivered in order. The objectsCallbackQueue parameter may not be NULL, except when setting the objectsDelegate to nil.
-*/
-- (void)setMetadataObjectsDelegate:(id<AVCaptureMetadataOutputObjectsDelegate>)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue;
+ */
+- (void)setMetadataObjectsDelegate:(nullable id<AVCaptureMetadataOutputObjectsDelegate>)objectsDelegate queue:(nullable dispatch_queue_t)objectsCallbackQueue;
 
 /*!
  @property metadataObjectsDelegate
@@ -54,18 +61,18 @@
  
  @discussion
     The value of this property is an object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured. The delegate is set using the setMetadataObjectsDelegate:queue: method.
-*/
-@property(nonatomic, readonly) id<AVCaptureMetadataOutputObjectsDelegate> metadataObjectsDelegate;
+ */
+@property(nonatomic, readonly, nullable) id<AVCaptureMetadataOutputObjectsDelegate> metadataObjectsDelegate;
 
 /*!
  @property metadataObjectsCallbackQueue
  @abstract
     The dispatch queue on which all metadata object delegate methods will be called.
-
+ 
  @discussion
     The value of this property is a dispatch_queue_t. The queue is set using the setMetadataObjectsDelegate:queue: method.
-*/
-@property(nonatomic, readonly) dispatch_queue_t metadataObjectsCallbackQueue;
+ */
+@property(nonatomic, readonly, nullable) dispatch_queue_t metadataObjectsCallbackQueue;
 
 /*!
  @property availableMetadataObjectTypes
@@ -74,18 +81,18 @@
  
  @discussion
     The value of this property is an NSArray of NSStrings corresponding to AVMetadataObjectType strings defined in AVMetadataObject.h -- one for each metadata object type supported by the receiver. Available metadata object types are dependent on the capabilities of the AVCaptureInputPort to which this receiver's AVCaptureConnection is connected. Clients may specify the types of objects they would like to process by calling setMetadataObjectTypes:. This property is key-value observable.
-*/
-@property(nonatomic, readonly) NSArray *availableMetadataObjectTypes;
+ */
+@property(nonatomic, readonly) NSArray<AVMetadataObjectType> *availableMetadataObjectTypes;
 
 /*!
  @property metadataObjectTypes
  @abstract
     Specifies the types of metadata objects that the receiver should present to the client.
-
+ 
  @discussion
     AVCaptureMetadataOutput may detect and emit multiple metadata object types. For apps linked before iOS 7.0, the receiver defaults to capturing face metadata objects if supported (see -availableMetadataObjectTypes). For apps linked on or after iOS 7.0, the receiver captures no metadata objects by default. -setMetadataObjectTypes: throws an NSInvalidArgumentException if any elements in the array are not present in the -availableMetadataObjectTypes array.
-*/
-@property(nonatomic, copy) NSArray *metadataObjectTypes;
+ */
+@property(nonatomic, copy, null_resettable) NSArray<AVMetadataObjectType> *metadataObjectTypes;
 
 /*!
  @property rectOfInterest
@@ -99,12 +106,13 @@
 
 @end
 
+
 /*!
  @protocol AVCaptureMetadataOutputObjectsDelegate
  @abstract
     Defines an interface for delegates of AVCaptureMetadataOutput to receive emitted objects.
-*/
-__TVOS_PROHIBITED
+ */
+NS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED
 @protocol AVCaptureMetadataOutputObjectsDelegate <NSObject>
 
 @optional
@@ -113,19 +121,21 @@
  @method captureOutput:didOutputMetadataObjects:fromConnection:
  @abstract
     Called whenever an AVCaptureMetadataOutput instance emits new objects through a connection.
-
- @param captureOutput
+ 
+ @param output
     The AVCaptureMetadataOutput instance that emitted the objects.
  @param metadataObjects
     An array of AVMetadataObject subclasses (see AVMetadataObject.h).
  @param connection
     The AVCaptureConnection through which the objects were emitted.
-
+ 
  @discussion
     Delegates receive this message whenever the output captures and emits new objects, as specified by its metadataObjectTypes property. Delegates can use the provided objects in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's metadataObjectsCallbackQueue property. This method may be called frequently, so it must be efficient to prevent capture performance problems, including dropped metadata objects.
-
+ 
     Clients that need to reference metadata objects outside of the scope of this method must retain them and then release them when they are finished with them.
-*/
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection;
+ */
+- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection;
 
 @end
+
+NS_ASSUME_NONNULL_END
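
A sketch of wiring up the metadata output under the new nullability rules: a serial queue is required for in-order delivery, and the requested types must be a subset of `availableMetadataObjectTypes`. Here `session` and `delegate` are hypothetical:

```objc
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
if ([session canAddOutput:metadataOutput]) {
    [session addOutput:metadataOutput];
}

// A serial queue guarantees metadata objects are delivered in order.
dispatch_queue_t queue = dispatch_queue_create("metadata.objects", DISPATCH_QUEUE_SERIAL);
[metadataOutput setMetadataObjectsDelegate:delegate queue:queue];

// Only request types the connection can actually produce.
if ([metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeQRCode]) {
    metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeQRCode ];
}
```
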
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h	2016-08-05 01:30:12.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,115 +1,16 @@
 /*
     File:  AVCaptureOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
-#import <Foundation/Foundation.h>
-#import <AVFoundation/AVCaptureSession.h>
-
-@class AVMetadataObject;
-@class AVCaptureOutputInternal;
-
-/*!
- @class AVCaptureOutput
- @abstract
-    AVCaptureOutput is an abstract class that defines an interface for an output destination of an AVCaptureSession.
- 
- @discussion
-    AVCaptureOutput provides an abstract interface for connecting capture output destinations, such as files and video previews, to an AVCaptureSession.
-
-    An AVCaptureOutput can have multiple connections represented by AVCaptureConnection objects, one for each stream of media that it receives from an AVCaptureInput. An AVCaptureOutput does not have any connections when it is first created. When an output is added to an AVCaptureSession, connections are created that map media data from that session's inputs to its outputs.
-
-    Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and -[AVCaptureSession addOutputWithNoConnections:] methods.
- */
-NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
-@interface AVCaptureOutput : NSObject
-{
-@private
-    AVCaptureOutputInternal *_outputInternal;
-}
-
-/*!
- @property connections
- @abstract
-    The connections that describe the flow of media data to the receiver from AVCaptureInputs.
-
- @discussion
-    The value of this property is an NSArray of AVCaptureConnection objects, each describing the mapping between the receiver and the AVCaptureInputPorts of one or more AVCaptureInputs.
- */
-@property(nonatomic, readonly) NSArray *connections;
-
-/*!
- @method connectionWithMediaType:
- @abstract
-    Returns the first connection in the connections array with an inputPort of the specified mediaType.
-
- @param mediaType
-    An AVMediaType constant from AVMediaFormat.h, e.g. AVMediaTypeVideo.
-
- @discussion
-    This convenience method returns the first AVCaptureConnection in the receiver's connections array that has an AVCaptureInputPort of the specified mediaType. If no connection with the specified mediaType is found, nil is returned.
- */
-- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType NS_AVAILABLE(10_7, 5_0);
-
-/*!
- @method transformedMetadataObjectForMetadataObject:connection:
- @abstract
-    Converts an AVMetadataObject's visual properties to the receiver's coordinates.
-
- @param metadataObject
-    An AVMetadataObject originating from the same AVCaptureInput as the receiver.
- @param connection
-    The receiver's connection whose AVCaptureInput matches that of the metadata object to be converted.
- @result
-    An AVMetadataObject whose properties are in output coordinates.
-
- @discussion
-    AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. Face metadata objects likewise express yaw and roll angles with respect to an unrotated picture. -transformedMetadataObjectForMetadataObject:connection: converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. If the provided metadata object originates from an input source other than the preview layer's, nil will be returned.
- 
-    If an AVCaptureVideoDataOutput instance's connection's videoOrientation or videoMirrored properties are set to non-default values, the output applies the desired mirroring and orientation by physically rotating and or flipping sample buffers as they pass through it. AVCaptureStillImageOutput, on the other hand, does not physically rotate its buffers. It attaches an appropriate kCGImagePropertyOrientation number to captured still image buffers (see ImageIO/CGImageProperties.h) indicating how the image should be displayed on playback. Likewise, AVCaptureMovieFileOutput does not physically apply orientation/mirroring to its sample buffers -- it uses a QuickTime track matrix to indicate how the buffers should be rotated and/or flipped on playback.
- 
-    transformedMetadataObjectForMetadataObject:connection: alters the visual properties of the provided metadata object to match the physical rotation / mirroring of the sample buffers provided by the receiver through the indicated connection. I.e., for video data output, adjusted metadata object coordinates are rotated/mirrored. For still image and movie file output, they are not.
- */
-- (AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject connection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(6_0);
-
-/*!
- @method metadataOutputRectOfInterestForRect:
- @abstract
-    Converts a rectangle in the receiver's coordinate space to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver.
- 
- @param rectInOutputCoordinates
-    A CGRect in the receiver's coordinates.
- @result
-    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
- 
- @discussion
-    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
- */
-- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInOutputCoordinates NS_AVAILABLE_IOS(7_0);
-
-/*!
- @method rectForMetadataOutputRectOfInterest:
- @abstract
-    Converts a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver to a rectangle in the receiver's coordinates.
- 
- @param rectInMetadataOutputCoordinates
-    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
- @result
-    A CGRect in the receiver's coordinates.
- 
- @discussion
-    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
- */
-- (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
-
-@end
 
 #import <AVFoundation/AVCaptureAudioDataOutput.h>
 #import <AVFoundation/AVCaptureAudioPreviewOutput.h>
+#import <AVFoundation/AVCaptureDepthDataOutput.h>
 #import <AVFoundation/AVCaptureFileOutput.h>
 #import <AVFoundation/AVCaptureMetadataOutput.h>
 #import <AVFoundation/AVCapturePhotoOutput.h>
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,140 @@
+/*
+    File:  AVCaptureOutputBase.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVCaptureSession.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureOutput
+
+@class AVMetadataObject;
+@class AVCaptureOutputInternal;
+
+/*!
+ @class AVCaptureOutput
+ @abstract
+    AVCaptureOutput is an abstract class that defines an interface for an output destination of an AVCaptureSession.
+ 
+ @discussion
+    AVCaptureOutput provides an abstract interface for connecting capture output destinations, such as files and video previews, to an AVCaptureSession.
+
+    An AVCaptureOutput can have multiple connections represented by AVCaptureConnection objects, one for each stream of media that it receives from an AVCaptureInput. An AVCaptureOutput does not have any connections when it is first created. When an output is added to an AVCaptureSession, connections are created that map media data from that session's inputs to its outputs.
+
+    Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and -[AVCaptureSession addOutputWithNoConnections:] methods.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureOutput : NSObject
+{
+@private
+    AVCaptureOutputInternal *_outputInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property connections
+ @abstract
+    The connections that describe the flow of media data to the receiver from AVCaptureInputs.
+
+ @discussion
+    The value of this property is an NSArray of AVCaptureConnection objects, each describing the mapping between the receiver and the AVCaptureInputPorts of one or more AVCaptureInputs.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureConnection *> *connections;
+
+/*!
+ @method connectionWithMediaType:
+ @abstract
+    Returns the first connection in the connections array with an inputPort of the specified mediaType.
+
+ @param mediaType
+    An AVMediaType constant from AVMediaFormat.h, e.g. AVMediaTypeVideo.
+
+ @discussion
+    This convenience method returns the first AVCaptureConnection in the receiver's connections array that has an AVCaptureInputPort of the specified mediaType. If no connection with the specified mediaType is found, nil is returned.
+ */
+- (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType NS_AVAILABLE(10_7, 5_0);
+
+/*!
+ @method transformedMetadataObjectForMetadataObject:connection:
+ @abstract
+    Converts an AVMetadataObject's visual properties to the receiver's coordinates.
+
+ @param metadataObject
+    An AVMetadataObject originating from the same AVCaptureInput as the receiver.
+ @param connection
+    The receiver's connection whose AVCaptureInput matches that of the metadata object to be converted.
+ @result
+    An AVMetadataObject whose properties are in output coordinates.
+
+ @discussion
+    AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. Face metadata objects likewise express yaw and roll angles with respect to an unrotated picture. -transformedMetadataObjectForMetadataObject:connection: converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. If the provided metadata object originates from an input source other than the preview layer's, nil will be returned.
+ 
+    If an AVCaptureVideoDataOutput instance's connection's videoOrientation or videoMirrored properties are set to non-default values, the output applies the desired mirroring and orientation by physically rotating and or flipping sample buffers as they pass through it. AVCaptureStillImageOutput, on the other hand, does not physically rotate its buffers. It attaches an appropriate kCGImagePropertyOrientation number to captured still image buffers (see ImageIO/CGImageProperties.h) indicating how the image should be displayed on playback. Likewise, AVCaptureMovieFileOutput does not physically apply orientation/mirroring to its sample buffers -- it uses a QuickTime track matrix to indicate how the buffers should be rotated and/or flipped on playback.
+ 
+    transformedMetadataObjectForMetadataObject:connection: alters the visual properties of the provided metadata object to match the physical rotation / mirroring of the sample buffers provided by the receiver through the indicated connection. I.e., for video data output, adjusted metadata object coordinates are rotated/mirrored. For still image and movie file output, they are not.
+ */
+- (nullable AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject connection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(6_0);
+
+/*!
+ @method metadataOutputRectOfInterestForRect:
+ @abstract
+    Converts a rectangle in the receiver's coordinate space to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver.
+ 
+ @param rectInOutputCoordinates
+    A CGRect in the receiver's coordinates.
+ @result
+    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+ 
+ @discussion
+    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInOutputCoordinates NS_AVAILABLE_IOS(7_0);
+
+/*!
+ @method rectForMetadataOutputRectOfInterest:
+ @abstract
+    Converts a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver to a rectangle in the receiver's coordinates.
+ 
+ @param rectInMetadataOutputCoordinates
+    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+ @result
+    A CGRect in the receiver's coordinates.
+ 
+ @discussion
+    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
+
+@end
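
A minimal usage sketch of the two conversion methods above, assuming a video data output and a metadata output that share the same device input in a running session (all identifiers here are placeholders, not SDK names):

```objc
#import <AVFoundation/AVFoundation.h>

// Limit metadata detection (e.g. barcode scanning) to the center of the
// frames a 1920x1080 video data output produces. The conversion handles the
// orientation/mirroring/scaling concerns described in the header comments.
static void ConfigureRectOfInterest(AVCaptureVideoDataOutput *videoOutput,
                                    AVCaptureMetadataOutput *metadataOutput)
{
    CGRect centerInPixels = CGRectMake(480.0, 270.0, 960.0, 540.0);
    // metadataOutputRectOfInterestForRect: maps from the receiver's
    // coordinates to the metadata output's normalized {0,0}-{1,1} space.
    metadataOutput.rectOfInterest =
        [videoOutput metadataOutputRectOfInterestForRect:centerInPixels];
}
```
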
+
+
+/*!
+ @enum AVCaptureOutputDataDroppedReason
+ @abstract
+    Constants indicating the reason a capture data output dropped data.
+
+ @constant AVCaptureOutputDataDroppedReasonNone
+    No data was dropped.
+ @constant AVCaptureOutputDataDroppedReasonLateData
+    Data was dropped because alwaysDiscardsLate{VideoFrames | DepthData} is YES and the client was still processing previous data when the current data needed to be delivered.
+ @constant AVCaptureOutputDataDroppedReasonOutOfBuffers
+    Data was dropped because its pool of buffers ran dry. This is usually indicative that the client is holding onto data objects too long.
+ @constant AVCaptureOutputDataDroppedReasonDiscontinuity
+    Data was dropped because the device providing the data experienced a discontinuity, and an unknown number of data objects have been lost. This condition is typically caused by the system being too busy.
+ */
+typedef NS_ENUM(NSInteger, AVCaptureOutputDataDroppedReason) {
+    AVCaptureOutputDataDroppedReasonNone          = 0,
+    AVCaptureOutputDataDroppedReasonLateData      = 1,
+    AVCaptureOutputDataDroppedReasonOutOfBuffers  = 2,
+    AVCaptureOutputDataDroppedReasonDiscontinuity = 3,
+} NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+NS_ASSUME_NONNULL_END
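
The dropped-data reasons above can be turned into diagnostics wherever a data output reports a drop. A small sketch, grounded only in the enum declared in this header:

```objc
#import <AVFoundation/AVFoundation.h>

// Map each AVCaptureOutputDataDroppedReason to a human-readable string for
// logging dropped video frames or depth data.
static NSString *DescribeDropReason(AVCaptureOutputDataDroppedReason reason)
{
    switch (reason) {
        case AVCaptureOutputDataDroppedReasonNone:          return @"no data dropped";
        case AVCaptureOutputDataDroppedReasonLateData:      return @"client processed previous data too slowly";
        case AVCaptureOutputDataDroppedReasonOutOfBuffers:  return @"buffer pool ran dry (data held too long)";
        case AVCaptureOutputDataDroppedReasonDiscontinuity: return @"device discontinuity (system too busy)";
    }
    return @"unknown reason";
}
```
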
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2017-02-22 01:14:53.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2017-05-24 00:37:44.000000000 -0400
@@ -1,15 +1,19 @@
 /*
     File:  AVCapturePhotoOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
+#import <CoreMedia/CMSampleBuffer.h>
 
 NS_ASSUME_NONNULL_BEGIN
 
+#pragma mark AVCapturePhotoOutput
+
 @class AVCapturePhotoSettings;
 @class AVCapturePhotoBracketSettings;
 @class AVCaptureResolvedPhotoSettings;
@@ -41,9 +45,13 @@
 @interface AVCapturePhotoOutput : AVCaptureOutput
 {
 @private
-	AVCapturePhotoOutputInternal *_internal;
+    AVCapturePhotoOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @method capturePhotoWithSettings:delegate:
  @abstract
@@ -66,11 +74,13 @@
         - If rawPhotoPixelFormatType is non-zero, your delegate must respond to -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
         - If rawPhotoPixelFormatType is non-zero, highResolutionPhotoEnabled may be YES or NO, but the setting only applies to the processed image, if you've specified one.
         - If rawPhotoPixelFormatType is non-zero, the videoZoomFactor of the source device and the videoScaleAndCropFactor of the photo output's video connection must both be 1.0. Ensure no zoom is applied before requesting a RAW capture, and don't change the zoom during RAW capture.
-    Format rules:
+        - If rawFileType is specified, it must be present in -availableRawPhotoFileTypes and must support the rawPhotoPixelFormatType specified using -supportedRawPhotoPixelFormatTypesForFileType:.
+    Processed Format rules:
         - If format is non-nil, a kCVPixelBufferPixelFormatTypeKey or AVVideoCodecKey must be present, and both may not be present.
         - If format has a kCVPixelBufferPixelFormatTypeKey, its value must be present in the receiver's -availablePhotoPixelFormatTypes array.
         - If format has a AVVideoCodecKey, its value must be present in the receiver's -availablePhotoCodecTypes array.
         - If format is non-nil, your delegate must respond to -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
+        - If processedFileType is specified, it must be present in -availablePhotoFileTypes and must support the format's specified kCVPixelBufferPixelFormatTypeKey (using -supportedPhotoPixelFormatTypesForFileType:) or AVVideoCodecKey (using -supportedPhotoCodecTypesForFileType:).
     Flash rules:
         - The specified flashMode must be present in the receiver's -supportedFlashModes array.
     Live Photo rules:
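
A hedged sketch of a request that satisfies the RAW and processed-format rules in this hunk; `photoOutput` and `delegate` are placeholders, and the output is assumed to already belong to a running session with a video source so its available* arrays are populated:

```objc
#import <AVFoundation/AVFoundation.h>

// Request RAW plus a JPEG-processed image. Per the rules above, the RAW
// pixel format must come from availableRawPhotoPixelFormatTypes and the
// codec from availablePhotoCodecTypes; zoom factors must be 1.0.
static void RequestRawPlusJPEG(AVCapturePhotoOutput *photoOutput,
                               id<AVCapturePhotoCaptureDelegate> delegate)
{
    OSType rawType = photoOutput.availableRawPhotoPixelFormatTypes.firstObject.unsignedIntValue;
    AVCapturePhotoSettings *settings =
        [AVCapturePhotoSettings photoSettingsWithRawPixelFormatType:rawType
                                                    processedFormat:@{ AVVideoCodecKey : AVVideoCodecTypeJPEG }];
    [photoOutput capturePhotoWithSettings:settings delegate:delegate];
}
```
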
@@ -136,7 +146,7 @@
  @discussion
     If you wish to capture a photo in a compressed format, such as JPEG, you must ensure that the format you want is present in the receiver's availablePhotoCodecTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are available. This property is key-value observable.
  */
-@property(nonatomic, readonly) NSArray<NSString *> *availablePhotoCodecTypes;
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availablePhotoCodecTypes;
 
 /*!
  @property availableRawPhotoPixelFormatTypes
@@ -149,6 +159,71 @@
 @property(nonatomic, readonly) NSArray<NSNumber *> *availableRawPhotoPixelFormatTypes;
 
 /*!
+ @property availablePhotoFileTypes
+ @abstract
+    An array of AVFileType values that are currently supported by the receiver.
+
+ @discussion
+    If you wish to capture a photo that is formatted for a particular file container, such as HEIF, you must ensure that the fileType you desire is present in the receiver's availablePhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVFileType> *availablePhotoFileTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property availableRawPhotoFileTypes
+ @abstract
+    An array of AVFileType values that are currently supported by the receiver for RAW capture.
+
+ @discussion
+    If you wish to capture a RAW photo that is formatted for a particular file container, such as DNG, you must ensure that the fileType you desire is present in the receiver's availableRawPhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVFileType> *availableRawPhotoFileTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method supportedPhotoPixelFormatTypesForFileType:
+ @abstract
+    An array of pixel format type values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of CVPixelBufferPixelFormatTypeKey values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as TIFF, you must ensure that the photo pixel format type you request is valid for that file type. If no pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
+ */
+- (NSArray<NSNumber *> *)supportedPhotoPixelFormatTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method supportedPhotoCodecTypesForFileType:
+ @abstract
+    An array of AVVideoCodecKey values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of AVVideoCodecKey values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as HEIF, you must ensure that the photo codec type you request is valid for that file type. If no codec types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are supported.
+ */
+- (NSArray<AVVideoCodecType> *)supportedPhotoCodecTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method supportedRawPhotoPixelFormatTypesForFileType:
+ @abstract
+    An array of CVPixelBufferPixelFormatType values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of CVPixelBufferPixelFormatType values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as DNG, you must ensure that the RAW pixel format type you request is valid for that file type. If no RAW pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
+ */
+- (NSArray<NSNumber *> *)supportedRawPhotoPixelFormatTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
+
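
The three supported*ForFileType: queries above pair with the available*FileTypes properties. A sketch of the DNG case (placeholder names; 0 is used here as a "none found" sentinel):

```objc
#import <AVFoundation/AVFoundation.h>

// Return the first Bayer RAW pixel format the receiver can write into a DNG
// container, or 0 if DNG output is unavailable in the current configuration.
static OSType FirstDNGCompatibleRawFormat(AVCapturePhotoOutput *photoOutput)
{
    if (![photoOutput.availableRawPhotoFileTypes containsObject:AVFileTypeDNG]) {
        return 0;
    }
    return [photoOutput supportedRawPhotoPixelFormatTypesForFileType:AVFileTypeDNG]
               .firstObject.unsignedIntValue;
}
```
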
+/*!
  @property stillImageStabilizationSupported
  @abstract
     Indicates whether the still image stabilization feature is supported by the receiver.
@@ -179,6 +254,36 @@
 @property(nonatomic, readonly, getter=isDualCameraFusionSupported) BOOL dualCameraFusionSupported NS_AVAILABLE_IOS(10_2);
 
 /*!
+ @property dualCameraDualPhotoDeliverySupported
+ @abstract
+    Specifies whether the photo output's current configuration supports delivery of both telephoto and wide images from the DualCamera.
+
+ @discussion
+    DualCamera dual photo delivery is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, dualCameraDualPhotoDeliveryEnabled also reverts to NO. If you've previously opted in for DualCamera dual photo delivery and then change configurations, you may need to set dualCameraDualPhotoDeliveryEnabled = YES again. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isDualCameraDualPhotoDeliverySupported) BOOL dualCameraDualPhotoDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property dualCameraDualPhotoDeliveryEnabled
+ @abstract
+    Indicates whether the photo output is configured for delivery of both the telephoto and wide images from the DualCamera.
+
+ @discussion
+    Default value is NO. This property may only be set to YES if dualCameraDualPhotoDeliverySupported is YES. DualCamera dual photo delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any dual photo delivery captures, you should set this property to YES before calling -[AVCaptureSession startRunning]. See also -[AVCapturePhotoSettings dualCameraDualPhotoDeliveryEnabled].
+ */
+@property(nonatomic, getter=isDualCameraDualPhotoDeliveryEnabled) BOOL dualCameraDualPhotoDeliveryEnabled NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property cameraCalibrationDataDeliverySupported
+ @abstract
+    Specifies whether the photo output's current configuration supports delivery of AVCameraCalibrationData in the resultant AVCapturePhoto.
+
+ @discussion
+    Camera calibration data delivery (intrinsics, extrinsics, lens distortion characteristics, etc.) is only supported in certain configurations. In iOS 11, its value is only YES if dualCameraDualPhotoDeliveryEnabled is YES. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isCameraCalibrationDataDeliverySupported) BOOL cameraCalibrationDataDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
  @property supportedFlashModes
  @abstract
     An array of AVCaptureFlashMode constants for the current capture session configuration.
@@ -206,7 +311,7 @@
  @discussion
     You can influence the return values of isFlashScene and isStillImageStabilizationScene by setting this property, indicating the flashMode and autoStillImageStabilizationEnabled values that should be considered for scene monitoring. For instance, if you set flashMode to AVCaptureFlashModeOff, isFlashScene always reports NO. If you set it to AVCaptureFlashModeAuto or AVCaptureFlashModeOn, isFlashScene answers YES or NO based on the current scene's lighting conditions. Note that there is some overlap in the light level ranges that benefit from still image stabilization and flash. If your photoSettingsForSceneMonitoring indicate that both still image stabilization and flash scenes should be monitored, still image stabilization takes precedence, and isFlashScene becomes YES at lower overall light levels. The default value for this property is nil. See isStillImageStabilizationScene and isFlashScene for further discussion.
  */
-@property(nullable, nonatomic, copy) AVCapturePhotoSettings *photoSettingsForSceneMonitoring;
+@property(nonatomic, copy, nullable) AVCapturePhotoSettings *photoSettingsForSceneMonitoring;
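
A sketch of the scene-monitoring opt-in described above, assuming you want the output's flashScene property (key-value observable) to drive an auto-flash indicator:

```objc
#import <AVFoundation/AVFoundation.h>

// Ask the output to consider AVCaptureFlashModeAuto when evaluating scenes,
// so isFlashScene reflects the current lighting conditions.
static void EnableFlashSceneMonitoring(AVCapturePhotoOutput *photoOutput)
{
    AVCapturePhotoSettings *monitoringSettings = [AVCapturePhotoSettings photoSettings];
    monitoringSettings.flashMode = AVCaptureFlashModeAuto;
    photoOutput.photoSettingsForSceneMonitoring = monitoringSettings;
    // Key-value observe the output's flashScene property to update the UI.
}
```
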
 
 /*!
  @property highResolutionCaptureEnabled
@@ -279,21 +384,31 @@
 @property(nonatomic, getter=isLivePhotoAutoTrimmingEnabled) BOOL livePhotoAutoTrimmingEnabled;
 
 /*!
+ @property availableLivePhotoVideoCodecTypes
+ @abstract
+    An array of AVVideoCodecKey values that are currently supported by the receiver for use in the movie complement of a Live Photo.
+
+ @discussion
+    Prior to iOS 11, all Live Photo movie video tracks are compressed using H.264. Beginning in iOS 11, you can select the Live Photo movie video compression format using one of the AVVideoCodecKey strings presented in this property. The system's default (preferred) video codec is always presented first in the list. If you've not yet added your receiver to an AVCaptureSession with a video source, no codecs are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableLivePhotoVideoCodecTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
  @method JPEGPhotoDataRepresentationForJPEGSampleBuffer:previewPhotoSampleBuffer:
  @abstract
     A class method that writes a JPEG sample buffer to an NSData in the JPEG file format.
  
- @param jpegSampleBuffer
+ @param JPEGSampleBuffer
     A CMSampleBuffer containing JPEG compressed data.
- @param previewSampleBuffer
+ @param previewPhotoSampleBuffer
     An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
  @result
     An NSData containing bits in the JPEG file format. May return nil if the re-packaging process fails.
 
  @discussion
-    AVCapturePhotoOutput delivers JPEG photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a JPEG file, you may call this class method, optionally inserting your own metadata into the JPEG CMSampleBuffer first, and optionally passing a preview image to be written to the JPEG file format as a thumbnail image.
+    AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers JPEG photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a JPEG file, you may call this class method, optionally inserting your own metadata into the JPEG CMSampleBuffer first, and optionally passing a preview image to be written to the JPEG file format as a thumbnail image.
  */
-+ (nullable NSData *)JPEGPhotoDataRepresentationForJPEGSampleBuffer:(CMSampleBufferRef)JPEGSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer;
++ (nullable NSData *)JPEGPhotoDataRepresentationForJPEGSampleBuffer:(CMSampleBufferRef)JPEGSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer NS_DEPRECATED_IOS(10_0, 11_0, "Use -[AVCapturePhoto fileDataRepresentation] instead.");
 
 /*!
  @method DNGPhotoDataRepresentationForRawSampleBuffer:previewPhotoSampleBuffer:
@@ -302,15 +417,42 @@
  
  @param rawSampleBuffer
     A CMSampleBuffer containing Bayer RAW data.
- @param previewSampleBuffer
+ @param previewPhotoSampleBuffer
     An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
  @result
     An NSData containing bits in the DNG file format. May return nil if the re-packaging process fails.
 
  @discussion
-    AVCapturePhotoOutput delivers RAW photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a DNG file, you may call this class method, optionally inserting your own metadata into the RAW CMSampleBuffer first, and optionally passing a preview image to be written to the DNG file format as a thumbnail image. Only RAW images from Apple built-in cameras are supported.
+    AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers RAW photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a DNG file, you may call this class method, optionally inserting your own metadata into the RAW CMSampleBuffer first, and optionally passing a preview image to be written to the DNG file format as a thumbnail image. Only RAW images from Apple built-in cameras are supported.
  */
-+ (nullable NSData *)DNGPhotoDataRepresentationForRawSampleBuffer:(CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer;
++ (nullable NSData *)DNGPhotoDataRepresentationForRawSampleBuffer:(CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer NS_DEPRECATED_IOS(10_0, 11_0, "Use -[AVCapturePhoto fileDataRepresentation] instead.");
+
+@end
+
+
+@class AVCapturePhoto;
+
+@interface AVCapturePhotoOutput (AVCapturePhotoOutputDepthDataDeliverySupport)
+
+/*!
+ @property depthDataDeliverySupported
+ @abstract
+    A BOOL value specifying whether depth data delivery is supported.
+
+ @discussion
+    Some cameras and configurations support the delivery of depth data (e.g. disparity maps) along with the photo. This property returns YES if the session's current configuration allows photos to be captured with depth data, from which depth-related filters may be applied. When switching cameras or formats this property may change. When this property changes from YES to NO, depthDataDeliveryEnabled also reverts to NO. If you've previously opted in for depth data delivery and then change configurations, you may need to set depthDataDeliveryEnabled = YES again. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isDepthDataDeliverySupported) BOOL depthDataDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property depthDataDeliveryEnabled
+ @abstract
+    A BOOL specifying whether the photo render pipeline is prepared for depth data delivery.
+
+ @discussion
+    Default is NO. Set to YES if you wish depth data to be delivered with your AVCapturePhotos. This property may only be set to YES if depthDataDeliverySupported is YES. Enabling depth data delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture depth data, you should set this property to YES before calling -[AVCaptureSession startRunning].
+ */
+@property(nonatomic, getter=isDepthDataDeliveryEnabled) BOOL depthDataDeliveryEnabled NS_AVAILABLE_IOS(11_0);
 
 @end
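
A sketch of the opt-in order the discussion above recommends (`session` and `photoOutput` are placeholders): enable depth delivery before starting the session, since enabling it later forces a lengthy pipeline reconfiguration:

```objc
#import <AVFoundation/AVFoundation.h>

// Prepare the render pipeline for depth delivery up front, then start.
static void StartSessionWithDepth(AVCaptureSession *session,
                                  AVCapturePhotoOutput *photoOutput)
{
    if (photoOutput.isDepthDataDeliverySupported) {
        photoOutput.depthDataDeliveryEnabled = YES;
    }
    [session startRunning];
}
```
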
 
@@ -323,13 +465,13 @@
  @discussion
     AVCapturePhotoOutput invokes the AVCapturePhotoCaptureDelegate callbacks on a common dispatch queue — not necessarily the main queue. While the -captureOutput:willBeginCaptureForResolvedSettings: callback always comes first and the -captureOutput:didFinishCaptureForResolvedSettings: callback always comes last, none of the other callbacks can be assumed to come in any particular order. The AVCaptureResolvedPhotoSettings instance passed to the client with each callback has the same uniqueID as the AVCapturePhotoSettings instance passed in -capturePhotoWithSettings:delegate:. All callbacks are marked optional, but depending on the features you've specified in your AVCapturePhotoSettings, some callbacks become mandatory and are validated in -capturePhotoWithSettings:delegate:. If your delegate does not implement the mandatory callbacks, an NSInvalidArgumentException is thrown.
 
-    - If you initialize your photo settings with a format dictionary, or use one of the default constructors (that is, if you're not requesting a RAW-only capture), your delegate must respond to -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
-    - If you initialize your photo settings with a rawPhotoPixelFormatType, your delegate must respond to -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
+    - If you initialize your photo settings with a format dictionary, or use one of the default constructors (that is, if you're not requesting a RAW-only capture), your delegate must respond to either -captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
+    - If you initialize your photo settings with a rawPhotoPixelFormatType, your delegate must respond to either -captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
     - If you set livePhotoMovieFileURL to non-nil, your delegate must respond to -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
  
     In the event of an error, all expected callbacks are fired with an appropriate error.
  */
-__TVOS_PROHIBITED
+NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
 @protocol AVCapturePhotoCaptureDelegate <NSObject>
 
 @optional
@@ -338,7 +480,7 @@
  @abstract
     A callback fired as soon as the capture settings have been resolved.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param resolvedSettings
     An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
@@ -346,14 +488,14 @@
  @discussion
     This callback is always delivered first for a particular capture request. It is delivered as soon as possible after you call -capturePhotoWithSettings:delegate:, so you can know what to expect in the remainder of your callbacks.
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+- (void)captureOutput:(AVCapturePhotoOutput *)output willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
 
 /*!
  @method captureOutput:willCapturePhotoForResolvedSettings:
  @abstract
     A callback fired just as the photo is being taken.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param resolvedSettings
     An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
@@ -361,14 +503,14 @@
  @discussion
     The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from NO to YES. The callback is delivered right after the shutter sound is heard (note that shutter sounds are suppressed when Live Photos are being captured).
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput willCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+- (void)captureOutput:(AVCapturePhotoOutput *)output willCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
 
 /*!
  @method captureOutput:didCapturePhotoForResolvedSettings:
  @abstract
     A callback fired just after the photo is taken.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param resolvedSettings
     An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
@@ -376,14 +518,31 @@
  @discussion
     The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from YES to NO.
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+
+/*!
+ @method captureOutput:didFinishProcessingPhoto:error:
+ @abstract
+    A callback fired when photos are ready to be delivered to you (RAW or processed).
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param photo
+    An instance of AVCapturePhoto.
+ @param error
+    An error indicating what went wrong. If the photo was processed successfully, nil is returned.
+ 
+ @discussion
+    This callback fires resolvedSettings.expectedPhotoCount number of times for a given capture request. Note that the photo parameter is always non-nil, even if an error is returned. The delivered AVCapturePhoto's rawPhoto property can be queried to know if it's a RAW image or processed image.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0);
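
An illustrative implementation of the iOS 11 callback declared above; -[AVCapturePhoto fileDataRepresentation] is the replacement the deprecation messages in this header point to, and the persistence step is deliberately left out:

```objc
// In a class conforming to AVCapturePhotoCaptureDelegate
// (assumes AVFoundation is imported).
- (void)captureOutput:(AVCapturePhotoOutput *)output
    didFinishProcessingPhoto:(AVCapturePhoto *)photo
                       error:(NSError *)error
{
    // Per the header discussion, photo is always non-nil, even on error.
    if (error != nil) {
        NSLog(@"Photo processing failed: %@", error);
        return;
    }
    NSData *fileData = [photo fileDataRepresentation]; // may be nil
    NSLog(@"Got %lu bytes of photo data", (unsigned long)fileData.length);
    // Write fileData to disk or the photo library (not shown).
}
```
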
 
 /*!
  @method captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:
  @abstract
     A callback fired when the primary processed photo or photos are done.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param photoSampleBuffer
     A CMSampleBuffer containing an uncompressed pixel buffer or compressed data, along with timing information and metadata. May be nil if there was an error.
@@ -399,14 +558,14 @@
  @discussion
     If you've requested a single processed image (uncompressed or compressed) capture, the photo is delivered here. If you've requested a bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error NS_DEPRECATED_IOS(10_0, 11_0, "Use -captureOutput:didFinishProcessingPhoto:error: instead.");
 
 /*!
  @method captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:
  @abstract
     A callback fired when the RAW photo or photos are done.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param rawSampleBuffer
     A CMSampleBuffer containing Bayer RAW pixel data, along with timing information and metadata. May be nil if there was an error.
@@ -422,14 +581,14 @@
  @discussion
     Single RAW image and bracketed RAW photos are delivered here. If you've requested a RAW bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingRawPhotoSampleBuffer:(nullable CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingRawPhotoSampleBuffer:(nullable CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error NS_DEPRECATED_IOS(10_0, 11_0, "Use -captureOutput:didFinishProcessingPhoto:error: instead.");
 
 /*!
  @method captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:
  @abstract
     A callback fired when the Live Photo movie has captured all its media data, though all media has not yet been written to file.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param outputFileURL
     The URL to which the movie file will be written. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
@@ -439,14 +598,14 @@
  @discussion
     When this callback fires, no new media is being written to the file. If you are displaying a "Live" badge, this is an appropriate time to dismiss it. The movie file itself is not done being written until the -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error: callback fires.
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishRecordingLivePhotoMovieForEventualFileAtURL:(NSURL *)outputFileURL resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishRecordingLivePhotoMovieForEventualFileAtURL:(NSURL *)outputFileURL resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
 
 /*!
  @method captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:
  @abstract
     A callback fired when the Live Photo movie is finished being written to disk.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param outputFileURL
     The URL where the movie file resides. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
@@ -462,14 +621,14 @@
  @discussion
     When this callback fires, the movie on disk is fully finished and ready for consumption.
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishProcessingLivePhotoToMovieFileAtURL:(NSURL *)outputFileURL duration:(CMTime)duration photoDisplayTime:(CMTime)photoDisplayTime resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingLivePhotoToMovieFileAtURL:(NSURL *)outputFileURL duration:(CMTime)duration photoDisplayTime:(CMTime)photoDisplayTime resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
 
 /*!
  @method captureOutput:didFinishCaptureForResolvedSettings:error:
  @abstract
     A callback fired when the photo capture is completed and no more callbacks will be fired.
  
- @param captureOutput
+ @param output
     The calling instance of AVCapturePhotoOutput.
  @param resolvedSettings
     An instance of AVCaptureResolvedPhotoSettings indicating which capture features were selected.
@@ -479,11 +638,13 @@
  @discussion
     This callback always fires last and when it does, you may clean up any state relating to this photo capture.
  */
-- (void)captureOutput:(AVCapturePhotoOutput *)captureOutput didFinishCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
 
 @end
 
 
+#pragma mark - AVCapturePhotoSettings
+
 @class AVCapturePhotoSettingsInternal;
 
 /*!
@@ -498,7 +659,7 @@
 @interface AVCapturePhotoSettings : NSObject <NSCopying>
 {
 @private
-	AVCapturePhotoSettingsInternal *_internal;
+    AVCapturePhotoSettingsInternal *_internal;
 }
 
 /*!
@@ -510,7 +671,7 @@
     An instance of AVCapturePhotoSettings.
  
  @discussion
-    A default AVCapturePhotoSettings object has a format of AVVideoCodecJPEG and autoStillImageStabilizationEnabled set to YES.
+    A default AVCapturePhotoSettings object has a format of AVVideoCodecTypeJPEG, a fileType of AVFileTypeJPEG, and autoStillImageStabilizationEnabled set to YES.
  */
 + (instancetype)photoSettings;
 
@@ -562,6 +723,27 @@
 + (instancetype)photoSettingsWithRawPixelFormatType:(OSType)rawPixelFormatType processedFormat:(nullable NSDictionary<NSString *, id> *)processedFormat;
 
 /*!
+ @method photoSettingsWithRawPixelFormatType:processedFormat:fileType:
+ @abstract
+    Creates an instance of AVCapturePhotoSettings specifying RAW + a processed format (such as JPEG) and a file container to which it will be written.
+ 
+ @param rawPixelFormatType
+    A Bayer RAW pixel format OSType (defined in CVPixelBuffer.h). Pass 0 if you do not desire a RAW photo callback.
+ @param rawFileType
+    The file container for which the RAW image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
+ @param processedFormat
+    A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. Pass nil if you do not desire a processed photo callback.
+ @param processedFileType
+    The file container for which the processed image should be formatted to be written. Pass nil if you have no preferred file container. A default container will be chosen for you.
+ @result
+    An instance of AVCapturePhotoSettings.
+ 
+ @discussion
+    rawPixelFormatType must be one of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. Set rawPixelFormatType to 0 if you do not desire a RAW photo callback. If you are specifying a rawFileType, it must be present in AVCapturePhotoOutput's -availableRawPhotoFileTypes array. If you wish an uncompressed processedFormat, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the processedFormat specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed processedFormat. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you are specifying a processedFileType, it must be present in AVCapturePhotoOutput's -availablePhotoFileTypes array. Pass a nil processedFormat dictionary if you only desire a RAW photo capture. See AVCapturePhotoOutput's -capturePhotoWithSettings:delegate: inline documentation for a discussion of restrictions on AVCapturePhotoSettings when requesting RAW capture.
+ */
++ (instancetype)photoSettingsWithRawPixelFormatType:(OSType)rawPixelFormatType rawFileType:(nullable AVFileType)rawFileType processedFormat:(nullable NSDictionary<NSString *, id> *)processedFormat processedFileType:(nullable AVFileType)processedFileType NS_AVAILABLE_IOS(11_0);
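
A hedged sketch of this new factory: RAW destined for DNG plus an HEVC-compressed image in a HEIF container. It assumes the caller has already validated the pixel format and both file types against the output's available*/supported* arrays:

```objc
#import <AVFoundation/AVFoundation.h>

// Build RAW-as-DNG + HEVC-in-HEIF settings (iOS 11 constants).
static AVCapturePhotoSettings *MakeRawPlusHEIFSettings(OSType rawPixelFormatType)
{
    return [AVCapturePhotoSettings
        photoSettingsWithRawPixelFormatType:rawPixelFormatType
                                rawFileType:AVFileTypeDNG
                            processedFormat:@{ AVVideoCodecKey : AVVideoCodecTypeHEVC }
                          processedFileType:AVFileTypeHEIF];
}
```
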
+
+/*!
  @method photoSettingsFromPhotoSettings:
  @abstract
     Creates an instance of AVCapturePhotoSettings with a new uniqueID from an existing instance of AVCapturePhotoSettings.
@@ -594,7 +776,17 @@
  @discussion
     The format dictionary you passed to one of the creation methods. May be nil if you've specified RAW-only capture.
  */
-@property(nullable, readonly, copy) NSDictionary<NSString *, id> *format;
+@property(readonly, copy, nullable) NSDictionary<NSString *, id> *format;
+
+/*!
+ @property processedFileType
+ @abstract
+    The file container for which the processed photo is formatted to be stored.
+
+ @discussion
+    The formatting of data within a photo buffer is often dependent on the file format intended for storage. For instance, a JPEG encoded photo buffer intended for storage in a JPEG (JPEG File Interchange Format) file differs from JPEG to be stored in HEIF. The HEIF-containerized JPEG buffer is tiled for readback efficiency and partitioned into the box structure dictated by the HEIF file format. Some codecs are only supported by AVCapturePhotoOutput if containerized. For instance, the AVVideoCodecTypeHEVC is only supported with AVFileTypeHEIF formatting. To discover which photo pixel format types and video codecs are supported for a given file type, you may query AVCapturePhotoOutput's -supportedPhotoPixelFormatTypesForFileType:, or -supportedPhotoCodecTypesForFileType: respectively.
+ */
+@property(nullable, readonly) AVFileType processedFileType NS_AVAILABLE_IOS(11_0);
 
 /*!
  @property rawPhotoPixelFormatType
@@ -607,6 +799,16 @@
 @property(readonly) OSType rawPhotoPixelFormatType;
 
 /*!
+ @property rawFileType
+ @abstract
+    The file container for which the RAW photo is formatted to be stored.
+
+ @discussion
+    The formatting of data within a RAW photo buffer may be dependent on the file format intended for storage. To discover which RAW photo pixel format types are supported for a given file type, you may query AVCapturePhotoOutput's -supportedRawPhotoPixelFormatTypesForFileType:.
+ */
+@property(nullable, readonly) AVFileType rawFileType NS_AVAILABLE_IOS(11_0);
+
+/*!
  @property flashMode
  @abstract
     Specifies whether the flash should be on, off, or chosen automatically by AVCapturePhotoOutput.
@@ -622,7 +824,7 @@
     Specifies whether still image stabilization should be used automatically.
 
  @discussion
-    Default is YES unless you are capturing a RAW photo (RAW photos may not be processed by definition). When set to YES, still image stabilization is applied automatically in low light to counteract hand shake. If the device has optical image stabilization, autoStillImageStabilizationEnabled makes use of lens stabilization as well.
+    Default is YES unless you are capturing a RAW photo (RAW photos may not be processed by definition) or a bracket using AVCapturePhotoBracketSettings. When set to YES, still image stabilization is applied automatically in low light to counteract hand shake. If the device has optical image stabilization, autoStillImageStabilizationEnabled makes use of lens stabilization as well.
  */
 @property(nonatomic, getter=isAutoStillImageStabilizationEnabled) BOOL autoStillImageStabilizationEnabled;
 
@@ -637,6 +839,16 @@
 @property(nonatomic, getter=isAutoDualCameraFusionEnabled) BOOL autoDualCameraFusionEnabled NS_AVAILABLE_IOS(10_2);
 
 /*!
+ @property dualCameraDualPhotoDeliveryEnabled
+ @abstract
+    Specifies whether the DualCamera should return both the telephoto and wide image.
+
+ @discussion
+    Default is NO. When set to YES, your captureOutput:didFinishProcessingPhoto:error: callback will receive twice the number of callbacks, as both the telephoto image(s) and wide-angle image(s) are delivered. You may only set this property to YES if you've set your AVCapturePhotoOutput's dualCameraDualPhotoDeliveryEnabled property to YES, and your delegate responds to the captureOutput:didFinishProcessingPhoto:error: selector.
+ */
+@property(nonatomic, getter=isDualCameraDualPhotoDeliveryEnabled) BOOL dualCameraDualPhotoDeliveryEnabled NS_AVAILABLE_IOS(11_0);
+
+/*!
  @property highResolutionPhotoEnabled
  @abstract
     Specifies whether photos should be captured at the highest resolution supported by the source AVCaptureDevice's activeFormat.
@@ -647,16 +859,76 @@
 @property(nonatomic, getter=isHighResolutionPhotoEnabled) BOOL highResolutionPhotoEnabled;
 
 /*!
+ @property depthDataDeliveryEnabled
+ @abstract
+    Specifies whether AVDepthData should be captured along with the photo.
+
+ @discussion
+    Default is NO. Set to YES if you wish to receive depth data with your photo. Throws an exception if -[AVCapturePhotoOutput depthDataDeliveryEnabled] is not set to YES or your delegate does not respond to the captureOutput:didFinishProcessingPhoto:error: selector. Note that setting this property to YES may add significant processing time to the delivery of your didFinishProcessingPhoto: callback.
+ */
+@property(nonatomic, getter=isDepthDataDeliveryEnabled) BOOL depthDataDeliveryEnabled NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property embedsDepthDataInPhoto
+ @abstract
+    Specifies whether depth data included with this photo should be written to the photo's file structure.
+
+ @discussion
+    Default is YES. When depthDataDeliveryEnabled is set to YES, this property specifies whether the included depth data should be written to the resulting photo's internal file structure. Depth data is currently only supported in HEIF, JPEG, and DNG. This property is ignored if depthDataDeliveryEnabled is set to NO.
+ */
+@property(nonatomic) BOOL embedsDepthDataInPhoto NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property depthDataFiltered
+ @abstract
+    Specifies whether the depth data delivered with the photo should be filtered to fill invalid values.
+
+ @discussion
+    Default is YES. This property is ignored unless depthDataDeliveryEnabled is set to YES. Depth data maps may contain invalid pixel values due to a variety of factors including occlusions and low light. When depthDataFiltered is set to YES, the photo output interpolates missing data, filling in all holes.
+ */
+@property(nonatomic, getter=isDepthDataFiltered) BOOL depthDataFiltered NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property cameraCalibrationDataDeliveryEnabled
+ @abstract
+    Specifies whether AVCameraCalibrationData should be captured and delivered along with this photo.
+
+ @discussion
+    Default is NO. Set to YES if you wish to receive camera calibration data with your photo. Camera calibration data is delivered as a property of an AVCapturePhoto, so if you are using the CMSampleBuffer delegate callbacks rather than -captureOutput:didFinishProcessingPhoto:error:, an exception is thrown. Also, you may only set this property to YES if your AVCapturePhotoOutput's cameraCalibrationDataDeliverySupported property is YES. When requesting dual camera dual photo delivery plus camera calibration data, the wide and tele photos each contain camera calibration data for their respective camera. Note that AVCameraCalibrationData can be delivered as a property of an AVCapturePhoto or an AVDepthData, thus your delegate must respond to the captureOutput:didFinishProcessingPhoto:error: selector.
+ */
+@property(nonatomic, getter=isCameraCalibrationDataDeliveryEnabled) BOOL cameraCalibrationDataDeliveryEnabled NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property metadata
+ @abstract
+    A dictionary of metadata key/value pairs you'd like to have written to each photo in the capture request.
+
+ @discussion
+    Valid metadata keys are found in <ImageIO/CGImageProperties.h>. AVCapturePhotoOutput inserts a base set of metadata into each photo it captures, such as kCGImagePropertyOrientation, kCGImagePropertyExifDictionary, and kCGImagePropertyMakerAppleDictionary. You may specify metadata keys and values that should be written to each photo in the capture request. If you've specified metadata that also appears in AVCapturePhotoOutput's base set, your value replaces the base value. An NSInvalidArgumentException is thrown if you specify keys other than those found in <ImageIO/CGImageProperties.h>.
+ */
+@property(nonatomic, copy) NSDictionary<NSString *, id> *metadata NS_AVAILABLE_IOS(11_0);
+
+/*!
  @property livePhotoMovieFileURL
  @abstract
     Specifies that a Live Photo movie be captured to complement the still photo.
 
  @discussion
-    A Live Photo movie is a short movie (with audio, if you've added an audio input to your session) containing the moments right before and after the still photo. A QuickTime movie file will be written to disk at the URL specified if it is a valid file URL accessible to your app's sandbox. You may only set this property is AVCapturePhotoOutput's livePhotoCaptureSupported property is YES. When you specify a Live Photo, your AVCapturePhotoCaptureDelegate object must implement -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
+    A Live Photo movie is a short movie (with audio, if you've added an audio input to your session) containing the moments right before and after the still photo. A QuickTime movie file will be written to disk at the URL specified if it is a valid file URL accessible to your app's sandbox. You may only set this property if AVCapturePhotoOutput's livePhotoCaptureSupported property is YES. When you specify a Live Photo, your AVCapturePhotoCaptureDelegate object must implement -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
  */
 @property(nonatomic, copy, nullable) NSURL *livePhotoMovieFileURL;
 
 /*!
+ @property livePhotoVideoCodecType
+ @abstract
+    Specifies the video codec type to use when compressing video for the Live Photo movie complement.
+
+ @discussion
+    Prior to iOS 11, all Live Photo movie video tracks are compressed using H.264. Beginning in iOS 11, you can select the Live Photo movie video compression format by specifying one of the strings present in AVCapturePhotoOutput's availableLivePhotoVideoCodecTypes array.
+ */
+@property(nonatomic, copy) AVVideoCodecType livePhotoVideoCodecType NS_AVAILABLE_IOS(11_0);
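
A sketch of selecting the Live Photo movie codec introduced above, preferring HEVC when the output offers it and otherwise leaving the system default (the first entry in availableLivePhotoVideoCodecTypes):

```objc
#import <AVFoundation/AVFoundation.h>

// Opt in to HEVC for the Live Photo movie complement when available.
static void PreferHEVCForLivePhoto(AVCapturePhotoOutput *photoOutput,
                                   AVCapturePhotoSettings *settings)
{
    if ([photoOutput.availableLivePhotoVideoCodecTypes containsObject:AVVideoCodecTypeHEVC]) {
        settings.livePhotoVideoCodecType = AVVideoCodecTypeHEVC;
    }
}
```
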
+
+/*!
  @property livePhotoMovieMetadata
  @abstract
     Movie-level metadata to be written to the Live Photo movie.
@@ -682,13 +954,35 @@
     A dictionary of Core Video pixel buffer attributes specifying the preview photo format to be delivered along with the RAW or processed photo.
 
  @discussion
-    A dictionary of pixel buffer attributes specifying a smaller version of the RAW or processed photo for preview purposes. This image is sometimes referred to as a "thumbnail image". The kCVPixelBufferPixelFormatTypeKey is required and must be present in the receiver's -availablePreviewPhotoPixelFormatTypes array. Optional keys are { kCVPixelBufferWidthKey | kCVPixelBufferHeightKey }. If you wish to specify dimensions, you must add both width and height. Width and height must are only honored up to the display dimensions. If you specify a width and height whose aspect ratio differs from the RAW or processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved.
+    A dictionary of pixel buffer attributes specifying a smaller version of the RAW or processed photo for preview purposes. The kCVPixelBufferPixelFormatTypeKey is required and must be present in the receiver's -availablePreviewPhotoPixelFormatTypes array. Optional keys are { kCVPixelBufferWidthKey | kCVPixelBufferHeightKey }. If you wish to specify dimensions, you must add both width and height. Width and height are only honored up to the display dimensions. If you specify a width and height whose aspect ratio differs from the RAW or processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved.
  */
 @property(nonatomic, copy, nullable) NSDictionary<NSString *, id> *previewPhotoFormat;
 
+/*!
+ @property availableEmbeddedThumbnailPhotoCodecTypes
+ @abstract
+    An array of available AVVideoCodecKeys that may be used when specifying an embeddedThumbnailPhotoFormat.
+ 
+ @discussion
+    The array is sorted such that the thumbnail codec type that is most backward compatible is listed first (jpeg).
+ */
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableEmbeddedThumbnailPhotoCodecTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property embeddedThumbnailPhotoFormat
+ @abstract
+    A dictionary of AVVideoSettings keys specifying the thumbnail format to be written to the processed or RAW photo.
+
+ @discussion
+    A dictionary of AVVideoSettings keys specifying a smaller version of the RAW or processed photo to be embedded in that image before calling back the AVCapturePhotoCaptureDelegate. This image is sometimes referred to as a "thumbnail image". The AVVideoCodecKey is required and must be present in the receiver's -availableEmbeddedThumbnailPhotoCodecTypes array. Optional keys are { AVVideoWidthKey | AVVideoHeightKey }. If you wish to specify dimensions, you must specify both width and height. If you specify a width and height whose aspect ratio differs from the RAW or processed photo, the larger of the two dimensions is honored and aspect ratio of the RAW or processed photo is always preserved.
+ */
+@property(nonatomic, copy, nullable) NSDictionary<NSString *, id> *embeddedThumbnailPhotoFormat NS_AVAILABLE_IOS(11_0);
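
A sketch of the embedded-thumbnail format above, using only the keys the discussion says are honored (`settings` is a placeholder AVCapturePhotoSettings instance):

```objc
#import <AVFoundation/AVFoundation.h>

// Embed a 320x240 JPEG thumbnail in the captured photo container. If the
// aspect ratio differs from the photo, the larger dimension wins.
static void AddEmbeddedThumbnail(AVCapturePhotoSettings *settings)
{
    settings.embeddedThumbnailPhotoFormat = @{
        AVVideoCodecKey  : AVVideoCodecTypeJPEG,
        AVVideoWidthKey  : @320,
        AVVideoHeightKey : @240,
    };
}
```
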
+
 @end
 
 
+#pragma mark - AVCapturePhotoBracketSettings
+
 @class AVCapturePhotoBracketSettingsInternal;
 
 /*!
@@ -705,7 +999,7 @@
 @interface AVCapturePhotoBracketSettings : AVCapturePhotoSettings
 {
 @private
-	AVCapturePhotoBracketSettingsInternal *_bracketSettingsInternal;
+    AVCapturePhotoBracketSettingsInternal *_bracketSettingsInternal;
 }
 
 /*!
@@ -716,7 +1010,7 @@
  @param rawPixelFormatType
     One of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. May be set to 0 if you do not desire RAW capture.
  @param processedFormat
-    A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you only wish to capture RAW, you may pass a non-zero rawPixelFormatType and a nil processedFormat dictionary. If you pass a rawPixelFormatType of 0 AND a nil processedFormat dictionary, the default output of AVVideoCodecJPEG will be delivered.
+    A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you only wish to capture RAW, you may pass a non-zero rawPixelFormatType and a nil processedFormat dictionary. If you pass a rawPixelFormatType of 0 AND a nil processedFormat dictionary, the default output of AVVideoCodecTypeJPEG will be delivered.
  @param bracketedSettings
     An array of AVCaptureBracketedStillImageSettings objects (defined in AVCaptureStillImageOutput.h). All must be of the same type, either AVCaptureManualExposureBracketedStillImageSettings or AVCaptureAutoExposureBracketedStillImageSettings. bracketedSettings.count must be <= AVCapturePhotoOutput's -maxBracketedCapturePhotoCount.
  @result
@@ -727,7 +1021,32 @@
  
     AVCapturePhotoBracketSettings do not support flashMode, autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
  */
-+ (instancetype)photoBracketSettingsWithRawPixelFormatType:(OSType)rawPixelFormatType processedFormat:(nullable NSDictionary<NSString *, id> *)processedFormat bracketedSettings:(NSArray<AVCaptureBracketedStillImageSettings *> *)bracketedSettings;
++ (instancetype)photoBracketSettingsWithRawPixelFormatType:(OSType)rawPixelFormatType processedFormat:(nullable NSDictionary<NSString *, id> *)processedFormat bracketedSettings:(NSArray<__kindof AVCaptureBracketedStillImageSettings *> *)bracketedSettings;
+
+/*!
+ @method photoBracketSettingsWithRawPixelFormatType:rawFileType:processedFormat:processedFileType:bracketedSettings:
+ @abstract
+    Creates an instance of AVCapturePhotoBracketSettings.
+ 
+ @param rawPixelFormatType
+    One of the OSTypes contained in AVCapturePhotoOutput's -availableRawPhotoPixelFormatTypes array. May be set to 0 if you do not desire RAW capture.
+ @param rawFileType
+    The file container format to which the RAW image should be written. Pass nil if you have no preferred file container; a default container will be chosen for you.
+ @param processedFormat
+    A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property. If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array. If you are specifying a compressed format, the AVVideoCompressionPropertiesKey is also supported, with a payload dictionary containing a single AVVideoQualityKey. If you only wish to capture RAW, you may pass a non-zero rawPixelFormatType and a nil processedFormat dictionary. If you pass a rawPixelFormatType of 0 AND a nil processedFormat dictionary, the default output of AVVideoCodecTypeJPEG will be delivered.
+ @param processedFileType
+    The file container format to which the processed image should be written. Pass nil if you have no preferred file container; a default container will be chosen for you.
+ @param bracketedSettings
+    An array of AVCaptureBracketedStillImageSettings objects (defined in AVCaptureStillImageOutput.h). All must be of the same type, either AVCaptureManualExposureBracketedStillImageSettings or AVCaptureAutoExposureBracketedStillImageSettings. bracketedSettings.count must be <= AVCapturePhotoOutput's -maxBracketedCapturePhotoCount.
+ @result
+    An instance of AVCapturePhotoBracketSettings.
+ 
+ @discussion
+    An NSInvalidArgumentException is thrown if bracketedSettings is nil, contains zero elements, or mixes and matches different subclasses of AVCaptureBracketedStillImageSettings.
+ 
+    AVCapturePhotoBracketSettings do not support flashMode, autoStillImageStabilizationEnabled, livePhotoMovieFileURL or livePhotoMovieMetadata.
+ */
++ (instancetype)photoBracketSettingsWithRawPixelFormatType:(OSType)rawPixelFormatType rawFileType:(nullable AVFileType)rawFileType processedFormat:(nullable NSDictionary<NSString *, id> *)processedFormat processedFileType:(nullable AVFileType)processedFileType bracketedSettings:(NSArray<AVCaptureBracketedStillImageSettings *> *)bracketedSettings NS_AVAILABLE_IOS(11_0);
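
A sketch of the new factory method above for a three-shot auto-exposure bracket with RAW disabled; the JPEG processed format and AVFileTypeJPEG container are illustrative choices, not requirements:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: three-shot auto-exposure bracket, RAW disabled (iOS 11).
static AVCapturePhotoBracketSettings *MakeBracketSettings(void)
{
    NSArray<AVCaptureAutoExposureBracketedStillImageSettings *> *brackets = @[
        [AVCaptureAutoExposureBracketedStillImageSettings autoExposureSettingsWithExposureTargetBias:-1.0],
        [AVCaptureAutoExposureBracketedStillImageSettings autoExposureSettingsWithExposureTargetBias:0.0],
        [AVCaptureAutoExposureBracketedStillImageSettings autoExposureSettingsWithExposureTargetBias:1.0],
    ];
    return [AVCapturePhotoBracketSettings
        photoBracketSettingsWithRawPixelFormatType:0
                                       rawFileType:nil
                                   processedFormat:@{ AVVideoCodecKey : AVVideoCodecTypeJPEG }
                                 processedFileType:AVFileTypeJPEG
                                 bracketedSettings:brackets];
}
```
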
 
 /*!
  @property bracketedSettings
@@ -737,7 +1056,7 @@
  @discussion
     This read-only property never returns nil.
  */
-@property(nonatomic, readonly) NSArray<AVCaptureBracketedStillImageSettings *> *bracketedSettings;
+@property(nonatomic, readonly) NSArray<__kindof AVCaptureBracketedStillImageSettings *> *bracketedSettings;
 
 /*!
  @property lensStabilizationEnabled
@@ -752,6 +1071,8 @@
 @end
 
 
+#pragma mark - AVCaptureResolvedPhotoSettings
+
 @class AVCaptureResolvedPhotoSettingsInternal;
 
 /*!
@@ -766,21 +1087,10 @@
 @interface AVCaptureResolvedPhotoSettings : NSObject
 {
 @private
-	AVCaptureResolvedPhotoSettingsInternal *_internal;
+    AVCaptureResolvedPhotoSettingsInternal *_internal;
 }
 
-/*!
- @method init
- @abstract
-    You may not create an instance of AVCaptureResolvedPhotoSettings directly.
- 
- @result
-    An instance of AVCaptureResolvedPhotoSettings (unavailable)
-
- @discussion
-    An instance of AVCaptureResolvedPhotoSettings is handed to you in each of the AVCapturePhotoCaptureDelegate callback methods.
- */
-- (instancetype)init NS_UNAVAILABLE;
+AV_INIT_UNAVAILABLE
 
 /*!
  @property uniqueID
@@ -820,6 +1130,16 @@
 @property(readonly) CMVideoDimensions previewDimensions;
 
 /*!
+ @property embeddedThumbnailDimensions
+ @abstract
+    The resolved dimensions of the embedded thumbnail that will be written to the processed or RAW photo delivered to the -captureOutput:didFinishProcessing{Photo | RawPhoto}... AVCapturePhotoCaptureDelegate callbacks.
+
+ @discussion
+    If you don't request an embedded thumbnail image, embeddedThumbnailDimensions resolves to { 0, 0 }.
+ */
+@property(readonly) CMVideoDimensions embeddedThumbnailDimensions NS_AVAILABLE_IOS(11_0);
+
+/*!
  @property livePhotoMovieDimensions
  @abstract
     The resolved dimensions of the video track in the movie that will be delivered to the -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error: callback.
@@ -853,6 +1173,261 @@
  */
 @property(readonly, getter=isDualCameraFusionEnabled) BOOL dualCameraFusionEnabled NS_AVAILABLE_IOS(10_2);
 
+/*!
+ @property expectedPhotoCount
+ @abstract
+    Indicates the number of times your -captureOutput:didFinishProcessingPhoto:error: callback will be called. For instance, if you've requested an auto exposure bracket of 3 with JPEG and RAW, the expectedPhotoCount is 6.
+ */
+@property(readonly) NSUInteger expectedPhotoCount NS_AVAILABLE_IOS(11_0);
+
+@end
+
+
+#pragma mark - AVCapturePhoto
+
+@class AVCapturePhotoInternal;
+@class AVDepthData;
+@class AVCameraCalibrationData;
+
+/*!
+ @class AVCapturePhoto
+ @abstract
+    An object representing a photo in memory, produced by the -captureOutput:didFinishProcessingPhoto:error: method of the AVCapturePhotoCaptureDelegate protocol.
+ 
+ @discussion
+    Beginning in iOS 11, AVCapturePhotoOutput's AVCapturePhotoCaptureDelegate supports a simplified callback for delivering image data, namely -captureOutput:didFinishProcessingPhoto:error:. This callback presents each image result for your capture request as an AVCapturePhoto object, an immutable wrapper from which various properties of the photo capture may be queried, such as the photo's preview pixel buffer, metadata, depth data, camera calibration data, and image bracket specific properties. AVCapturePhoto can wrap file-containerized photo results, such as HEVC encoded image data containerized in the HEIC file format. CMSampleBufferRef, on the other hand, may only be used to express photo data that is not containerized in a file format. For this reason, the AVCapturePhotoCaptureDelegate protocol methods that return CMSampleBuffers have been deprecated in favor of -captureOutput:didFinishProcessingPhoto:error:. An AVCapturePhoto wraps a single image result. For instance, if you've requested a bracketed capture of 3 images, your callback is called 3 times, each time delivering an AVCapturePhoto.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED
+@interface AVCapturePhoto : NSObject
+{
+@private
+    AVCapturePhotoInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property timestamp
+ @abstract
+    The time at which this image was captured, synchronized to the masterClock of the AVCaptureSession.
+
+ @discussion
+    The timestamp property indicates the time the image was captured, and is analogous to CMSampleBufferGetPresentationTimeStamp(). If an error was provided in the -captureOutput:didFinishProcessingPhoto:error: callback, timestamp returns kCMTimeInvalid.
+ */
+@property(readonly) CMTime timestamp;
+
+/*!
+ @property rawPhoto
+ @abstract
+    This property returns YES if this photo is a RAW image.
+
+ @discussion
+    Your AVCapturePhotoCaptureDelegate's -captureOutput:didFinishProcessingPhoto:error: method may be called one or more times with image results, including RAW or non-RAW images. This property distinguishes RAW from non-RAW image results, for instance, if you've requested a RAW + JPEG capture.
+ */
+@property(readonly, getter=isRawPhoto) BOOL rawPhoto;
+
+/*!
+ @property pixelBuffer
+ @abstract
+    For uncompressed or RAW captures, this property offers access to the pixel data.
+
+ @discussion
+    Uncompressed captures, such as '420f' or 'BGRA', or RAW captures, such as 'bgg4', present pixel data as a CVPixelBuffer. This property is analogous to CMSampleBufferGetImageBuffer(). The pixel buffer contains only the minimal attachments required for correct display. Compressed captures, such as 'jpeg', return nil.
+ */
+@property(nullable, readonly) CVPixelBufferRef pixelBuffer NS_RETURNS_INNER_POINTER;
+
+/*!
+ @property previewPixelBuffer
+ @abstract
+    This property offers access to the preview image pixel data if you've requested it.
+
+ @discussion
+    If you requested a preview image by calling -[AVCapturePhotoSettings setPreviewPhotoFormat:] with a non-nil value, this property offers access to the resulting preview image pixel data, and is analogous to CMSampleBufferGetImageBuffer(). The pixel buffer contains only the minimal attachments required for correct display. Nil is returned if you did not request a preview image.
+ */
+@property(nullable, readonly) CVPixelBufferRef previewPixelBuffer NS_RETURNS_INNER_POINTER;
+
+/*!
+ @property embeddedThumbnailPhotoFormat
+ @abstract
+    The format of the embedded thumbnail contained in this AVCapturePhoto.
+
+ @discussion
+    If you requested an embedded thumbnail image by calling -[AVCapturePhotoSettings setEmbeddedThumbnailPhotoFormat:] with a non-nil value, this property offers access to the resolved embedded thumbnail AVVideoSettings dictionary. Nil is returned if you did not request an embedded thumbnail image.
+ */
+@property(nullable, readonly) NSDictionary<NSString *, id> *embeddedThumbnailPhotoFormat;
+
+/*!
+ @property depthData
+ @abstract
+    An AVDepthData object wrapping a disparity/depth map associated with this photo.
+
+ @discussion
+    If you requested depth data delivery by calling -[AVCapturePhotoSettings setDepthDataDeliveryEnabled:YES], this property offers access to the resulting AVDepthData object. Nil is returned if you did not request depth data delivery. Note that the depth data is only embedded in the photo's internal file format container if you set -[AVCapturePhotoSettings setEmbedsDepthDataInPhoto:YES].
+ */
+@property(nullable, readonly) AVDepthData *depthData;
+
+/*!
+ @property metadata
+ @abstract
+    An ImageIO property style dictionary of metadata associated with this photo.
+
+ @discussion
+    Valid metadata keys are found in <ImageIO/CGImageProperties.h>, such as kCGImagePropertyOrientation, kCGImagePropertyExifDictionary, kCGImagePropertyMakerAppleDictionary, etc.
+ */
+@property(readonly) NSDictionary<NSString *, id> *metadata;
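
For illustration, a sketch reading common ImageIO keys back out of the metadata dictionary; `photo` is an AVCapturePhoto delivered to your delegate:

```objc
#import <AVFoundation/AVFoundation.h>
#import <ImageIO/ImageIO.h>

// Sketch: pull the orientation and EXIF sub-dictionary out of the metadata.
static void InspectMetadata(AVCapturePhoto *photo)
{
    NSNumber *orientation = photo.metadata[(__bridge NSString *)kCGImagePropertyOrientation];
    NSDictionary *exif = photo.metadata[(__bridge NSString *)kCGImagePropertyExifDictionary];
    NSLog(@"orientation=%@ exif=%@", orientation, exif);
}
```
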
+
+/*!
+ @property cameraCalibrationData
+ @abstract
+    An AVCameraCalibrationData object representing the calibration information for the camera providing the photo.
+
+ @discussion
+    Camera calibration data is only present if you call -[AVCapturePhotoSettings setCameraCalibrationDataDeliveryEnabled:YES]. When requesting dualCameraDualPhotoDeliveryEnabled plus cameraCalibrationDataDeliveryEnabled, camera calibration information is delivered with both photos. Telephoto camera calibration data is presented with the telephoto AVCapturePhoto, and wide-angle camera calibration data is presented with the wide AVCapturePhoto.
+ */
+@property(nullable, readonly) AVCameraCalibrationData *cameraCalibrationData;
+
+/*!
+ @property resolvedSettings
+ @abstract
+    The AVCaptureResolvedPhotoSettings associated with all photo results for a given -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] request.
+
+ @discussion
+    Even in the event of an error, the resolved settings are always non-nil.
+ */
+@property(readonly) AVCaptureResolvedPhotoSettings *resolvedSettings;
+
+/*!
+ @property photoCount
+ @abstract
+    This photo's index (1-based) in the total expected photo count.
+
+ @discussion
+    The resolvedSettings.expectedPhotoCount property indicates the total number of images that will be returned for a given capture request. This property indicates this photo's index (1-based). When you receive a -captureOutput:didFinishProcessingPhoto:error: callback with a photo whose photoCount matches resolvedSettings.expectedPhotoCount, you know you've received the last one for the given capture request.
+ */
+@property(readonly) NSInteger photoCount;
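
A sketch of the delegate-side bookkeeping this enables, comparing photoCount against resolvedSettings.expectedPhotoCount:

```objc
// Sketch: detect the final photo of a capture request (iOS 11 delegate callback).
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhoto:(AVCapturePhoto *)photo
                error:(nullable NSError *)error
{
    if (error) {
        return; // resolvedSettings is still non-nil here if needed
    }
    if (photo.photoCount == photo.resolvedSettings.expectedPhotoCount) {
        // The last photo for this capture request has been delivered.
    }
}
```
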
+
+/*!
+ @property sourceDeviceType
+ @abstract
+    The device type of the source camera providing the photo.
+
+ @discussion
+    When taking a dual photo capture from the DualCamera, you may query this property to find out the source of the photo: AVCaptureDeviceTypeBuiltInWideAngleCamera, or AVCaptureDeviceTypeBuiltInTelephotoCamera. For all other types of capture, the source device type is equal to the -[AVCaptureDevice deviceType] of the AVCaptureDevice to which the AVCapturePhotoOutput is connected. May return nil if the source of the photo is not an AVCaptureDevice.
+ */
+@property(nullable, readonly) AVCaptureDeviceType sourceDeviceType;
+
+@end
+
+
+@interface AVCapturePhoto (AVCapturePhotoConversions)
+
+/*!
+ @method fileDataRepresentation
+ @abstract
+    Flattens the AVCapturePhoto to an NSData using the file container format (processedFileType or rawFileType) specified in the AVCapturePhotoSettings (e.g. JFIF, HEIF, DNG).
+ 
+ @result
+    An NSData containing bits in the file container's format, or nil if the flattening process fails.
+ */
+- (nullable NSData *)fileDataRepresentation NS_AVAILABLE_IOS(11_0);
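
A sketch of flattening a delivered photo to disk; the destination URL and file name are assumptions of the example:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: write the containerized photo bits (e.g. HEIC/JFIF/DNG) to a file.
static BOOL WritePhoto(AVCapturePhoto *photo)
{
    NSData *data = [photo fileDataRepresentation];
    if (!data) {
        return NO; // flattening failed
    }
    NSURL *url = [[NSFileManager defaultManager].temporaryDirectory
                  URLByAppendingPathComponent:@"capture.heic"]; // illustrative name
    return [data writeToURL:url atomically:YES];
}
```
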
+
+/*!
+ @method fileDataRepresentationWithReplacementMetadata:replacementEmbeddedThumbnailPhotoFormat:replacementEmbeddedThumbnailPixelBuffer:replacementDepthData:
+ @abstract
+    Flattens the AVCapturePhoto to an NSData using the file container format (processedFileType or rawFileType) specified in the AVCapturePhotoSettings (e.g. JFIF, HEIF, DNG), and allows you to replace metadata, thumbnail, and depth data in the process.
+ 
+ @param replacementMetadata
+    A dictionary of keys and values from <ImageIO/CGImageProperties.h>. To preserve existing metadata to the file, pass self.metadata. To strip existing metadata, pass nil. To replace metadata, pass a replacement dictionary.
+ @param replacementEmbeddedThumbnailPhotoFormat
+    A dictionary of keys and values from <AVFoundation/AVVideoSettings.h>. If you pass a non-nil dictionary, AVVideoCodecKey is required, with AVVideoWidthKey and AVVideoHeightKey being optional. To preserve the existing embedded thumbnail photo to the file, pass self.embeddedThumbnailPhotoFormat and pass nil as your replacementEmbeddedThumbnailPixelBuffer parameter. To strip the existing embedded thumbnail, pass nil for both replacementEmbeddedThumbnailPhotoFormat and replacementEmbeddedThumbnailPixelBuffer. To replace the existing embedded thumbnail photo, pass both a non-nil replacementEmbeddedThumbnailPixelBuffer and replacementEmbeddedThumbnailPhotoFormat dictionary.
+ @param replacementEmbeddedThumbnailPixelBuffer
+    A pixel buffer containing a source image to be encoded to the file as the replacement thumbnail image. To preserve the existing embedded thumbnail photo to the file, pass self.embeddedThumbnailPhotoFormat as your replacementEmbeddedThumbnailPhotoFormat parameter and nil as your replacementEmbeddedThumbnailPixelBuffer parameter. To strip the existing embedded thumbnail, pass nil for both replacementEmbeddedThumbnailPhotoFormat and replacementEmbeddedThumbnailPixelBuffer. To replace the existing embedded thumbnail photo, pass both a non-nil replacementEmbeddedThumbnailPixelBuffer and replacementEmbeddedThumbnailPhotoFormat dictionary.
+ @param replacementDepthData
+    Replacement depth data to be written to the flattened file container. To preserve existing depth data to the file, pass self.depthData. To strip it, pass nil. To replace it, pass a new AVDepthData instance.
+ @result
+    An NSData containing bits in the file container's format, or nil if the flattening process fails.
+ */
+- (nullable NSData *)fileDataRepresentationWithReplacementMetadata:(nullable NSDictionary<NSString *, id> *)replacementMetadata replacementEmbeddedThumbnailPhotoFormat:(nullable NSDictionary<NSString *, id> *)replacementEmbeddedThumbnailPhotoFormat replacementEmbeddedThumbnailPixelBuffer:(nullable CVPixelBufferRef)replacementEmbeddedThumbnailPixelBuffer replacementDepthData:(nullable AVDepthData *)replacementDepthData NS_AVAILABLE_IOS(11_0);
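
Following the parameter rules above, a sketch that preserves the existing metadata and embedded thumbnail while stripping the depth data:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: re-flatten, keeping metadata and thumbnail, dropping depth data.
static NSData *FlattenWithoutDepth(AVCapturePhoto *photo)
{
    return [photo fileDataRepresentationWithReplacementMetadata:photo.metadata
                        replacementEmbeddedThumbnailPhotoFormat:photo.embeddedThumbnailPhotoFormat
                        replacementEmbeddedThumbnailPixelBuffer:NULL  // keep the existing thumbnail
                                           replacementDepthData:nil]; // strip depth data
}
```
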
+
+/*!
+ @method CGImageRepresentation
+ @abstract
+    Utility method that converts the AVCapturePhoto's primary photo to a CGImage.
+ 
+ @result
+    A CGImageRef, or nil if the conversion process fails.
+ */
+- (nullable CGImageRef)CGImageRepresentation NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method previewCGImageRepresentation
+ @abstract
+    Utility method that converts the AVCapturePhoto's preview photo to a CGImage.
+ 
+ @result
+    A CGImageRef, or nil if the conversion process fails, or if you did not request a preview photo.
+ */
+- (nullable CGImageRef)previewCGImageRepresentation NS_AVAILABLE_IOS(11_0);
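
A sketch of turning the preview result into a UIImage for display; the UIKit usage is an assumption of the example, not part of the header:

```objc
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

// Sketch: preview photo -> UIImage; nil if no preview was requested.
static UIImage *PreviewImage(AVCapturePhoto *photo)
{
    CGImageRef cgImage = [photo previewCGImageRepresentation];
    return cgImage ? [UIImage imageWithCGImage:cgImage] : nil;
}
```
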
+
+@end
+
+
+/*!
+ @enum AVCaptureLensStabilizationStatus
+ @abstract
+    Constants indicating the status of the lens stabilization module (aka OIS).
+
+ @constant AVCaptureLensStabilizationStatusUnsupported
+    Indicates that lens stabilization is unsupported.
+ @constant AVCaptureLensStabilizationStatusOff
+    Indicates that lens stabilization was not in use for this capture.
+ @constant AVCaptureLensStabilizationStatusActive
+    Indicates that the lens stabilization module was active for the duration of the capture.
+ @constant AVCaptureLensStabilizationStatusOutOfRange
+    Indicates that device motion or capture duration exceeded the stabilization module's correction limits.
+ @constant AVCaptureLensStabilizationStatusUnavailable
+    Indicates that the lens stabilization module was unavailable for use at the time of capture. The module may be available in subsequent captures.
+ */
+typedef NS_ENUM(NSInteger, AVCaptureLensStabilizationStatus) {
+    AVCaptureLensStabilizationStatusUnsupported = 0,
+    AVCaptureLensStabilizationStatusOff         = 1,
+    AVCaptureLensStabilizationStatusActive      = 2,
+    AVCaptureLensStabilizationStatusOutOfRange  = 3,
+    AVCaptureLensStabilizationStatusUnavailable = 4,
+} NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED;
+
+@interface AVCapturePhoto (AVCapturePhotoBracketedCapture)
+
+/*!
+ @property bracketSettings
+ @abstract
+    The AVCaptureBracketedStillImageSettings associated with this photo.
+
+ @discussion
+    When specifying a bracketed capture using AVCapturePhotoBracketSettings, you specify an array of AVCaptureBracketedStillImageSettings -- one per image in the bracket. This property indicates the AVCaptureBracketedStillImageSettings associated with this particular photo, or nil if this photo is not part of a bracketed capture.
+ */
+@property(nullable, readonly) AVCaptureBracketedStillImageSettings *bracketSettings NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property sequenceCount
+ @abstract
+    1-based sequence count of the photo.
+
+ @discussion
+    If this photo is part of a bracketed capture (invoked using AVCapturePhotoBracketSettings), this property indicates the current result's count in the sequence, starting with 1 for the first result, or 0 if this photo is not part of a bracketed capture.
+ */
+@property(readonly) NSInteger sequenceCount NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property lensStabilizationStatus
+ @abstract
+    The status of the lens stabilization module during capture of this photo.
+
+ @discussion
+    In configurations where lens stabilization (OIS) is unsupported, AVCaptureLensStabilizationStatusUnsupported is returned. If lens stabilization is supported, but this photo is not part of a bracketed capture in which -[AVCapturePhotoBracketSettings setLensStabilizationEnabled:YES] was called, AVCaptureLensStabilizationStatusOff is returned. Otherwise a lens stabilization status is returned indicating how lens stabilization was applied during the capture.
+ */
+@property(readonly) AVCaptureLensStabilizationStatus lensStabilizationStatus NS_AVAILABLE_IOS(11_0);
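
For illustration, a sketch that branches on how lens stabilization behaved for a bracketed result:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: inspect OIS engagement for this capture.
static void CheckOIS(AVCapturePhoto *photo)
{
    switch (photo.lensStabilizationStatus) {
        case AVCaptureLensStabilizationStatusActive:
            break; // OIS engaged for the full exposure
        case AVCaptureLensStabilizationStatusOutOfRange:
            break; // motion exceeded the module's correction limits
        default:
            break; // unsupported, off, or unavailable
    }
}
```
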
+
 @end
 
 NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h	2016-08-05 01:59:19.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h	2017-05-24 00:41:54.000000000 -0400
@@ -1,17 +1,20 @@
 /*
     File:  AVCaptureSession.h
-
-	Framework:  AVFoundation
-
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
 #import <AVFoundation/AVCaptureDevice.h>
+#import <AVFoundation/AVCaptureSessionPreset.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMFormatDescription.h>
 #import <CoreMedia/CMSync.h>
 
+NS_ASSUME_NONNULL_BEGIN
+
 /*!
  @constant AVCaptureSessionRuntimeErrorNotification
  @abstract
@@ -52,8 +55,6 @@
  */
 AVF_EXPORT NSString *const AVCaptureSessionDidStopRunningNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
-#if TARGET_OS_IPHONE
-
 /*!
  @constant AVCaptureSessionWasInterruptedNotification
  @abstract
@@ -66,6 +67,7 @@
  */
 AVF_EXPORT NSString *const AVCaptureSessionWasInterruptedNotification NS_AVAILABLE_IOS(4_0) __TVOS_PROHIBITED;
 
+
 /*!
  @enum AVCaptureSessionInterruptionReason
  @abstract
@@ -87,6 +89,7 @@
     AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps = 4,
 } NS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED;
 
+
 /*!
  @constant AVCaptureSessionInterruptionReasonKey
  @abstract
@@ -107,7 +110,6 @@
  */
 AVF_EXPORT NSString *const AVCaptureSessionInterruptionEndedNotification NS_AVAILABLE_IOS(4_0) __TVOS_PROHIBITED;
 
-#endif // TARGET_OS_IPHONE
 
 /*!
  @enum AVCaptureVideoOrientation
@@ -130,157 +132,8 @@
     AVCaptureVideoOrientationLandscapeLeft      = 4,
 } NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
-/*!
- @constant AVCaptureSessionPresetPhoto
- @abstract
-    An AVCaptureSession preset suitable for high resolution photo quality output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetPhoto for full resolution photo quality output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetPhoto NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
-
-/*!
- @constant AVCaptureSessionPresetHigh
- @abstract
-    An AVCaptureSession preset suitable for high quality video and audio output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetHigh to achieve high quality video and audio output. AVCaptureSessionPresetHigh is the default sessionPreset value.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetHigh NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
-
-/*!
- @constant AVCaptureSessionPresetMedium
- @abstract
-    An AVCaptureSession preset suitable for medium quality output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetMedium to achieve output video and audio bitrates suitable for sharing over WiFi.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetMedium NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
-/*!
- @constant AVCaptureSessionPresetLow
- @abstract
-    An AVCaptureSession preset suitable for low quality output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetLow to achieve output video and audio bitrates suitable for sharing over 3G.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetLow NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @constant AVCaptureSessionPreset320x240
- @abstract
-    An AVCaptureSession preset suitable for 320x240 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset320x240 to achieve 320x240 output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset320x240 NS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED;
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @constant AVCaptureSessionPreset352x288
- @abstract
-    An AVCaptureSession preset suitable for 352x288 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset352x288 to achieve CIF quality (352x288) output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset352x288 NS_AVAILABLE(10_7, 5_0) __TVOS_PROHIBITED;
-
-/*!
- @constant AVCaptureSessionPreset640x480
- @abstract
-    An AVCaptureSession preset suitable for 640x480 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset640x480 to achieve VGA quality (640x480) output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset640x480 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @constant AVCaptureSessionPreset960x540
- @abstract
-    An AVCaptureSession preset suitable for 960x540 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset960x540 to achieve quarter HD quality (960x540) output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset960x540 NS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED;
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @constant AVCaptureSessionPreset1280x720
- @abstract
-    An AVCaptureSession preset suitable for 1280x720 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1280x720 to achieve 1280x720 output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset1280x720 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
-
-#if TARGET_OS_IPHONE
-
-/*!
- @constant AVCaptureSessionPreset1920x1080
- @abstract
-    An AVCaptureSession preset suitable for 1920x1080 video output.
- 
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1920x1080 to achieve 1920x1080 output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset1920x1080 NS_AVAILABLE(NA, 5_0) __TVOS_PROHIBITED;
-
-/*!
- @constant AVCaptureSessionPreset3840x2160
- @abstract
-    An AVCaptureSession preset suitable for 3840x2160 (UHD 4K) video output.
-
- @discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset3840x2160 to achieve 3840x2160 output.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPreset3840x2160 NS_AVAILABLE(NA, 9_0) __TVOS_PROHIBITED;
-
-#endif // TARGET_OS_IPHONE
-
-/*!
-@constant AVCaptureSessionPresetiFrame960x540
-@abstract
-    An AVCaptureSession preset producing 960x540 Apple iFrame video and audio content.
-
-@discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame960x540 to achieve 960x540 quality iFrame H.264 video at ~30 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetiFrame960x540 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED;
-
-/*!
-@constant AVCaptureSessionPresetiFrame1280x720
-@abstract
-    An AVCaptureSession preset producing 1280x720 Apple iFrame video and audio content.
-
-@discussion
-    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame1280x720 to achieve 1280x720 quality iFrame H.264 video at ~40 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetiFrame1280x720 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED;
-
-/*!
-@constant AVCaptureSessionPresetInputPriority
-@abstract
-    An AVCaptureSession preset indicating that the formats of the session's inputs are being given priority.
-
-@discussion
-    By calling -setSessionPreset:, clients can easily configure an AVCaptureSession to produce a desired quality of service level. The session configures its inputs and outputs optimally to produce the QoS level indicated. Clients who need to ensure a particular input format is chosen can use AVCaptureDevice's -setActiveFormat: method. When a client sets the active format on a device, the associated session's -sessionPreset property automatically changes to AVCaptureSessionPresetInputPriority. This change indicates that the input format selected by the client now dictates the quality of service level provided at the outputs. When a client sets the session preset to anything other than AVCaptureSessionPresetInputPriority, the session resumes responsibility for configuring inputs and outputs, and is free to change its inputs' activeFormat as needed.
- */
-AVF_EXPORT NSString *const AVCaptureSessionPresetInputPriority NS_AVAILABLE(NA, 7_0) __TVOS_PROHIBITED;
+#pragma mark - AVCaptureSession
 
 @class AVCaptureInput;
 @class AVCaptureOutput;
@@ -299,7 +152,7 @@
 @interface AVCaptureSession : NSObject 
 {
 @private
-	AVCaptureSessionInternal *_internal;
+    AVCaptureSessionInternal *_internal;
 }
 
 /*!
@@ -315,7 +168,7 @@
  @discussion
     An AVCaptureSession instance can be associated with a preset that configures its inputs and outputs to fulfill common use cases. This method can be used to determine if the receiver supports the desired preset given its current input and output configuration. The receiver's sessionPreset property may only be set to a certain preset if this method returns YES for that preset.
  */
-- (BOOL)canSetSessionPreset:(NSString*)preset;
+- (BOOL)canSetSessionPreset:(AVCaptureSessionPreset)preset;
 
 /*!
  @property sessionPreset
@@ -323,19 +176,19 @@
     Indicates the session preset currently in use by the receiver.
  
  @discussion
-    The value of this property is an NSString (one of AVCaptureSessionPreset*) indicating the current session preset in use by the receiver. The sessionPreset property may be set while the receiver is running.
+    The value of this property is an AVCaptureSessionPreset indicating the current session preset in use by the receiver. The sessionPreset property may be set while the receiver is running.
  */
-@property(nonatomic, copy) NSString *sessionPreset;
+@property(nonatomic, copy) AVCaptureSessionPreset sessionPreset;
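
With the new typed AVCaptureSessionPreset, the check-then-set pattern is unchanged; a sketch, assuming an already configured session:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: validate a preset against the current input/output configuration
// before applying it.
static void ApplyHDPreset(AVCaptureSession *session)
{
    if ([session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        session.sessionPreset = AVCaptureSessionPreset1280x720;
    }
}
```
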
 
 /*!
  @property inputs
  @abstract
     An NSArray of AVCaptureInputs currently added to the receiver.
-
+ 
  @discussion
     The value of this property is an NSArray of AVCaptureInputs currently added to the receiver. Clients can add AVCaptureInputs to a session by calling -addInput:.
  */
-@property(nonatomic, readonly) NSArray *inputs;
+@property(nonatomic, readonly) NSArray<__kindof AVCaptureInput *> *inputs;
 
 /*!
  @method canAddInput:
@@ -382,11 +235,11 @@
  @property outputs
  @abstract
     An NSArray of AVCaptureOutputs currently added to the receiver.
-
+ 
  @discussion
     The value of this property is an NSArray of AVCaptureOutputs currently added to the receiver. Clients can add AVCaptureOutputs to a session by calling -addOutput:.
  */
-@property(nonatomic, readonly) NSArray *outputs;
+@property(nonatomic, readonly) NSArray<__kindof AVCaptureOutput *> *outputs;
 
 /*!
  @method canAddOutput:
@@ -498,7 +351,7 @@
  @method beginConfiguration
  @abstract
     When paired with commitConfiguration, allows a client to batch multiple configuration operations on a running session into atomic updates.
-
+ 
  @discussion
     -beginConfiguration / -commitConfiguration are AVCaptureSession's mechanism for batching multiple configuration operations on a running session into atomic updates. After calling [session beginConfiguration], clients may add or remove outputs, alter the sessionPreset, or configure individual AVCaptureInput or Output properties. All changes will be pended until the client calls [session commitConfiguration], at which time they will be applied together. -beginConfiguration / -commitConfiguration pairs may be nested, and will only be applied when the outermost commit is invoked.
  */
@@ -508,7 +361,7 @@
  @method commitConfiguration
  @abstract
     When preceded by beginConfiguration, allows a client to batch multiple configuration operations on a running session into atomic updates.
-
+ 
  @discussion
     -beginConfiguration / -commitConfiguration are AVCaptureSession's mechanism for batching multiple configuration operations on a running session into atomic updates. After calling [session beginConfiguration], clients may add or remove outputs, alter the sessionPreset, or configure individual AVCaptureInput or Output properties. All changes will be pended until the client calls [session commitConfiguration], at which time they will be applied together. -beginConfiguration / -commitConfiguration pairs may be nested, and will only be applied when the outermost commit is invoked.
  */
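
A sketch of the atomic-update pattern described above; the oldInput/newInput swap is an illustrative scenario:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch: batch several changes so they take effect together on a running session.
static void SwapInput(AVCaptureSession *session,
                      AVCaptureDeviceInput *oldInput,
                      AVCaptureDeviceInput *newInput)
{
    [session beginConfiguration];
    [session removeInput:oldInput];
    if ([session canAddInput:newInput]) {
        [session addInput:newInput];
    }
    [session commitConfiguration]; // pended changes applied atomically here
}
```
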
@@ -524,7 +377,6 @@
  */
 @property(nonatomic, readonly, getter=isRunning) BOOL running;
 
-
 #if TARGET_OS_IPHONE
 
 /*!
@@ -573,7 +425,7 @@
  @method startRunning
  @abstract
     Starts an AVCaptureSession instance running.
-
+ 
  @discussion
     Clients invoke -startRunning to start the flow of data from inputs to outputs connected to the AVCaptureSession instance. This call blocks until the session object has completely started up or failed. A failure to start running is reported through the AVCaptureSessionRuntimeErrorNotification mechanism.
  */
@@ -583,7 +435,7 @@
  @method stopRunning
  @abstract
     Stops an AVCaptureSession instance that is currently running.
-
+ 
  @discussion
     Clients invoke -stopRunning to stop the flow of data from inputs to outputs connected to the AVCaptureSession instance. This call blocks until the session object has completely stopped.
  */
@@ -595,7 +447,7 @@
     Provides the master clock being used for output synchronization.
  @discussion
     The masterClock is readonly. Use masterClock to synchronize AVCaptureOutput data with external data sources (e.g. motion samples). All capture output sample buffer timestamps are on the masterClock timebase.
-	
+ 
     For example, if you want to reverse synchronize the output timestamps to the original timestamps, you can do the following: In captureOutput:didOutputSampleBuffer:fromConnection:
  
     AVCaptureInputPort *port = [[connection inputPorts] objectAtIndex:0];
@@ -606,12 +458,11 @@
  
     This property is key-value observable.
  */
-@property(nonatomic, readonly) __attribute__((NSObject)) CMClockRef masterClock NS_AVAILABLE(10_9, 7_0);
+@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMClockRef masterClock NS_AVAILABLE(10_9, 7_0);
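
Completing the header's truncated example, a sketch that converts an output sample buffer's timestamp from the session's masterClock back to the originating device clock:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CMSync.h>

// Sketch: reverse-synchronize a timestamp to the input port's clock, e.g.
// inside -captureOutput:didOutputSampleBuffer:fromConnection:.
static CMTime OriginalTimestamp(AVCaptureSession *session,
                                AVCaptureConnection *connection,
                                CMSampleBufferRef sampleBuffer)
{
    AVCaptureInputPort *port = connection.inputPorts.firstObject;
    CMTime syncedPTS = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    return CMSyncConvertTime(syncedPTS, session.masterClock, port.clock);
}
```
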
 
 @end
 
 
-
 /*!
  @enum AVVideoFieldMode
  @abstract
@@ -631,8 +482,10 @@
     AVVideoFieldModeTopOnly     = 1,
     AVVideoFieldModeBottomOnly  = 2,
     AVVideoFieldModeDeinterlace = 3,
-} NS_AVAILABLE(10_7, NA) __TVOS_PROHIBITED;
+} NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED;
+
 
+#pragma mark - AVCaptureConnection
 
 @class AVCaptureAudioChannel;
 @class AVCaptureVideoPreviewLayer;
@@ -646,16 +499,16 @@
  
  @discussion
     AVCaptureInputs have one or more AVCaptureInputPorts. AVCaptureOutputs can accept data from one or more sources (example - an AVCaptureMovieFileOutput accepts both video and audio data). AVCaptureVideoPreviewLayers can accept data from one AVCaptureInputPort whose mediaType is AVMediaTypeVideo. When an input or output is added to a session, or a video preview layer is associated with a session, the session greedily forms connections between all the compatible AVCaptureInputs' ports and AVCaptureOutputs or AVCaptureVideoPreviewLayers. Iterating through an output's connections or a video preview layer's sole connection, a client may enable or disable the flow of data from a given input to a given output or preview layer.
-     
+ 
     Connections involving audio expose an array of AVCaptureAudioChannel objects, which can be used for monitoring levels.
-
+ 
     Connections involving video expose video specific properties, such as videoMirrored and videoOrientation.
  */
 NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureConnection : NSObject 
 {
 @private
-	AVCaptureConnectionInternal *_internal;
+    AVCaptureConnectionInternal *_internal;
 }
 
 /*!
@@ -673,7 +526,7 @@
  @discussion
     This method returns an instance of AVCaptureConnection that may be subsequently added to an AVCaptureSession instance using AVCaptureSession's -addConnection: method. When using -addInput: or -addOutput:, connections are formed between all compatible inputs and outputs automatically. You do not need to manually create and add connections to the session unless you use the primitive -addInputWithNoConnections: or -addOutputWithNoConnections: methods.
  */
-+ (instancetype)connectionWithInputPorts:(NSArray *)ports output:(AVCaptureOutput *)output NS_AVAILABLE(10_7, 8_0);
++ (instancetype)connectionWithInputPorts:(NSArray<AVCaptureInputPort *> *)ports output:(AVCaptureOutput *)output NS_AVAILABLE(10_7, 8_0);
 
 /*!
  @method connectionWithInputPort:videoPreviewLayer:
@@ -707,7 +560,7 @@
  @discussion
     This method returns an instance of AVCaptureConnection that may be subsequently added to an AVCaptureSession instance using AVCaptureSession's -addConnection: method. When using -addInput: or -addOutput:, connections are formed between all compatible inputs and outputs automatically. You do not need to manually create and add connections to the session unless you use the primitive -addInputWithNoConnections: or -addOutputWithNoConnections: methods.
  */
-- (instancetype)initWithInputPorts:(NSArray *)ports output:(AVCaptureOutput *)output NS_AVAILABLE(10_7, 8_0);
+- (instancetype)initWithInputPorts:(NSArray<AVCaptureInputPort *> *)ports output:(AVCaptureOutput *)output NS_AVAILABLE(10_7, 8_0);
 
 /*!
  @method initWithInputPort:videoPreviewLayer:
@@ -731,11 +584,11 @@
  @property inputPorts
  @abstract
     An array of AVCaptureInputPort instances providing data through this connection.
-
+ 
  @discussion
     An AVCaptureConnection may involve one or more AVCaptureInputPorts producing data to the connection's AVCaptureOutput. This property is read-only. An AVCaptureConnection's inputPorts remain static for the life of the object.
  */
-@property(nonatomic, readonly) NSArray *inputPorts;
+@property(nonatomic, readonly) NSArray<AVCaptureInputPort *> *inputPorts;
 
 /*!
  @property output
@@ -774,6 +627,8 @@
 
  @discussion
     The value of this property is a BOOL that determines whether the receiver's output can consume data provided through this connection. This property is read-only. Clients may key-value observe this property to know when a session's configuration forces a connection to become inactive. The default value is YES.
+ 
+    Prior to iOS 11, the audio connection feeding an AVCaptureAudioDataOutput is made inactive when using AVCaptureSessionPresetPhoto or the equivalent photo format using -[AVCaptureDevice activeFormat]. On iOS 11 and later, the audio connection feeding AVCaptureAudioDataOutput is active for all presets and device formats.
  */
 @property(nonatomic, readonly, getter=isActive) BOOL active;
 
@@ -781,17 +636,17 @@
  @property audioChannels
  @abstract
     An array of AVCaptureAudioChannel objects representing individual channels of audio data flowing through the connection.
-
+ 
  @discussion
     This property is only applicable to AVCaptureConnection instances involving audio. In such connections, the audioChannels array contains one AVCaptureAudioChannel object for each channel of audio data flowing through this connection.
  */
-@property(nonatomic, readonly) NSArray *audioChannels;
+@property(nonatomic, readonly) NSArray<AVCaptureAudioChannel *> *audioChannels;
 
 /*!
  @property supportsVideoMirroring
  @abstract
     Indicates whether the connection supports setting the videoMirrored property.
-
+ 
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. In such connections, the videoMirrored property may only be set if
     -isVideoMirroringSupported returns YES.
@@ -802,7 +657,7 @@
  @property videoMirrored
  @abstract
     Indicates whether the video flowing through the connection should be mirrored about its vertical axis.
-
+ 
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. if -isVideoMirroringSupported returns YES, videoMirrored may be set to flip the video about its vertical axis and produce a mirror-image effect.
  */
@@ -812,17 +667,17 @@
  @property automaticallyAdjustsVideoMirroring
  @abstract
     Specifies whether or not the value of @"videoMirrored" can change based on configuration of the session.
-	
- @discussion		
+ 
+ @discussion
     For some session configurations, video data flowing through the connection will be mirrored by default. When the value of this property is YES, the value of @"videoMirrored" may change depending on the configuration of the session, for example after switching to a different AVCaptureDeviceInput. The default value is YES.
  */
-@property (nonatomic) BOOL automaticallyAdjustsVideoMirroring NS_AVAILABLE(10_7, 6_0);
+@property(nonatomic) BOOL automaticallyAdjustsVideoMirroring NS_AVAILABLE(10_7, 6_0);
 
 /*!
  @property supportsVideoOrientation
  @abstract
     Indicates whether the connection supports setting the videoOrientation property.
-
+ 
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. In such connections, the videoOrientation property may only be set if -isVideoOrientationSupported returns YES.
  */
@@ -832,13 +687,13 @@
  @property videoOrientation
  @abstract
     Indicates whether the video flowing through the connection should be rotated to a given orientation.
-
+ 
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. If -isVideoOrientationSupported returns YES, videoOrientation may be set to rotate the video buffers being consumed by the connection's output. Note that setting videoOrientation does not necessarily result in a physical rotation of video buffers. For instance, a video connection to an AVCaptureMovieFileOutput handles orientation using a QuickTime track matrix. In the AVCaptureStillImageOutput, orientation is handled using Exif tags.
  */
 @property(nonatomic) AVCaptureVideoOrientation videoOrientation;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property supportsVideoFieldMode
@@ -848,7 +703,7 @@
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. In such connections, the videoFieldMode property may only be set if -isVideoFieldModeSupported returns YES.
  */
-@property(nonatomic, readonly, getter=isVideoFieldModeSupported) BOOL supportsVideoFieldMode NS_AVAILABLE(10_7, NA);
+@property(nonatomic, readonly, getter=isVideoFieldModeSupported) BOOL supportsVideoFieldMode NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property videoFieldMode
@@ -858,9 +713,9 @@
  @discussion
     This property is only applicable to AVCaptureConnection instances involving video. If -isVideoFieldModeSupported returns YES, videoFieldMode may be set to affect interlaced video content flowing through the connection.
  */
-@property(nonatomic) AVVideoFieldMode videoFieldMode NS_AVAILABLE(10_7, NA);
+@property(nonatomic) AVVideoFieldMode videoFieldMode NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @property supportsVideoMinFrameDuration
@@ -975,7 +830,7 @@
 @property(nonatomic, readonly, getter=isVideoStabilizationEnabled) BOOL videoStabilizationEnabled NS_DEPRECATED_IOS(6_0, 8_0, "Use activeVideoStabilizationMode instead.");
 
 /*!
- @property enablesVideoStabilizationWhenAvailable;
+ @property enablesVideoStabilizationWhenAvailable
  @abstract
     Indicates whether stabilization should be applied to video flowing through the receiver when the feature is available.
  
@@ -984,11 +839,32 @@
  */
 @property(nonatomic) BOOL enablesVideoStabilizationWhenAvailable NS_DEPRECATED_IOS(6_0, 8_0, "Use preferredVideoStabilizationMode instead.");
 
+/*!
+ @property cameraIntrinsicMatrixDeliverySupported
+ @abstract
+    Indicates whether the connection supports camera intrinsic matrix delivery.
+ 
+ @discussion
+    This property is only applicable to AVCaptureConnection instances involving video. In such connections, the cameraIntrinsicMatrixDeliveryEnabled property may only be set to YES if -isCameraIntrinsicMatrixDeliverySupported returns YES. This property returns YES if both the connection's input device format and the connection's output support camera intrinsic matrix delivery. In iOS 11, only the AVCaptureVideoDataOutput's connection supports this property.
+ */
+@property(nonatomic, readonly, getter=isCameraIntrinsicMatrixDeliverySupported) BOOL cameraIntrinsicMatrixDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property cameraIntrinsicMatrixDeliveryEnabled
+ @abstract
+    Indicates whether camera intrinsic matrix delivery should be enabled.
+ 
+ @discussion
+    This property is only applicable to AVCaptureConnection instances involving video. Refer to property cameraIntrinsicMatrixDeliverySupported before setting this property. When this property is set to YES, the receiver's output will add the kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix sample buffer attachment to all vended sample buffers. This property must be set before the session starts running.
+ */
+@property(nonatomic, getter=isCameraIntrinsicMatrixDeliveryEnabled) BOOL cameraIntrinsicMatrixDeliveryEnabled NS_AVAILABLE_IOS(11_0);
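
A sketch of opting in before the session runs and reading the attachment later; the video data output and its delegate setup are assumed to exist elsewhere:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Sketch: enable intrinsic-matrix delivery on the video data output's connection.
static void EnableIntrinsics(AVCaptureVideoDataOutput *videoDataOutput)
{
    AVCaptureConnection *conn = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if (conn.cameraIntrinsicMatrixDeliverySupported) {
        conn.cameraIntrinsicMatrixDeliveryEnabled = YES; // before -startRunning
    }
}

// Later, per sample buffer, the matrix arrives as an attachment (a CFData
// wrapping a matrix_float3x3); the returned reference is not owned by the caller.
static CFTypeRef GetIntrinsicAttachment(CMSampleBufferRef sampleBuffer)
{
    return CMGetAttachment(sampleBuffer,
                           kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix,
                           NULL);
}
```
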
+
 #endif // TARGET_OS_IPHONE
 
 @end
 
 
+#pragma mark - AVCaptureAudioChannel
 
 @class AVCaptureAudioChannelInternal;
 
@@ -1004,14 +880,14 @@
 @interface AVCaptureAudioChannel : NSObject
 {
 @private
-	AVCaptureAudioChannelInternal *_internal;
+    AVCaptureAudioChannelInternal *_internal;
 }
 
 /*!
  @property averagePowerLevel
  @abstract
     A measurement of the instantaneous average power level of the audio flowing through the receiver.
-
+ 
  @discussion
     A client may poll an AVCaptureAudioChannel object for its current averagePowerLevel to get its instantaneous average power level in decibels. This property is not key-value observable.
  */
@@ -1021,34 +897,36 @@
  @property peakHoldLevel
  @abstract
     A measurement of the peak/hold level of the audio flowing through the receiver.
-
+ 
  @discussion
     A client may poll an AVCaptureAudioChannel object for its current peakHoldLevel to get its most recent peak hold level in decibels. This property is not key-value observable.
  */
 @property(nonatomic, readonly) float peakHoldLevel;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property volume
  @abstract
     A property indicating the current volume (gain) of the receiver.
-
+ 
  @discussion
     The volume property indicates the current volume or gain of the receiver as a floating point value between 0.0 and 1.0. If you desire to boost the gain in software, you may specify a value greater than 1.0.
  */
-@property(nonatomic) float volume NS_AVAILABLE(10_7, NA);
+@property(nonatomic) float volume NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property enabled
  @abstract
     A property indicating whether the receiver is currently enabled for data capture.
-
+ 
  @discussion
     By default, all AVCaptureAudioChannel objects exposed by a connection are enabled. You may set enabled to NO to stop the flow of data for a particular AVCaptureAudioChannel.
  */
-@property(nonatomic, getter=isEnabled) BOOL enabled NS_AVAILABLE(10_7, NA);
+@property(nonatomic, getter=isEnabled) BOOL enabled NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 @end
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h	2017-05-25 06:23:48.000000000 -0400
@@ -0,0 +1,162 @@
+/*
+    File:  AVCaptureSessionPreset.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+ @group AVCaptureSessionPreset string constants
+ 
+ @discussion
+    Clients may use an AVCaptureSessionPreset to set the format for output on an AVCaptureSession.
+ */
+typedef NSString * AVCaptureSessionPreset NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED NS_STRING_ENUM;
+
+/*!
+ @constant AVCaptureSessionPresetPhoto
+ @abstract
+    An AVCaptureSession preset suitable for high resolution photo quality output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetPhoto for full resolution photo quality output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetPhoto NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetHigh
+ @abstract
+    An AVCaptureSession preset suitable for high quality video and audio output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetHigh to achieve high quality video and audio output. AVCaptureSessionPresetHigh is the default sessionPreset value.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetHigh NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetMedium
+ @abstract
+    An AVCaptureSession preset suitable for medium quality output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetMedium to achieve output video and audio bitrates suitable for sharing over WiFi.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetMedium NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetLow
+ @abstract
+    An AVCaptureSession preset suitable for low quality output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetLow to achieve output video and audio bitrates suitable for sharing over 3G.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetLow NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset320x240
+ @abstract
+    An AVCaptureSession preset suitable for 320x240 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset320x240 to achieve 320x240 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset320x240 NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset352x288
+ @abstract
+    An AVCaptureSession preset suitable for 352x288 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset352x288 to achieve CIF quality (352x288) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset352x288 NS_AVAILABLE(10_7, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset640x480
+ @abstract
+    An AVCaptureSession preset suitable for 640x480 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset640x480 to achieve VGA quality (640x480) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset640x480 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset960x540
+ @abstract
+    An AVCaptureSession preset suitable for 960x540 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset960x540 to achieve quarter HD quality (960x540) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset960x540 NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset1280x720
+ @abstract
+    An AVCaptureSession preset suitable for 1280x720 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1280x720 to achieve 1280x720 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1280x720 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset1920x1080
+ @abstract
+    An AVCaptureSession preset suitable for 1920x1080 video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1920x1080 to achieve 1920x1080 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1920x1080 NS_AVAILABLE_IOS(5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset3840x2160
+ @abstract
+    An AVCaptureSession preset suitable for 3840x2160 (UHD 4K) video output.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset3840x2160 to achieve 3840x2160 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset3840x2160 NS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetiFrame960x540
+ @abstract
+    An AVCaptureSession preset producing 960x540 Apple iFrame video and audio content.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame960x540 to achieve 960x540 quality iFrame H.264 video at ~30 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame960x540 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetiFrame1280x720
+ @abstract
+    An AVCaptureSession preset producing 1280x720 Apple iFrame video and audio content.
+ 
+ @discussion
+    Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame1280x720 to achieve 1280x720 quality iFrame H.264 video at ~40 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame1280x720 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetInputPriority
+ @abstract
+    An AVCaptureSession preset indicating that the formats of the session's inputs are being given priority.
+ 
+ @discussion
+    By calling -setSessionPreset:, clients can easily configure an AVCaptureSession to produce a desired quality of service level. The session configures its inputs and outputs optimally to produce the QoS level indicated. Clients who need to ensure a particular input format is chosen can use AVCaptureDevice's -setActiveFormat: method. When a client sets the active format on a device, the associated session's -sessionPreset property automatically changes to AVCaptureSessionPresetInputPriority. This change indicates that the input format selected by the client now dictates the quality of service level provided at the outputs. When a client sets the session preset to anything other than AVCaptureSessionPresetInputPriority, the session resumes responsibility for configuring inputs and outputs, and is free to change its inputs' activeFormat as needed.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetInputPriority NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+NS_ASSUME_NONNULL_END
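
A quick client-side sketch of how these presets are exercised (the session and the fallback choice are illustrative, not part of the SDK diff; the constants are tvOS/watchOS-prohibited, so assume an iOS or macOS target):

```objc
#import <AVFoundation/AVFoundation.h>

// Minimal sketch: pick the highest preset the current configuration supports.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
    session.sessionPreset = AVCaptureSessionPreset1920x1080;
} else {
    session.sessionPreset = AVCaptureSessionPresetMedium; // WiFi-sharing quality
}
// Per the discussion above, calling -[AVCaptureDevice setActiveFormat:] on an
// attached input's device flips sessionPreset to AVCaptureSessionPresetInputPriority.
```
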
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h	2016-09-23 20:45:17.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h	2017-05-24 00:28:07.000000000 -0400
@@ -1,14 +1,18 @@
 /*
     File:  AVCaptureStillImageOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
 
-#pragma mark - AVCaptureStillImageOutput
+#pragma mark AVCaptureStillImageOutput
 
 @class AVCaptureStillImageOutputInternal;
 
@@ -16,7 +20,7 @@
  @class AVCaptureStillImageOutput
  @abstract
     AVCaptureStillImageOutput is a concrete subclass of AVCaptureOutput that can be used to capture high-quality still images with accompanying metadata.
-
+ 
  @discussion
     Instances of AVCaptureStillImageOutput can be used to capture, on demand, high quality snapshots from a realtime capture source. Clients can request a still image for the current time using the captureStillImageAsynchronouslyFromConnection:completionHandler: method. Clients can also configure still image outputs to produce still images in specific image formats.
  */
@@ -24,40 +28,44 @@
 @interface AVCaptureStillImageOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureStillImageOutputInternal *_internal;
+    AVCaptureStillImageOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @property outputSettings
  @abstract
     Specifies the options the receiver uses to encode still images before they are delivered.
-
+ 
  @discussion
     See AVVideoSettings.h for more information on how to construct an output settings dictionary.
-
-    On iOS, the only currently supported keys are AVVideoCodecKey and kCVPixelBufferPixelFormatTypeKey. Use -availableImageDataCVPixelFormatTypes and -availableImageDataCodecTypes to determine what codec keys and pixel formats are supported. AVVideoQualityKey is supported on iOS 6.0 and later and may only be used when AVVideoCodecKey is set to AVVideoCodecJPEG.
+ 
+    On iOS, the only currently supported keys are AVVideoCodecKey and kCVPixelBufferPixelFormatTypeKey. Use -availableImageDataCVPixelFormatTypes and -availableImageDataCodecTypes to determine what codec keys and pixel formats are supported. AVVideoQualityKey is supported on iOS 6.0 and later and may only be used when AVVideoCodecKey is set to AVVideoCodecTypeJPEG.
  */
-@property(nonatomic, copy) NSDictionary *outputSettings;
+@property(nonatomic, copy) NSDictionary<NSString *, id> *outputSettings;
 
 /*!
  @property availableImageDataCVPixelFormatTypes
  @abstract
     Indicates the supported image pixel formats that can be specified in outputSettings.
-
+ 
  @discussion
     The value of this property is an NSArray of NSNumbers that can be used as values for the kCVPixelBufferPixelFormatTypeKey in the receiver's outputSettings property. The first format in the returned list is the most efficient output format.
  */
-@property(nonatomic, readonly) NSArray *availableImageDataCVPixelFormatTypes;
+@property(nonatomic, readonly) NSArray<NSNumber *> *availableImageDataCVPixelFormatTypes;
 
 /*!
  @property availableImageDataCodecTypes
  @abstract
     Indicates the supported image codec formats that can be specified in outputSettings.
-
+ 
  @discussion
-    The value of this property is an NSArray of NSStrings that can be used as values for the AVVideoCodecKey in the receiver's outputSettings property.
+    The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's outputSettings property.
  */
-@property(nonatomic, readonly) NSArray *availableImageDataCodecTypes;
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableImageDataCodecTypes;
 
 #if TARGET_OS_IPHONE
 
@@ -107,7 +115,7 @@
  @property capturingStillImage
  @abstract
     A boolean value that becomes true when a still image is being captured.
-
+ 
  @discussion
     The value of this property is a BOOL that becomes true when a still image is being captured, and false when no still image capture is underway. This property is key-value observable.
  */
@@ -117,39 +125,40 @@
  @method captureStillImageAsynchronouslyFromConnection:completionHandler:
  @abstract
     Initiates an asynchronous still image capture, returning the result to a completion handler.
-
+ 
  @param connection
     The AVCaptureConnection object from which to capture the still image.
  @param handler
     A block that will be called when the still image capture is complete. The block will be passed a CMSampleBuffer object containing the image data or an NSError object if an image could not be captured.
-
+ 
  @discussion
     This method will return immediately after it is invoked, later calling the provided completion handler block when image data is ready. If the request could not be completed, the error parameter will contain an NSError object describing the failure.
-
+ 
     Attachments to the image data sample buffer may contain metadata appropriate to the image data format. For instance, a sample buffer containing JPEG data may carry a kCGImagePropertyExifDictionary as an attachment. See <ImageIO/CGImageProperties.h> for a list of keys and value types.
-
+ 
     Clients should not assume that the completion handler will be called on a specific thread.
  
     Calls to captureStillImageAsynchronouslyFromConnection:completionHandler: are not synchronized with AVCaptureDevice manual control completion handlers. Setting a device manual control, waiting for its completion, then calling captureStillImageAsynchronouslyFromConnection:completionHandler: DOES NOT ensure that the still image returned reflects your manual control change. It may be from an earlier time. You can compare your manual control completion handler sync time to the returned still image's presentation time. You can retrieve the sample buffer's pts using CMSampleBufferGetPresentationTimestamp(). If the still image has an earlier timestamp, your manual control command does not apply to it.
  */
-- (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef imageDataSampleBuffer, NSError *error))handler;
+- (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError * _Nullable error))handler;
 
 /*!
  @method jpegStillImageNSDataRepresentation:
  @abstract
     Converts the still image data and metadata attachments in a JPEG sample buffer to an NSData representation.
-
+ 
  @param jpegSampleBuffer
     The sample buffer carrying JPEG image data, optionally with Exif metadata sample buffer attachments. This method throws an NSInvalidArgumentException if jpegSampleBuffer is NULL or not in the JPEG format.
-
+ 
  @discussion
     This method returns an NSData representation of a JPEG still image sample buffer, merging the image data and Exif metadata sample buffer attachments without recompressing the image. The returned NSData is suitable for writing to disk.
  */
-+ (NSData *)jpegStillImageNSDataRepresentation:(CMSampleBufferRef)jpegSampleBuffer;
++ (nullable NSData *)jpegStillImageNSDataRepresentation:(CMSampleBufferRef)jpegSampleBuffer;
 
 @end
 
-#if TARGET_OS_IPHONE
+
+#pragma mark - AVCaptureBracketedStillImageSettings
 
 /*!
  @class AVCaptureBracketedStillImageSettings
@@ -161,8 +170,14 @@
  */
 NS_CLASS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED
 @interface AVCaptureBracketedStillImageSettings : NSObject
+
+AV_INIT_UNAVAILABLE
+
 @end
 
+
+#pragma mark - AVCaptureManualExposureBracketedStillImageSettings
+
 /*!
  @class AVCaptureManualExposureBracketedStillImageSettings
  @abstract
@@ -174,13 +189,39 @@
 NS_CLASS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED
 @interface AVCaptureManualExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
 
+/*!
+ @method manualExposureSettingsWithExposureDuration:ISO:
+ @abstract
+    Creates an AVCaptureManualExposureBracketedStillImageSettings using the specified exposure duration and ISO.
+ 
+ @param duration
+    The exposure duration in seconds. Pass AVCaptureExposureDurationCurrent to leave the duration unchanged for this bracketed image.
+ @param ISO
+    The ISO. Pass AVCaptureISOCurrent to leave the ISO unchanged for this bracketed image.
+ @result
+    An initialized AVCaptureManualExposureBracketedStillImageSettings instance.
+ */
 + (instancetype)manualExposureSettingsWithExposureDuration:(CMTime)duration ISO:(float)ISO;
 
+/*!
+ @property exposureDuration
+ @abstract
+    The exposure duration for the still image.
+ */
 @property(readonly) CMTime exposureDuration;
+
+/*!
+ @property ISO
+ @abstract
+    The ISO for the still image.
+ */
 @property(readonly) float ISO;
 
 @end
 
+
+#pragma mark - AVCaptureAutoExposureBracketedStillImageSettings
+
 /*!
  @class AVCaptureAutoExposureBracketedStillImageSettings
  @abstract
@@ -192,14 +233,30 @@
 NS_CLASS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED
 @interface AVCaptureAutoExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
 
+/*!
+ @method autoExposureSettingsWithExposureTargetBias:
+ @abstract
+     Creates an AVCaptureAutoExposureBracketedStillImageSettings using the specified exposure target bias.
+ 
+ @param exposureTargetBias
+     The exposure target bias. Pass AVCaptureExposureTargetBiasCurrent to leave the exposureTargetBias unchanged for this image.
+ @result
+     An initialized AVCaptureAutoExposureBracketedStillImageSettings instance.
+ */
 + (instancetype)autoExposureSettingsWithExposureTargetBias:(float)exposureTargetBias;
 
+/*!
+ @property exposureTargetBias
+ @abstract
+     The exposure bias for the auto exposure bracketed settings.
+ */
 @property(readonly) float exposureTargetBias;
 
 @end
 
+
 /*!
- @category AVCaptureStillImageOutput (BracketedCaptureMethods)
+ @category AVCaptureStillImageOutput (AVCaptureStillImageOutputBracketedCapture)
  @abstract
     A category of methods for bracketed still image capture.
  
@@ -208,13 +265,13 @@
  
     In a bracketed capture, AVCaptureDevice flashMode property is ignored (flash is forced off), as is AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property (stabilization is forced off).
  */
-@interface AVCaptureStillImageOutput ( BracketedCaptureMethods )
+@interface AVCaptureStillImageOutput (AVCaptureStillImageOutputBracketedCapture)
 
 /*!
  @property maxBracketedCaptureStillImageCount
  @abstract
     Specifies the maximum number of still images that may be taken in a single bracket.
-
+ 
  @discussion
     AVCaptureStillImageOutput can only satisfy a limited number of image requests in a single bracket without exhausting system resources. The maximum number of still images that may be taken in a single bracket depends on the size of the images being captured, and consequently may vary with AVCaptureSession -sessionPreset and AVCaptureDevice -activeFormat. Some formats do not support bracketed capture and return a maxBracketedCaptureStillImageCount of 0. This read-only property is key-value observable. If you exceed -maxBracketedCaptureStillImageCount, then -captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler: fails and the completionHandler is called [settings count] times with a NULL sample buffer and AVErrorMaximumStillImageCaptureRequestsExceeded.
  */
@@ -247,18 +304,15 @@
  
  @param connection
     The connection through which the still image bracket should be captured.
- 
  @param settings
     An array of AVCaptureBracketedStillImageSettings objects. All must be of the same kind of AVCaptureBracketedStillImageSettings subclass, or an NSInvalidArgumentException is thrown.
- 
- @param completionHandler
+ @param handler
     A user provided block that will be called asynchronously once resources have successfully been allocated for the specified bracketed capture operation. If sufficient resources could not be allocated, the "prepared" parameter contains NO, and "error" parameter contains a non-nil error value. If [settings count] exceeds -maxBracketedCaptureStillImageCount, then AVErrorMaximumStillImageCaptureRequestsExceeded is returned. You should not assume that the completion handler will be called on a specific thread.
  
  @discussion
     -maxBracketedCaptureStillImageCount tells you the maximum number of images that may be taken in a single bracket given the current AVCaptureDevice/AVCaptureSession/AVCaptureStillImageOutput configuration. But before taking a still image bracket, additional resources may need to be allocated. By calling -prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler: first, you are able to deterministically know when the receiver is ready to capture the bracket with the specified settings array.
-
  */
-- (void)prepareToCaptureStillImageBracketFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray *)settings completionHandler:(void (^)(BOOL prepared, NSError *error))handler NS_DEPRECATED_IOS(8_0, 10_0, "Use AVCapturePhotoOutput setPreparedPhotoSettingsArray:completionHandler: instead");
+- (void)prepareToCaptureStillImageBracketFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray<__kindof AVCaptureBracketedStillImageSettings *> *)settings completionHandler:(void (^)(BOOL prepared, NSError * _Nullable error))handler NS_DEPRECATED_IOS(8_0, 10_0, "Use AVCapturePhotoOutput setPreparedPhotoSettingsArray:completionHandler: instead");
 
 /*!
  @method captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:
@@ -267,19 +321,16 @@
  
  @param connection
     The connection through which the still image bracket should be captured.
- 
  @param settings
     An array of AVCaptureBracketedStillImageSettings objects. All must be of the same kind of AVCaptureBracketedStillImageSettings subclass, or an NSInvalidArgumentException is thrown.
- 
- @param completionHandler
+ @param handler
     A user provided block that will be called asynchronously as each still image in the bracket is captured. If the capture request is successful, the "sampleBuffer" parameter contains a valid CMSampleBuffer, the "stillImageSettings" parameter contains the settings object corresponding to this still image, and a nil "error" parameter. If the bracketed capture fails, sample buffer is NULL and error is non-nil. If [settings count] exceeds -maxBracketedCaptureStillImageCount, then AVErrorMaximumStillImageCaptureRequestsExceeded is returned. You should not assume that the completion handler will be called on a specific thread.
  
  @discussion
     If you have not called -prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler: for this still image bracket request, the bracket may not be taken immediately, as the receiver may internally need to prepare resources.
  */
-- (void)captureStillImageBracketAsynchronouslyFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray *)settings completionHandler:(void (^)(CMSampleBufferRef sampleBuffer, AVCaptureBracketedStillImageSettings *stillImageSettings, NSError *error))handler NS_DEPRECATED_IOS(8_0, 10_0, "Use AVCapturePhotoOutput capturePhotoWithSettings:delegate: instead");
+- (void)captureStillImageBracketAsynchronouslyFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray<__kindof AVCaptureBracketedStillImageSettings *> *)settings completionHandler:(void (^)(CMSampleBufferRef _Nullable sampleBuffer, AVCaptureBracketedStillImageSettings * _Nullable stillImageSettings, NSError * _Nullable error))handler NS_DEPRECATED_IOS(8_0, 10_0, "Use AVCapturePhotoOutput capturePhotoWithSettings:delegate: instead");
 
 @end
 
-#endif // TARGET_OS_IPHONE
-
+NS_ASSUME_NONNULL_END
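
The still image path documented above is superseded by AVCapturePhotoOutput (see the deprecation messages in the diff), but a hedged sketch of the classic flow may help; it assumes `stillOutput` has already been added to a running session with a video input:

```objc
// Sketch only: JPEG capture with AVCaptureStillImageOutput.
AVCaptureStillImageOutput *stillOutput = [[AVCaptureStillImageOutput alloc] init];
stillOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecTypeJPEG };

// connectionWithMediaType: returns nil unless the output belongs to a session
// with a video input, so this assumes prior session setup.
AVCaptureConnection *connection = [stillOutput connectionWithMediaType:AVMediaTypeVideo];
[stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                         completionHandler:^(CMSampleBufferRef buffer, NSError *error) {
    if (buffer != NULL) {
        // Merges JPEG data and Exif attachments without recompressing the image.
        NSData *jpeg = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:buffer];
        // ... write `jpeg` to disk ...
    }
}];
```
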
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2016-09-23 21:02:18.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,15 +1,18 @@
 /*
     File:  AVCaptureVideoDataOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
-#import <AVFoundation/AVCaptureOutput.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
 #import <CoreMedia/CMSampleBuffer.h>
 
-#pragma mark - AVCaptureVideoDataOutput
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureVideoDataOutput
 
 @class AVCaptureVideoDataOutputInternal;
 @protocol AVCaptureVideoDataOutputSampleBufferDelegate;
@@ -18,7 +21,7 @@
  @class AVCaptureVideoDataOutput
  @abstract
     AVCaptureVideoDataOutput is a concrete subclass of AVCaptureOutput that can be used to process uncompressed or compressed frames from the video being captured.
-
+ 
  @discussion
     Instances of AVCaptureVideoDataOutput produce video frames suitable for processing using other media APIs. Applications can access the frames with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
  */
@@ -26,107 +29,150 @@
 @interface AVCaptureVideoDataOutput : AVCaptureOutput 
 {
 @private
-	AVCaptureVideoDataOutputInternal *_internal;
+    AVCaptureVideoDataOutputInternal *_internal;
 }
 
+- (instancetype)init;
+
++ (instancetype)new;
+
 /*!
  @method setSampleBufferDelegate:queue:
  @abstract
     Sets the receiver's delegate that will accept captured buffers and dispatch queue on which the delegate will be called.
-
+ 
  @param sampleBufferDelegate
     An object conforming to the AVCaptureVideoDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured.
  @param sampleBufferCallbackQueue
     A dispatch queue on which all sample buffer delegate methods will be called.
-
+ 
  @discussion
     When a new video sample buffer is captured it will be vended to the sample buffer delegate using the captureOutput:didOutputSampleBuffer:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue. If the queue is blocked when new frames are captured, those frames will be automatically dropped at a time determined by the value of the alwaysDiscardsLateVideoFrames property. This allows clients to process existing frames on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming frames. If their frame processing is consistently unable to keep up with the rate of incoming frames, clients should consider using the minFrameDuration property, which will generally yield better performance characteristics and more consistent frame rates than frame dropping alone.
-
+ 
     Clients that need to minimize the chances of frames being dropped should specify a queue on which a sufficiently small amount of processing is being done outside of receiving sample buffers. However, if such clients migrate extra processing to another queue, they are responsible for ensuring that memory usage does not grow without bound from frames that have not been processed.
-
+ 
     A serial dispatch queue must be used to guarantee that video frames will be delivered in order. The sampleBufferCallbackQueue parameter may not be NULL, except when setting the sampleBufferDelegate to nil.
  */
-- (void)setSampleBufferDelegate:(id<AVCaptureVideoDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue;
+- (void)setSampleBufferDelegate:(nullable id<AVCaptureVideoDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(nullable dispatch_queue_t)sampleBufferCallbackQueue;
 
 /*!
  @property sampleBufferDelegate
  @abstract
     The receiver's delegate.
-
+ 
  @discussion
     The value of this property is an object conforming to the AVCaptureVideoDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured. The delegate is set using the setSampleBufferDelegate:queue: method.
  */
-@property(nonatomic, readonly) id<AVCaptureVideoDataOutputSampleBufferDelegate> sampleBufferDelegate;
+@property(nonatomic, readonly, nullable) id<AVCaptureVideoDataOutputSampleBufferDelegate> sampleBufferDelegate;
 
 /*!
  @property sampleBufferCallbackQueue
  @abstract
     The dispatch queue on which all sample buffer delegate methods will be called.
-
+ 
  @discussion
     The value of this property is a dispatch_queue_t. The queue is set using the setSampleBufferDelegate:queue: method.
  */
-@property(nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;
+@property(nonatomic, readonly, nullable) dispatch_queue_t sampleBufferCallbackQueue;
 
 /*!
  @property videoSettings
  @abstract
     Specifies the settings used to decode or re-encode video before it is output by the receiver.
-
+ 
  @discussion
     See AVVideoSettings.h for more information on how to construct a video settings dictionary. To receive samples in their device native format, set this property to an empty dictionary (i.e. [NSDictionary dictionary]). To receive samples in a default uncompressed format, set this property to nil. Note that after this property is set to nil, subsequent querying of this property will yield a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
-
+ 
     On iOS, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA.
  */
-@property(nonatomic, copy) NSDictionary *videoSettings;
+@property(nonatomic, copy, null_resettable) NSDictionary<NSString *, id> *videoSettings;
 
 /*!
  @method recommendedVideoSettingsForAssetWriterWithOutputFileType:
  @abstract
     Specifies the recommended settings for use with an AVAssetWriterInput.
+ 
+ @param outputFileType
+    Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
+ @result
+    A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
+ 
+ @discussion
+    The value of this property is an NSDictionary containing values for compression settings keys defined in AVVideoSettings.h. This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
+ 
+       [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings sourceFormatHint:hint];
+ 
+    The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, -initWithMediaType:outputSettings: for a more in depth discussion). For QuickTime movie and ISO file types, the recommended video settings will produce output comparable to that of AVCaptureMovieFileOutput.
+ 
+    Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession and its inputs. The settings dictionary may change if the session's configuration changes. As such, you should configure your session first, then query the recommended video settings. As of iOS 8.3, movies produced with these settings successfully import into the iOS camera roll and sync to and from like devices via iTunes.
+ */
+- (nullable NSDictionary<NSString *, id> *)recommendedVideoSettingsForAssetWriterWithOutputFileType:(AVFileType)outputFileType NS_AVAILABLE_IOS(7_0);
 
+/*!
+ @method availableVideoCodecTypesForAssetWriterWithOutputFileType:
+ @abstract
+    Specifies the available video codecs for use with AVAssetWriter and a given file type.
+ 
  @param outputFileType
     Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
+ @result
+    An array of video codecs; see AVVideoSettings.h for a full list.
+ 
+ @discussion
+    This method allows you to query the available video codecs that may be used when specifying an AVVideoCodecKey in -recommendedVideoSettingsForVideoCodecType:assetWriterOutputFileType:. When specifying an outputFileType of AVFileTypeQuickTimeMovie, video codecs are ordered identically to -[AVCaptureMovieFileOutput availableVideoCodecTypes].
+ */
+- (NSArray<AVVideoCodecType> *)availableVideoCodecTypesForAssetWriterWithOutputFileType:(AVFileType)outputFileType NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method recommendedVideoSettingsForVideoCodecType:assetWriterOutputFileType:
+ @abstract
+    Specifies the recommended settings for a particular video codec type, to be used with an AVAssetWriterInput.
  
- @return
+ @param videoCodecType
+    Specifies the desired AVVideoCodecKey to be used for compression (see AVVideoSettings.h).
+ @param outputFileType
+    Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
+ @result
     A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
  
  @discussion
     The value of this property is an NSDictionary containing values for compression settings keys defined in AVVideoSettings.h. This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
         
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings sourceFormatHint:hint];
-    
+ 
     The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, -initWithMediaType:outputSettings: for a more in depth discussion). For QuickTime movie and ISO file types, the recommended video settings will produce output comparable to that of AVCaptureMovieFileOutput.
-
+ 
+    The videoCodecType string provided must be present in the availableVideoCodecTypesForAssetWriterWithOutputFileType: array, or an NSInvalidArgumentException is thrown. 
+ 
     Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession and its inputs. The settings dictionary may change if the session's configuration changes. As such, you should configure your session first, then query the recommended video settings. As of iOS 8.3, movies produced with these settings successfully import into the iOS camera roll and sync to and from like devices via iTunes.
  */
-- (NSDictionary *)recommendedVideoSettingsForAssetWriterWithOutputFileType:(NSString *)outputFileType NS_AVAILABLE_IOS(7_0);
+- (nullable NSDictionary *)recommendedVideoSettingsForVideoCodecType:(AVVideoCodecType)videoCodecType assetWriterOutputFileType:(AVFileType)outputFileType NS_AVAILABLE_IOS(11_0);
 
 /*!
  @property availableVideoCVPixelFormatTypes
  @abstract
     Indicates the supported video pixel formats that can be specified in videoSettings.
-
+ 
  @discussion
     The value of this property is an NSArray of NSNumbers that can be used as values for the kCVPixelBufferPixelFormatTypeKey in the receiver's videoSettings property. The first format in the returned list is the most efficient output format.
  */
-@property(nonatomic, readonly) NSArray *availableVideoCVPixelFormatTypes NS_AVAILABLE(10_7, 5_0);
+@property(nonatomic, readonly) NSArray<NSNumber *> *availableVideoCVPixelFormatTypes NS_AVAILABLE(10_7, 5_0);
 
 /*!
  @property availableVideoCodecTypes
  @abstract
     Indicates the supported video codec formats that can be specified in videoSettings.
-
+ 
  @discussion
-    The value of this property is an NSArray of NSStrings that can be used as values for the AVVideoCodecKey in the receiver's videoSettings property.
+    The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's videoSettings property.
  */
-@property(nonatomic, readonly) NSArray *availableVideoCodecTypes NS_AVAILABLE(10_7, 5_0);
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableVideoCodecTypes NS_AVAILABLE(10_7, 5_0);
 
 /*!
  @property minFrameDuration
  @abstract
     Specifies the minimum time interval between which the receiver should output consecutive video frames.
-
+ 
  @discussion
     The value of this property is a CMTime specifying the minimum duration of each video frame output by the receiver, placing a lower bound on the amount of time that should separate consecutive frames. This is equivalent to the inverse of the maximum frame rate. A value of kCMTimeZero or kCMTimeInvalid indicates an unlimited maximum frame rate. The default value is kCMTimeInvalid. As of iOS 5.0, minFrameDuration is deprecated. Use AVCaptureConnection's videoMinFrameDuration property instead.
  */
@@ -136,7 +182,7 @@
  @property alwaysDiscardsLateVideoFrames
  @abstract
     Specifies whether the receiver should always discard any video frame that is not processed before the next frame is captured.
-
+ 
  @discussion
     When the value of this property is YES, the receiver will immediately discard frames that are captured while the dispatch queue handling existing frames is blocked in the captureOutput:didOutputSampleBuffer:fromConnection: delegate method. When the value of this property is NO, delegates will be allowed more time to process old frames before new frames are discarded, but application memory usage may increase significantly as a result. The default value is YES.
  */
@@ -144,12 +190,13 @@
 
 @end
 
+
 /*!
  @protocol AVCaptureVideoDataOutputSampleBufferDelegate
  @abstract
     Defines an interface for delegates of AVCaptureVideoDataOutput to receive captured video sample buffers and be notified of late sample buffers that were dropped.
  */
-__TVOS_PROHIBITED
+NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @protocol AVCaptureVideoDataOutputSampleBufferDelegate <NSObject>
 
 @optional
@@ -158,38 +205,40 @@
  @method captureOutput:didOutputSampleBuffer:fromConnection:
  @abstract
     Called whenever an AVCaptureVideoDataOutput instance outputs a new video frame.
-
- @param captureOutput
+ 
+ @param output
     The AVCaptureVideoDataOutput instance that output the frame.
  @param sampleBuffer
     A CMSampleBuffer object containing the video frame data and additional information about the frame, such as its format and presentation time.
  @param connection
     The AVCaptureConnection from which the video was received.
-
+ 
  @discussion
     Delegates receive this message whenever the output captures and outputs a new video frame, decoding or re-encoding it as specified by its videoSettings property. Delegates can use the provided video frame in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's sampleBufferCallbackQueue property. This method is called periodically, so it must be efficient to prevent capture performance problems, including dropped frames.
-
+ 
     Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
-
+ 
     Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be reused by the device system and other capture inputs. This is frequently the case for uncompressed device native capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped. If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long, but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be reused.
  */
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
+- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
 
 /*!
  @method captureOutput:didDropSampleBuffer:fromConnection:
  @abstract
     Called once for each frame that is discarded.
-
- @param captureOutput
+ 
+ @param output
     The AVCaptureVideoDataOutput instance that dropped the frame.
  @param sampleBuffer
     A CMSampleBuffer object containing information about the dropped frame, such as its format and presentation time. This sample buffer will contain none of the original video data.
  @param connection
     The AVCaptureConnection from which the dropped video frame was received.
-
+ 
  @discussion
     Delegates receive this message whenever a video frame is dropped. This method is called once for each dropped frame. The CMSampleBuffer object passed to this delegate method will contain metadata about the dropped video frame, such as its duration and presentation time stamp, but will contain no actual video data. On iOS, included in the sample buffer attachments is the kCMSampleBufferAttachmentKey_DroppedFrameReason, which indicates why the frame was dropped. This method will be called on the dispatch queue specified by the output's sampleBufferCallbackQueue property. Because this method will be called on the same dispatch queue that is responsible for outputting video frames, it must be efficient to prevent further capture performance problems, such as additional dropped video frames.
-  */
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 6_0);
+ */
+- (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 6_0);
 
 @end
+
+NS_ASSUME_NONNULL_END
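
As the discussion notes, in-order delivery requires a serial queue and the delegate callback must stay cheap. A minimal sketch, where the queue label is arbitrary and `self` stands in for a real delegate object:

```objc
// Sketch: uncompressed frames delivered to a delegate on a serial queue.
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
videoOutput.alwaysDiscardsLateVideoFrames = YES; // the default: drop late frames

// A serial queue guarantees frames arrive in capture order.
dispatch_queue_t frameQueue = dispatch_queue_create("video.frames", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:frameQueue];

// In the delegate, process quickly; CFRetain the CMSampleBuffer only if it
// must outlive -captureOutput:didOutputSampleBuffer:fromConnection:.
```
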
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h	2016-08-05 01:30:13.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h	2017-05-23 21:01:46.000000000 -0400
@@ -1,9 +1,9 @@
 /*
     File:  AVCaptureVideoPreviewLayer.h
-
-	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
@@ -11,6 +11,10 @@
 #import <AVFoundation/AVCaptureSession.h>
 #import <AVFoundation/AVAnimation.h>
 
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureVideoPreviewLayer
+
 @class AVMetadataObject;
 @class AVCaptureVideoPreviewLayerInternal;
 
@@ -18,22 +22,22 @@
  @class AVCaptureVideoPreviewLayer
  @abstract
     A CoreAnimation layer subclass for previewing the visual output of an AVCaptureSession.
-
- @discussion		
+ 
+ @discussion
     An AVCaptureVideoPreviewLayer instance is a subclass of CALayer and is therefore suitable for insertion in a layer hierarchy as part of a graphical interface. One creates an AVCaptureVideoPreviewLayer instance with the capture session to be previewed, using +layerWithSession: or -initWithSession:. Using the @"videoGravity" property, one can influence how content is viewed relative to the layer bounds. On some hardware configurations, the orientation of the layer can be manipulated using @"orientation" and @"mirrored".
  */
 NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureVideoPreviewLayer : CALayer
 {
 @private
-	AVCaptureVideoPreviewLayerInternal *_internal;
+    AVCaptureVideoPreviewLayerInternal *_internal;
 }
 
 /*!
  @method layerWithSession:
  @abstract
     Creates an AVCaptureVideoPreviewLayer for previewing the visual output of the specified AVCaptureSession.
-
+ 
  @param session
     The AVCaptureSession instance to be previewed.
  @result
@@ -45,7 +49,7 @@
  @method initWithSession:
  @abstract
     Creates an AVCaptureVideoPreviewLayer for previewing the visual output of the specified AVCaptureSession.
-
+ 
  @param session
     The AVCaptureSession instance to be previewed.
  @result
@@ -81,11 +85,11 @@
  @property session
  @abstract
     The AVCaptureSession instance being previewed by the receiver.
-
+ 
  @discussion
     The session is retained by the preview layer.
  */
-@property (nonatomic, retain) AVCaptureSession *session;
+@property(nonatomic, retain, nullable) AVCaptureSession *session;
 
 /*!
 @method setSessionWithNoConnection:
@@ -105,28 +109,28 @@
  @discussion
     When calling initWithSession: or setSession: with a valid AVCaptureSession instance, a connection is formed to the first eligible video AVCaptureInput. If the receiver is detached from a session, the connection property becomes nil.
  */
-@property (nonatomic, readonly) AVCaptureConnection *connection NS_AVAILABLE(10_7, 6_0);
+@property(nonatomic, readonly, nullable) AVCaptureConnection *connection NS_AVAILABLE(10_7, 6_0);
 
 /*!
  @property videoGravity
  @abstract
     A string defining how the video is displayed within an AVCaptureVideoPreviewLayer bounds rect.
-
+ 
  @discussion
     Options are AVLayerVideoGravityResize, AVLayerVideoGravityResizeAspect and AVLayerVideoGravityResizeAspectFill. AVLayerVideoGravityResizeAspect is default. See <AVFoundation/AVAnimation.h> for a description of these options.
  */
-@property (copy) NSString *videoGravity;
+@property(copy) AVLayerVideoGravity videoGravity;
 
 /*!
  @method captureDevicePointOfInterestForPoint:
  @abstract
     Converts a point in layer coordinates to a point of interest in the coordinate space of the capture device providing input to the layer.
-
+ 
  @param pointInLayer
     A CGPoint in layer coordinates.
  @result
     A CGPoint in the coordinate space of the capture device providing input to the layer.
-
+ 
  @discussion
     AVCaptureDevice pointOfInterest is expressed as a CGPoint where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a point in the coordinate space of the receiver to a point of interest in the coordinate space of the AVCaptureDevice providing input to the receiver. The conversion takes frameSize and videoGravity into consideration.
  */
@@ -136,12 +140,12 @@
  @method pointForCaptureDevicePointOfInterest:
  @abstract
     Converts a point of interest in the coordinate space of the capture device providing input to the layer to a point in layer coordinates.
-
+ 
  @param captureDevicePointOfInterest
     A CGPoint in the coordinate space of the capture device providing input to the layer.
  @result
     A CGPoint in layer coordinates.
-
+ 
  @discussion
     AVCaptureDevice pointOfInterest is expressed as a CGPoint where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a point in the coordinate space of the AVCaptureDevice providing input to the coordinate space of the receiver. The conversion takes frame size and videoGravity into consideration.
  */
@@ -159,7 +163,7 @@
  
  @discussion
     AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the receiver. The conversion takes frame size and videoGravity into consideration.
-  */
+ */
 - (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInLayerCoordinates NS_AVAILABLE_IOS(7_0);
 
 /*!
@@ -174,23 +178,23 @@
  
  @discussion
     AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver. The conversion takes frame size and videoGravity into consideration.
-  */
+ */
 - (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method transformedMetadataObjectForMetadataObject:
  @abstract
     Converts an AVMetadataObject's visual properties to layer coordinates.
-
+ 
  @param metadataObject
     An AVMetadataObject originating from the same AVCaptureInput as the preview layer.
  @result
     An AVMetadataObject whose properties are in layer coordinates.
-
+ 
  @discussion
     AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. Face metadata objects likewise express yaw and roll angles with respect to an unrotated picture. -transformedMetadataObjectForMetadataObject: converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of the receiver. The conversion takes orientation, mirroring, layer bounds and videoGravity into consideration. If the provided metadata object originates from an input source other than the preview layer's, nil will be returned.
  */
-- (AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject NS_AVAILABLE_IOS(6_0);
+- (nullable AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject NS_AVAILABLE_IOS(6_0);
 
 #if TARGET_OS_IPHONE
 
@@ -198,52 +202,54 @@
  @property orientationSupported
  @abstract
     Specifies whether or not the preview layer supports orientation.
-
+ 
  @discussion
     Changes in orientation are not supported on all hardware configurations. An application should check the value of @"orientationSupported" before attempting to manipulate the orientation of the receiver. This property is deprecated. Use AVCaptureConnection's -isVideoOrientationSupported instead.
  */
-@property (nonatomic, readonly, getter=isOrientationSupported) BOOL orientationSupported NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's isVideoOrientationSupported instead.");
+@property(nonatomic, readonly, getter=isOrientationSupported) BOOL orientationSupported NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's isVideoOrientationSupported instead.");
 
 /*!
  @property orientation
  @abstract
     Specifies the orientation of the preview layer.
-
+ 
  @discussion
     AVCaptureVideoOrientation and its constants are defined in AVCaptureSession.h. The value of @"orientationSupported" must be YES in order to set @"orientation". An exception will be raised if this requirement is ignored. This property is deprecated. Use AVCaptureConnection's -videoOrientation instead.
  */
-@property (nonatomic) AVCaptureVideoOrientation orientation NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's videoOrientation instead.");
+@property(nonatomic) AVCaptureVideoOrientation orientation NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's videoOrientation instead.");
 
 /*!
  @property mirroringSupported
  @abstract
     Specifies whether or not the preview layer supports mirroring.
-
+ 
  @discussion
     Mirroring is not supported on all hardware configurations. An application should check the value of @"mirroringSupported" before attempting to manipulate mirroring on the receiver. This property is deprecated. Use AVCaptureConnection's -isVideoMirroringSupported instead.
  */
-@property (nonatomic, readonly, getter=isMirroringSupported) BOOL mirroringSupported NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's isVideoMirroringSupported instead.");
+@property(nonatomic, readonly, getter=isMirroringSupported) BOOL mirroringSupported NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's isVideoMirroringSupported instead.");
 
 /*!
  @property automaticallyAdjustsMirroring
  @abstract
     Specifies whether or not the value of @"mirrored" can change based on configuration of the session.
-	
- @discussion		
+ 
+ @discussion
     For some session configurations, preview will be mirrored by default. When the value of this property is YES, the value of @"mirrored" may change depending on the configuration of the session, for example after switching to a different AVCaptureDeviceInput. The default value is YES. This property is deprecated. Use AVCaptureConnection's -automaticallyAdjustsVideoMirroring instead.
  */
-@property (nonatomic) BOOL automaticallyAdjustsMirroring NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's automaticallyAdjustsVideoMirroring instead.");
+@property(nonatomic) BOOL automaticallyAdjustsMirroring NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's automaticallyAdjustsVideoMirroring instead.");
 
 /*!
  @property mirrored
  @abstract
     Specifies whether or not the preview is flipped over a vertical axis.
-	
- @discussion		
+ 
+ @discussion
     For most applications, it is unnecessary to manipulate preview mirroring manually if @"automaticallyAdjustsMirroring" is set to YES. The value of @"automaticallyAdjustsMirroring" must be NO in order to set @"mirrored". The value of @"mirroringSupported" must be YES in order to set @"mirrored". An exception will be raised if the value of @"mirrored" is mutated without respecting these requirements. This property is deprecated. Use AVCaptureConnection's -videoMirrored instead.
  */
-@property (nonatomic, getter=isMirrored) BOOL mirrored NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's videoMirrored instead.");
+@property(nonatomic, getter=isMirrored) BOOL mirrored NS_DEPRECATED_IOS(4_0, 6_0, "Use AVCaptureConnection's videoMirrored instead.");
 
 #endif // TARGET_OS_IPHONE
 
 @end
+
+NS_ASSUME_NONNULL_END
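
A short sketch of typical preview-layer setup under the new typed `videoGravity`; `session` and `hostView` are assumed to exist in the surrounding view controller:

```objc
// Sketch: embed a preview of `session` in a view hierarchy.
AVCaptureVideoPreviewLayer *previewLayer =
    [AVCaptureVideoPreviewLayer layerWithSession:session];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // AVLayerVideoGravity typed constant
previewLayer.frame = hostView.bounds;
[hostView.layer addSublayer:previewLayer];
```
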
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h	2016-08-05 01:30:13.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h	2017-05-23 21:01:46.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -107,7 +107,7 @@
     @discussion
       Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVCompositionTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVCompositionTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
     @method         tracksWithMediaCharacteristic:
@@ -118,7 +118,7 @@
     @discussion
       Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVCompositionTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVCompositionTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
 
@@ -129,7 +129,7 @@
 @interface AVMutableComposition : AVComposition
 {
 @private
-    AVMutableCompositionInternal    *_mutablePriv;
+    AVMutableCompositionInternal    *_mutablePriv __attribute__((unused));
 }
 
 /*!
@@ -241,7 +241,7 @@
     @discussion
       If the specified preferred track ID is not available, or kCMPersistentTrackID_Invalid was passed in, a unique track ID will be generated.
 */
-- (AVMutableCompositionTrack *)addMutableTrackWithMediaType:(NSString *)mediaType preferredTrackID:(CMPersistentTrackID)preferredTrackID;
+- (nullable AVMutableCompositionTrack *)addMutableTrackWithMediaType:(AVMediaType)mediaType preferredTrackID:(CMPersistentTrackID)preferredTrackID;
 
 /*!
     @method         removeTrack:
@@ -292,7 +292,7 @@
     @discussion
       Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVMutableCompositionTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVMutableCompositionTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
     @method         tracksWithMediaCharacteristic:
@@ -303,7 +303,7 @@
     @discussion
       Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVMutableCompositionTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVMutableCompositionTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
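
Since -addMutableTrackWithMediaType:preferredTrackID: is now declared nullable, client code should check the result. A sketch with an illustrative `asset`:

```objc
// Sketch: build a mutable composition from the first video track of `asset`.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];
if (compositionTrack != nil) { // nullable as of this SDK
    AVAssetTrack *sourceTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSError *error = nil;
    [compositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                              ofTrack:sourceTrack
                               atTime:kCMTimeZero
                                error:&error];
}
```
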
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h	2016-09-23 21:02:18.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h	2017-05-23 21:01:46.000000000 -0400
@@ -42,6 +42,16 @@
 */
 @property (nonatomic, readonly, copy) NSArray<AVCompositionTrackSegment *> *segments;
 
+/*!
+	@method			segmentForTrackTime:
+	@abstract		Supplies the AVCompositionTrackSegment from the segments array with a target timeRange that either contains the specified track time or is the closest to it among the target timeRanges of the track's segments.
+	@param			trackTime
+					The trackTime for which an AVCompositionTrackSegment is requested.
+	@result			An AVCompositionTrackSegment.
+	@discussion		If the trackTime does not map to a sample presentation time (e.g. it's outside the track's timeRange), the segment closest in time to the specified trackTime is returned. 
+ */
+- (nullable AVCompositionTrackSegment *)segmentForTrackTime:(CMTime)trackTime;
+
 @end
 
 
@@ -57,7 +67,7 @@
 @interface AVMutableCompositionTrack : AVCompositionTrack
 {
 @private
-    AVMutableCompositionTrackInternal    *_mutablePriv;
+    AVMutableCompositionTrackInternal    *_mutablePriv __attribute__((unused));
 }
 
 /*!
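
A sketch of the newly documented -segmentForTrackTime:, reusing the illustrative `compositionTrack` from the previous sketch:

```objc
// Sketch: find the segment whose target timeRange contains (or is closest to)
// a given track time.
CMTime probe = CMTimeMakeWithSeconds(2.5, 600);
AVCompositionTrackSegment *segment = [compositionTrack segmentForTrackTime:probe];
if (segment != nil) {
    CMTimeRange target = segment.timeMapping.target;
    // Per the discussion, a probe outside the track's timeRange yields the
    // closest segment rather than nil.
    (void)target;
}
```
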
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h	2017-02-20 23:29:10.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h	2017-05-24 00:28:08.000000000 -0400
@@ -1,7 +1,7 @@
 /*
     File: AVContentKeySession.h
 
-    Copyright (c) 2015-2016 Apple Inc. All rights reserved.
+    Copyright (c) 2015-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
@@ -32,31 +32,47 @@
  @group         AVContentKeySystem string constants
  @brief         Used by AVContentKeySession to determine the method of key delivery
  */
-typedef NSString *AVContentKeySystem NS_STRING_ENUM API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+typedef NSString *AVContentKeySystem NS_STRING_ENUM API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 /*!
  @constant      AVContentKeySystemFairPlayStreaming
  @discussion    Used to specify FairPlay Streaming (FPS) as the method of key delivery.
  */
-AVF_EXPORT AVContentKeySystem const AVContentKeySystemFairPlayStreaming API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeySystem const AVContentKeySystemFairPlayStreaming API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+/*!
+ @constant      AVContentKeySystemClearKey
+ @discussion    Used to specify clear key as the method of key delivery.
+ */
+AVF_EXPORT AVContentKeySystem const AVContentKeySystemClearKey API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
 @interface AVContentKeySession : NSObject {
 @private
     AVContentKeySessionInternal *_session;
 }
 
 /*!
+ @method        contentKeySessionWithKeySystem:
+ @abstract      Creates a new instance of AVContentKeySession to manage a collection of media content keys.
+ @param         keySystem
+                A valid key system for retrieving keys.
+ @result        A new AVContentKeySession.
+ @discussion    This method returns an AVContentKeySession instance that is capable of managing a collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
+ */
++ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
+/*!
  @method        contentKeySessionWithKeySystem:storageDirectoryAtURL:
  @abstract      Creates a new instance of AVContentKeySession to manage a collection of media content keys.
  @param         keySystem
                 A valid key system for retrieving keys.
  @param         storageURL
-                Optional URL to a writable directory that the session will use to facilitate expired session reports after abnormal session termination. Pass nil if you do not require expired session reports.
+                URL to a writable directory that the session will use to facilitate expired session reports after abnormal session termination.
  @result        A new AVContentKeySession.
 @discussion    This method returns an AVContentKeySession instance that is capable of managing a collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
  */
-+ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem storageDirectoryAtURL:(nullable NSURL *)storageURL;
++ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem storageDirectoryAtURL:(NSURL *)storageURL;
 
 /*!
  @method        setDelegate:queue:
@@ -129,6 +145,17 @@
 */
 - (void)renewExpiringResponseDataForContentKeyRequest:(AVContentKeyRequest *)contentKeyRequest;
 
+/*!
+ @method        makeSecureTokenForExpirationDateOfPersistableContentKey:
+ @abstract      Creates a secure server playback context (SPC) that the client could send to the key server to obtain an expiration date for the provided persistable content key data.
+ @param         persistableContentKeyData
+                Persistable content key data that was previously created using -[AVContentKeyRequest persistableContentKeyFromKeyVendorResponse:options:error:] or obtained via AVContentKeySessionDelegate callback -contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:.
+ @param         handler
+                Once the secure token is ready, this block will be called with the token or an error describing the failure.
+ */
+- (void)makeSecureTokenForExpirationDateOfPersistableContentKey:(NSData *)persistableContentKeyData
+											  completionHandler:(void (^)(NSData * _Nullable secureTokenData, NSError * _Nullable error))handler API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macosx, tvos, watchos);
+
 @end
 
 @interface AVContentKeySession (AVContentKeyRecipients)
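
A hedged sketch of the new storage-less factory alongside the new secure-token call; `keyDelegate` and the stored key data are assumed placeholders:

```swift
import AVFoundation

// Sketch: create sessions without a storage directory (new in this beta),
// including one for the new clear-key system.
let fpsSession = AVContentKeySession(keySystem: .fairPlayStreaming)
let clearKeySession = AVContentKeySession(keySystem: .clearKey)
fpsSession.setDelegate(keyDelegate, queue: DispatchQueue(label: "keys")) // keyDelegate: assumed AVContentKeySessionDelegate

// iOS-only in this beta: ask for an SPC the key server can use to report
// a stored persistable key's expiration date.
let storedKeyData = Data() // assumed: previously persisted key data
fpsSession.makeSecureToken(forExpirationDateOfPersistableContentKey: storedKeyData) { token, error in
    // Send `token` to the key server to learn the expiration date.
}
```
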
@@ -188,27 +215,27 @@
  @group         AVContentKeyRequestRetryReason string constants
  @brief         Used to specify a reason for asking the client to retry a content key request.
  */
-typedef NSString *AVContentKeyRequestRetryReason NS_STRING_ENUM API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+typedef NSString *AVContentKeyRequestRetryReason NS_STRING_ENUM API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 /*!
  @constant      AVContentKeyRequestRetryReasonTimedOut
 @discussion    Indicates that the content key request should be retried because the key response was not set soon enough, either because the initial request/response took too long or because a lease expired in the meantime.
  */
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonTimedOut API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonTimedOut API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 /*!
  @constant      AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease
  @discussion    Indicates that the content key request should be retried because a key response with expired lease was set on the previous content key request.
  */
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 /*!
  @constant      AVContentKeyRequestRetryReasonReceivedObsoleteContentKey
  @discussion    Indicates that the content key request should be retried because an obsolete key response was set on the previous content key request.
  */
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedObsoleteContentKey API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedObsoleteContentKey API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
 @protocol AVContentKeySessionDelegate <NSObject>
 
 /*!
@@ -248,6 +275,20 @@
 - (void)contentKeySession:(AVContentKeySession *)session didProvidePersistableContentKeyRequest:(AVPersistableContentKeyRequest *)keyRequest;
 
 /*!
+ @method        contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:
+ @abstract      Provides the receiver with an updated persistable content key for a particular key request.
+ @param         session
+                An instance of AVContentKeySession that is providing the updated persistable content key.
+ @param         persistableContentKey
+                Updated persistable content key data that may be stored offline and used to answer future requests to content keys with matching key identifier.
+ @param         keyIdentifier
+                Container- and protocol-specific identifier for the persistable content key that was updated.
+ @discussion    If the content key session provides updated persistable content key data, the previous key data is no longer valid and cannot be used to answer future loading requests.
+ */
+@optional
+- (void)contentKeySession:(AVContentKeySession *)session didUpdatePersistableContentKey:(NSData *)persistableContentKey forContentKeyIdentifier:(id)keyIdentifier API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+/*!
  @method        contentKeySession:contentKeyRequest:didFailWithError:
  @abstract      Informs the receiver a content key request has failed.
  @param         session
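
When the new `-contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:` callback fires, the previously stored key data is invalid and should be replaced. A minimal sketch, where `keyFileURL(for:)` is a hypothetical helper mapping a key identifier to a file on disk:

```swift
// Sketch of the new optional delegate callback (iOS 11 only in this beta).
func contentKeySession(_ session: AVContentKeySession,
                       didUpdatePersistableContentKey persistableContentKey: Data,
                       forContentKeyIdentifier keyIdentifier: Any) {
    // The previously persisted key data is no longer valid; overwrite it.
    try? persistableContentKey.write(to: keyFileURL(for: keyIdentifier), // hypothetical helper
                                     options: .atomic)
}
```
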
@@ -306,11 +347,11 @@
 	AVContentKeyRequestStatusRetried,
     AVContentKeyRequestStatusCancelled,
     AVContentKeyRequestStatusFailed
-} API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+} API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 @class AVContentKeyRequestInternal;
 
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
 @interface AVContentKeyRequest : NSObject
 {
 @private
@@ -356,7 +397,7 @@
  @param         appIdentifier
                 An opaque identifier for the application. The value of this identifier depends on the particular system used to provide the content key.
  @param         contentIdentifier
-                An opaque identifier for the content. The value of this identifier depends on the particular system used to provide the content key. May be nil.
+                An opaque identifier for the content. The value of this identifier depends on the particular system used to provide the content key.
  @param         options
                 Additional information necessary to obtain the key, or nil if none. See AVContentKeyRequest*Key below.
  @param         handler
@@ -364,7 +405,7 @@
  @discussion    If option AVContentKeyRequestProtocolVersionsKey is not specified the default protocol version of 1 is assumed.
 */
 - (void)makeStreamingContentKeyRequestDataForApp:(NSData *)appIdentifier
-                               contentIdentifier:(nullable NSData *)contentIdentifier
+                               contentIdentifier:(NSData *)contentIdentifier
                                          options:(nullable NSDictionary<NSString *, id> *)options
                                completionHandler:(void (^)(NSData * _Nullable contentKeyRequestData, NSError * _Nullable error))handler;
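
With `contentIdentifier` no longer nullable, a streaming key request might read as follows; `appCertificate` and `assetIDData` are assumed inputs from the app's own key-server setup:

```swift
import AVFoundation

// Sketch: build the SPC for a key request delivered via
// -contentKeySession:didProvideContentKeyRequest:.
func handle(keyRequest: AVContentKeyRequest, appCertificate: Data, assetIDData: Data) {
    keyRequest.makeStreamingContentKeyRequestData(
        forApp: appCertificate,
        contentIdentifier: assetIDData, // non-nullable as of this beta
        options: [AVContentKeyRequestProtocolVersionsKey: [1]]
    ) { spcData, error in
        guard let spcData = spcData else { return }
        // Send spcData to the key server, wrap the returned CKC in an
        // AVContentKeyResponse, and pass it to processContentKeyResponse(_:).
    }
}
```
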
 
@@ -388,13 +429,13 @@
 /*!
  @method        respondByRequestingPersistableContentKeyRequest
  @abstract      Informs the receiver to process a persistable content key request.
- @discussion    When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want the resulting key response to produce a key that can persist across multiple playback sessions, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:.
+ @discussion    When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want the resulting key response to produce a key that can persist across multiple playback sessions, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:. An NSInternalInconsistencyException will be raised if you attempt to create and use a persistable key but your AVContentKeySession delegate does not respond to contentKeySession:didProvidePersistableContentKeyRequest:.
  */
-- (void)respondByRequestingPersistableContentKeyRequest;
+- (void)respondByRequestingPersistableContentKeyRequest API_AVAILABLE(ios(10.3)) API_UNAVAILABLE(macos, tvos, watchos);
 
 @end
 
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(ios(10.3)) API_UNAVAILABLE(macos, tvos, watchos)
 @interface AVPersistableContentKeyRequest : AVContentKeyRequest
 
 /*!
@@ -409,9 +450,9 @@
  @result        The persistable content key data that may be stored offline to answer future loading requests of the same content key.
  @discussion    The data returned from this method may be used to immediately satisfy an AVPersistableContentKeyRequest, as well as any subsequent requests for the same key url using processContentKeyResponse: method. When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want to use existing persistent content key from storage, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:. You can set the persistent key from storage on the AVPersistableContentKeyRequest using processContentKeyResponse:.
  */
-- (NSData *)persistableContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse
-                                              options:(nullable NSDictionary <NSString *, id> *)options
-                                                error:(NSError **)outError;
+- (nullable NSData *)persistableContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse
+                                                        options:(nullable NSDictionary <NSString *, id> *)options
+                                                          error:(NSError * _Nullable * _Nullable)outError;
 
 @end
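
The persistable-key flow the two discussions above describe, sketched end to end (the key-server round trip is elided and `keyVendorResponse` is an assumed placeholder):

```swift
import AVFoundation

// Sketch: upgrade a key request to a persistable one, then persist and
// use the key data derived from the vendor response.
final class OfflineKeyDelegate: NSObject, AVContentKeySessionDelegate {
    func contentKeySession(_ session: AVContentKeySession,
                           didProvide keyRequest: AVContentKeyRequest) {
        keyRequest.respondByRequestingPersistableContentKeyRequest()
    }

    func contentKeySession(_ session: AVContentKeySession,
                           didProvide keyRequest: AVPersistableContentKeyRequest) {
        let keyVendorResponse = Data() // assumed: CKC from your key server
        guard let keyData = try? keyRequest.persistableContentKey(
                fromKeyVendorResponse: keyVendorResponse, options: nil) else { return }
        // keyData may be stored offline and also used to answer this request.
        keyRequest.processContentKeyResponse(
            AVContentKeyResponse(fairPlayStreamingKeyResponseData: keyData))
    }
}
```
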
 
@@ -431,11 +472,11 @@
  @class         AVContentKeyResponse
  @abstract      AVContentKeyResponse is used to represent the data returned from the key server when requesting a key for decrypting content.
 */
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
 @interface AVContentKeyResponse : NSObject
 {
 @private
-    AVContentKeyResponseInternal *_keyResponse;
+    AVContentKeyResponseInternal * _keyResponse;
 }
 
 /*!
@@ -448,14 +489,27 @@
 */
 + (instancetype)contentKeyResponseWithFairPlayStreamingKeyResponseData:(NSData *)keyResponseData;
 
+/*!
+ @method		contentKeyResponseWithClearKeyData:initializationVector:
+ @abstract		Create an AVContentKeyResponse from the key and IV when using AVContentKeySystemClearKey as the key system
+ 
+ @param			keyData
+				The key used for decrypting content.
+ @param			initializationVector
+				The initialization vector used for decrypting content, or nil if the initialization vector is available in the media to be decrypted
+ @result		A new AVContentKeyResponse holding Clear Key data.
+ @discussion	The object created by this method is typically used with an AVContentKeyRequest created by an AVContentKeySession using keySystem AVContentKeySystemClearKey. It is passed to AVContentKeyRequest -processContentKeyResponse: in order to supply the decryptor with key data.
+*/
++ (instancetype)contentKeyResponseWithClearKeyData:(NSData *)keyData initializationVector:(nullable NSData *)initializationVector API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
 @end
 
-// Options keys for use with -[AVContentKeySession makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:]
+// Options keys for use with -[AVContentKeyRequest makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:]
 /*!
  @constant      AVContentKeyRequestProtocolVersionsKey
  @abstract      Specifies the versions of the content protection protocol supported by the application as an NSArray of one or more NSNumber objects.
  */
-AVF_EXPORT NSString *const AVContentKeyRequestProtocolVersionsKey API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT NSString *const AVContentKeyRequestProtocolVersionsKey API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
 
 /*!
   @protocol      AVContentKeyRecipient
@@ -463,7 +517,7 @@
   @abstract
     Classes of objects that may require decryption keys for media data in order to enable processing, such as parsing or playback, conform to this protocol.
 */
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2), watchos(3.3))
 @protocol AVContentKeyRecipient
 
 @required
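
For the new clear-key system, the response is built directly from raw key material. A minimal sketch (the zeroed key and IV are placeholders, not working values):

```swift
import AVFoundation

// Sketch: answer a clear-key AVContentKeyRequest with key + IV data.
func respond(to keyRequest: AVContentKeyRequest) {
    let keyData = Data(count: 16) // assumed: your 128-bit content key
    let ivData = Data(count: 16)  // assumed IV; pass nil if the IV is carried in-band
    let response = AVContentKeyResponse(clearKeyData: keyData,
                                        initializationVector: ivData)
    keyRequest.processContentKeyResponse(response)
}
```
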
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h	2017-05-24 00:37:44.000000000 -0400
@@ -0,0 +1,206 @@
+/*
+    File:  AVDepthData.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVCameraCalibrationData.h>
+#import <Foundation/Foundation.h>
+#import <CoreVideo/CVPixelBufferPool.h>
+#import <ImageIO/CGImageProperties.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*
+ @enum AVDepthDataAccuracy
+ @abstract
+    Constants indicating the accuracy of the units expressed by depth data map values.
+
+ @constant AVDepthDataAccuracyRelative
+    Values within the depth data map are usable for foreground / background separation, but are not absolutely accurate in the physical world.
+ @constant AVDepthDataAccuracyAbsolute
+    Values within the depth map are absolutely accurate within the physical world.
+ 
+ @discussion
+    The accuracy of a depth data map is highly dependent on the camera calibration data used to generate it. If the camera's focal length cannot be precisely determined at the time of capture, scaling error in the z (depth) plane will be introduced. If the camera's optical center can't be precisely determined at capture time, principal point error will be introduced, leading to an offset error in the disparity estimate. AVDepthDataAccuracy constants report the accuracy of a map's values with respect to its reported units. If the accuracy is reported to be AVDepthDataAccuracyRelative, the values within the map are usable relative to one another (that is, larger depth values are farther away than smaller depth values), but do not accurately convey real world distance. Disparity maps with relative accuracy may still be used to reliably determine the difference in disparity between two points in the same map.
+*/
+typedef NS_ENUM(NSInteger, AVDepthDataAccuracy) {
+    AVDepthDataAccuracyRelative    = 0,
+    AVDepthDataAccuracyAbsolute    = 1,
+} NS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED;
+
+@class AVDepthDataInternal;
+
+/*!
+ @class AVDepthData
+ @abstract
+    An object wrapping a map of disparity or depth pixel data, plus metadata.
+ 
+ @discussion
+    "Depth Data" is a generic term for a map of pixel data containing depth-related information. AVDepthData wraps a disparity or depth map and provides conversion methods, focus information, and camera calibration data to aid in using the map for rendering or computer vision tasks. CoreVideo supports the following four depth data pixel formats:
+          kCVPixelFormatType_DisparityFloat16    = 'hdis'
+          kCVPixelFormatType_DisparityFloat32    = 'fdis'
+          kCVPixelFormatType_DepthFloat16        = 'hdep'
+          kCVPixelFormatType_DepthFloat32        = 'fdep'
+ 
+    The disparity formats describe normalized shift values when comparing two images. Units are 1/meters: ( pixelShift / (pixelFocalLength * baselineInMeters) ). 
+    The depth formats describe the distance to an object in meters.
+ 
+    Disparity / depth maps are generated from camera images containing non-rectilinear data. Camera lenses have small imperfections that cause small distortions in their resultant images compared to a pinhole camera. AVDepthData maps contain non-rectilinear (non-distortion-corrected) data as well. Their values are warped to match the lens distortion characteristics present in their accompanying YUV image. Therefore an AVDepthData map can be used as a proxy for depth when rendering effects to its accompanying image, but not to correlate points in 3D space. In order to use AVDepthData for computer vision tasks, you should use its accompanying camera calibration data to rectify the depth data (see AVCameraCalibrationData).
+ 
+    When capturing depth data from a camera using AVCaptureDepthDataOutput, AVDepthData objects are delivered to your AVCaptureDepthDataOutputDelegate in a streaming fashion. When capturing depth data along with photos using AVCapturePhotoOutput, depth data is delivered to your AVCapturePhotoCaptureDelegate as a property of an AVCapturePhoto (see -[AVCapturePhotoCaptureDelegate captureOutput:didFinishProcessingPhoto:error:]). When working with image files containing depth information, AVDepthData may be instantiated using information obtained from ImageIO. When editing images containing depth information, derivative AVDepthData objects may be instantiated reflecting the edits that have been performed.
+ */
+NS_CLASS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED
+@interface AVDepthData : NSObject
+{
+@private
+    AVDepthDataInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method depthDataFromDictionaryRepresentation:error:
+ @abstract
+    Returns an AVDepthData instance from depth information in an image file.
+ 
+ @param imageSourceAuxDataInfoDictionary
+    A dictionary of primitive depth-related information obtained from CGImageSourceCopyAuxiliaryDataInfoAtIndex.
+ @param outError
+    On return, if the depth data cannot be created, points to an NSError describing the problem.
+ @result
+    An AVDepthData instance, or nil if the auxiliary data info dictionary was malformed.
+ 
+ @discussion
+    When using ImageIO framework's CGImageSource API to read from a HEIF, JPEG, or DNG file containing depth data, AVDepthData can be instantiated using the result of CGImageSourceCopyAuxiliaryDataInfoAtIndex, which returns a CFDictionary of primitive map information.
+ */
++ (nullable instancetype)depthDataFromDictionaryRepresentation:(NSDictionary *)imageSourceAuxDataInfoDictionary error:(NSError * _Nullable * _Nullable)outError;
+
+/*!
+ @method depthDataByConvertingToDepthDataType:
+ @abstract
+    Returns a converted, derivative AVDepthData instance in the specified depthDataType.
+ 
+ @param depthDataType
+    The OSType of depthData object to which you'd like to convert. Must be present in availableDepthDataTypes.
+ @result
+    An AVDepthData instance.
+ 
+ @discussion
+    This method throws an NSInvalidArgumentException if you pass an unrecognized depthDataType. See availableDepthDataTypes for the set of types to which conversion is supported.
+ */
+- (instancetype)depthDataByConvertingToDepthDataType:(OSType)depthDataType;
+
+/*!
+ @method depthDataByApplyingExifOrientation:
+ @abstract
+    Returns a derivative AVDepthData instance in which the specified Exif orientation has been applied.
+ 
+ @param exifOrientation
+    One of the 8 standard Exif orientation tags expressing how the depth data should be rotated / mirrored.
+ @result
+    An AVDepthData instance.
+ 
+ @discussion
+    When applying simple 90 degree rotation or mirroring edits to media containing depth data, you may use this initializer to create a derivative copy of the depth in which the specified orientation is applied to both the underlying pixel map data and the camera calibration data. This method throws an NSInvalidArgumentException if you pass an unrecognized exifOrientation.
+ */
+- (instancetype)depthDataByApplyingExifOrientation:(CGImagePropertyOrientation)exifOrientation;
+
+/*!
+ @method depthDataByReplacingDepthDataMapWithPixelBuffer:error:
+ @abstract
+    Returns an AVDepthData instance wrapping the replacement depth data map pixel buffer.
+ 
+ @param pixelBuffer
+    A pixel buffer containing depth data information in one of the 4 supported disparity / depth pixel formats.
+ @param outError
+    On return, if the depth data cannot be created, points to an NSError describing the problem.
+ @result
+    An AVDepthData instance, or nil if the pixel buffer is malformed.
+ 
+ @discussion
+    When applying complex edits to media containing depth data, you may create a derivative map with arbitrary transforms applied to it, then use this initializer to create a new AVDepthData. Note that this new depth data object has no camera calibration data, so its cameraCalibrationData property always returns nil.
+ */
+- (nullable instancetype)depthDataByReplacingDepthDataMapWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError * _Nullable * _Nullable)outError;
+
+/*!
+ @property availableDepthDataTypes
+ @abstract
+    Specifies which depth data pixel formats may be used with depthDataByConvertingToDepthDataType:.
+ 
+ @discussion
+    This property presents the available pixel format types as an array of NSNumbers, each wrapping an OSType (CV pixel format type).
+ */
+@property(readonly) NSArray<NSNumber *> *availableDepthDataTypes;
+
+/*!
+ @method dictionaryRepresentationForAuxiliaryDataType:
+ @abstract
+    Returns a dictionary of primitive map information to be used when writing an image file with depth data.
+ 
+ @param outAuxDataType
+    On output, either kCGImageAuxiliaryDataTypeDisparity or kCGImageAuxiliaryDataTypeDepth, depending on the depth data's depthDataType.
+ @result
+    A dictionary of CGImageDestination compatible depth information, or nil if the auxDataType is unsupported.
+ 
+ @discussion
+    When using ImageIO framework's CGImageDestination API to write depth data to a HEIF, JPEG, or DNG file, you may use this method to generate a dictionary of primitive map information consumed by CGImageDestinationAddAuxiliaryDataInfo.
+ */
+- (nullable NSDictionary *)dictionaryRepresentationForAuxiliaryDataType:(NSString * _Nullable * _Nullable)outAuxDataType;
+
+/*!
+ @property depthDataType
+ @abstract
+    Specifies the pixel format type of this depth data object's internal map.
+ 
+ @discussion
+    One of kCVPixelFormatType_DisparityFloat16, kCVPixelFormatType_DisparityFloat32, kCVPixelFormatType_DepthFloat16, or kCVPixelFormatType_DepthFloat32.
+ */
+@property(readonly) OSType depthDataType;
+
+/*!
+ @property depthDataMap
+ @abstract
+    Provides access to the depth data object's internal map.
+ 
+ @discussion
+    The depth data map's pixel format can be queried using the depthDataType property.
+ */
+@property(readonly) __attribute__((NSObject)) CVPixelBufferRef depthDataMap NS_RETURNS_INNER_POINTER;
+
+/*!
+ @property depthDataFiltered
+ @abstract
+    Specifies whether the depth data pixel buffer map contains filtered (hole-filled) data.
+ 
+ @discussion
+    By setting either AVCaptureDepthDataOutput's filteringEnabled property or AVCapturePhotoSettings' depthDataFiltered property to YES, the resulting depth data are filtered to remove invalid pixel values that may be present due to a variety of factors including low light and lens occlusion. If you've requested depth data filtering, all depth data holes are filled. Note that filtering the depth data makes it more usable for applying effects, but alters the data such that it may no longer be suitable for computer vision tasks. Unfiltered depth maps present missing data as NaN.
+ */
+@property(readonly, getter=isDepthDataFiltered) BOOL depthDataFiltered;
+
+/*!
+ @property depthDataAccuracy
+ @abstract
+    Specifies the accuracy of the units in the depth data map's values.
+ 
+ @discussion
+    See AVDepthDataAccuracy documentation for more information.
+ */
+@property(readonly) AVDepthDataAccuracy depthDataAccuracy;
+
+/*!
+ @property cameraCalibrationData
+ @abstract
+    The calibration data of the camera with which AVDepthData map's values are aligned.
+ 
+ @discussion
+    See AVCameraCalibrationData for more information.
+ */
+@property(nullable, readonly) AVCameraCalibrationData *cameraCalibrationData;
+
+@end
+
+NS_ASSUME_NONNULL_END
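
As the class discussion notes, file-based depth data arrives via ImageIO. A hedged sketch of that path, normalizing to 32-bit depth when the conversion is available:

```swift
import AVFoundation
import CoreVideo
import ImageIO

// Sketch: build AVDepthData from a file's auxiliary data dictionary and
// convert it to kCVPixelFormatType_DepthFloat32 when supported.
func depthData(fromImageAt url: URL) -> AVDepthData? {
    guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
          let auxInfo = CGImageSourceCopyAuxiliaryDataInfoAtIndex(
              source, 0, kCGImageAuxiliaryDataTypeDisparity) as? [AnyHashable: Any],
          let depth = try? AVDepthData(fromDictionaryRepresentation: auxInfo)
    else { return nil }
    let target = kCVPixelFormatType_DepthFloat32
    guard depth.availableDepthDataTypes.contains(NSNumber(value: target)) else {
        return depth // conversion unsupported; keep the native format
    }
    return depth.converting(toDepthDataType: target)
}
```
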
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h	2016-09-12 23:29:50.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h	2017-05-24 00:41:54.000000000 -0400
@@ -3,7 +3,7 @@
  
 	Framework:  AVFoundation
  
-	Copyright 2010-2014 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
  */
 
@@ -101,4 +101,10 @@
 #endif
     AVErrorUnsupportedOutputSettings NS_AVAILABLE(10_12, 10_0) = -11861,
 	AVErrorOperationNotAllowed NS_AVAILABLE(10_12, 10_0) = -11862,
+	AVErrorContentIsUnavailable NS_AVAILABLE(10_13, 11_0) = -11863,
+	AVErrorFormatUnsupported NS_AVAILABLE(10_13, 11_0)  = -11864,
+	AVErrorMalformedDepth NS_AVAILABLE(10_13, 11_0)     = -11865,
+	AVErrorContentNotUpdated NS_AVAILABLE(10_13, 11_0)	= -11866,
+	AVErrorNoLongerPlayable	 NS_AVAILABLE(10_13, 11_0)	= -11867,
+	AVErrorNoCompatibleAlternatesForExternalDisplay API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED = -11868,
 };
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes
--- /Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes	2017-05-23 21:01:42.000000000 -0400
@@ -0,0 +1,2140 @@
+---
+Name: AVFoundation
+Classes:
+- Name: AVAssetDownloadURLSession
+  Methods:
+  - Selector: 'assetDownloadTaskWithURLAsset:destinationURL:options:'
+    SwiftName: makeAssetDownloadTask(asset:destinationURL:options:)
+    MethodKind: Instance
+  - Selector: 'assetDownloadTaskWithURLAsset:assetTitle:assetArtworkData:options:'
+    SwiftName: makeAssetDownloadTask(asset:assetTitle:assetArtworkData:options:)
+    MethodKind: Instance
+- Name: AVAssetWriterInput
+  Methods:
+  - Selector: 'appendSampleBuffer:'
+    SwiftName: append(_:)
+    MethodKind: Instance
+- Name: AVAssetWriterInputPixelBufferAdaptor
+  Methods:
+  - Selector: 'appendPixelBuffer:withPresentationTime:'
+    SwiftName: append(_:withPresentationTime:)
+    MethodKind: Instance
+- Name: AVAssetWriterInputMetadataAdaptor
+  Methods:
+  - Selector: 'appendTimedMetadataGroup:'
+    SwiftName: append(_:)
+    MethodKind: Instance
+- Name: AVCaptureAudioDataOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+  - Selector: 'recommendedAudioSettingsForAssetWriterWithOutputFileType:'
+    SwiftName: 'recommendedAudioSettingsForAssetWriter(writingTo:)'
+    MethodKind: Instance
+- Name: AVCaptureAudioFileOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+  - Selector: 'startRecordingToOutputFileURL:outputFileType:recordingDelegate:'
+    SwiftName: 'startRecording(to:outputFileType:recordingDelegate:)'
+    MethodKind: Instance
+- Name: AVCaptureAudioPreviewOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureAutoExposureBracketedStillImageSettings
+  Methods:
+  - Selector: 'autoExposureSettingsWithExposureTargetBias:'
+    SwiftName: 'autoExposureSettings(exposureTargetBias:)'
+    MethodKind: Class
+- Name: AVCaptureDepthDataOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureDevice
+  Methods:
+  - Selector: 'authorizationStatusForMediaType:'
+    SwiftName: 'authorizationStatus(for:)'
+    MethodKind: Class
+  - Selector: 'chromaticityValuesForDeviceWhiteBalanceGains:'
+    SwiftName: 'chromaticityValues(for:)'
+    MethodKind: Instance
+  - Selector: 'defaultDeviceWithDeviceType:mediaType:position:'
+    SwiftName: 'default(_:for:position:)'
+    MethodKind: Class
+  - Selector: 'defaultDeviceWithMediaType:'
+    SwiftName: 'default(for:)'
+    MethodKind: Class
+  - Selector: 'devicesWithMediaType:'
+    SwiftName: 'devices(for:)'
+    MethodKind: Class
+  - Selector: 'requestAccessForMediaType:completionHandler:'
+    SwiftName: 'requestAccess(for:completionHandler:)'
+    MethodKind: Class
+  - Selector: 'setExposureModeCustomWithDuration:ISO:completionHandler:'
+    SwiftName: 'setExposureModeCustom(duration:iso:completionHandler:)'
+    MethodKind: Instance
+  - Selector: 'setFocusModeLockedWithLensPosition:completionHandler:'
+    SwiftName: 'setFocusModeLocked(lensPosition:completionHandler:)'
+    MethodKind: Instance
+  - Selector: 'setTorchModeOnWithLevel:error:'
+    SwiftName: 'setTorchModeOn(level:)'
+    MethodKind: Instance
+  - Selector: 'setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:'
+    SwiftName: 'setWhiteBalanceModeLocked(with:completionHandler:)'
+    MethodKind: Instance
+  - Selector: 'supportsAVCaptureSessionPreset:'
+    SwiftName: 'supportsSessionPreset(_:)'
+    MethodKind: Instance
+  - Selector: 'temperatureAndTintValuesForDeviceWhiteBalanceGains:'
+    SwiftName: 'temperatureAndTintValues(for:)'
+    MethodKind: Instance
+- Name: AVCaptureDeviceDiscoverySession
+  SwiftName: AVCaptureDevice.DiscoverySession
+- Name: AVCaptureDeviceFormat
+  SwiftName: AVCaptureDevice.Format
+- Name: AVCaptureDeviceInputSource
+  SwiftName: AVCaptureDevice.InputSource
+- Name: AVCaptureFileOutput
+  Methods:
+  - Selector: 'startRecordingToOutputFileURL:recordingDelegate:'
+    SwiftName: 'startRecording(to:recordingDelegate:)'
+    MethodKind: Instance
+- Name: AVCaptureInputPort
+  SwiftName: AVCaptureInput.Port
+- Name: AVCaptureManualExposureBracketedStillImageSettings
+  Methods:
+  - Selector: 'manualExposureSettingsWithExposureDuration:ISO:'
+    SwiftName: 'manualExposureSettings(exposureDuration:iso:)'
+    MethodKind: Class
+- Name: AVCaptureMetadataOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureMovieFileOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+  - Selector: 'setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:'
+    SwiftName: 'setRecordsVideoOrientationAndMirroringChangesAsMetadataTrack(_:for:)'
+    MethodKind: Instance
+- Name: AVCaptureOutput
+  Methods:
+  - Selector: 'connectionWithMediaType:'
+    SwiftName: 'connection(with:)'
+    MethodKind: Instance
+  - Selector: 'metadataOutputRectOfInterestForRect:'
+    SwiftName: 'metadataOutputRectConverted(fromOutputRect:)'
+    MethodKind: Instance
+  - Selector: 'rectForMetadataOutputRectOfInterest:'
+    SwiftName: 'outputRectConverted(fromMetadataOutputRect:)'
+    MethodKind: Instance
+- Name: AVCapturePhotoOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureStillImageOutput
+  Methods:
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureVideoDataOutput
+  Methods:
+  - Selector: 'availableVideoCodecTypesForAssetWriterWithOutputFileType:'
+    SwiftName: 'availableVideoCodecTypesForAssetWriter(writingTo:)'
+    MethodKind: Instance
+  - Selector: 'new'
+    MethodKind: Class
+    Availability: nonswift
+    AvailabilityMsg: 'use object initializers instead'
+  - Selector: 'recommendedVideoSettingsForAssetWriterWithOutputFileType:'
+    SwiftName: 'recommendedVideoSettingsForAssetWriter(writingTo:)'
+    MethodKind: Instance
+- Name: AVCaptureVideoPreviewLayer
+  Methods:
+  - Selector: 'captureDevicePointOfInterestForPoint:'
+    SwiftName: 'captureDevicePointConverted(fromLayerPoint:)'
+    MethodKind: Instance
+  - Selector: 'metadataOutputRectOfInterestForRect:'
+    SwiftName: 'metadataOutputRectConverted(fromLayerRect:)'
+    MethodKind: Instance
+  - Selector: 'pointForCaptureDevicePointOfInterest:'
+    SwiftName: 'layerPointConverted(fromCaptureDevicePoint:)'
+    MethodKind: Instance
+  - Selector: 'rectForMetadataOutputRectOfInterest:'
+    SwiftName: 'layerRectConverted(fromMetadataOutputRect:)'
+    MethodKind: Instance
+- Name: AVFrameRateRange
+  SwiftName: AVCaptureDeviceFormat.FrameRateRange
+- Name: AVMutableComposition
+  Methods:
+  - Selector: 'insertTimeRange:ofAsset:atTime:error:'
+    SwiftName: insertTimeRange(_:of:at:)
+    MethodKind: Instance
+  - Selector: 'insertEmptyTimeRange:'
+    SwiftName: insertEmptyTimeRange(_:)
+    MethodKind: Instance
+  - Selector: 'removeTimeRange:'
+    SwiftName: removeTimeRange(_:)
+    MethodKind: Instance
+  - Selector: 'scaleTimeRange:toDuration:'
+    SwiftName: scaleTimeRange(_:toDuration:)
+    MethodKind: Instance
+  - Selector: 'compositionWithURLAssetInitializationOptions:'
+    SwiftName: 'init(urlAssetInitializationOptions:)'
+    MethodKind: Class
+- Name: AVCompositionTrackSegment
+  Methods:
+  - Selector: 'compositionTrackSegmentWithURL:trackID:sourceTimeRange:targetTimeRange:'
+    SwiftName: init(url:trackID:sourceTimeRange:targetTimeRange:)
+    MethodKind: Class
+- Name: AVMutableMovie
+  Methods:
+  - Selector: 'insertTimeRange:ofAsset:atTime:copySampleData:error:'
+    SwiftName: 'insertTimeRange(_:of:at:copySampleData:)'
+    MethodKind: Instance
+  - Selector: 'insertEmptyTimeRange:'
+    SwiftName: 'insertEmptyTimeRange(_:)'
+    MethodKind: Instance
+  - Selector: 'removeTimeRange:'
+    SwiftName: 'removeTimeRange(_:)'
+    MethodKind: Instance
+- Name: AVPlayer
+  Properties:
+  - Name: 'outputObscuredDueToInsufficientExternalProtection'
+    SwiftName: isOutputObscuredDueToInsufficientExternalProtection
+- Name: AVPlayerItem
+  Methods:
+  - Selector: 'selectMediaOption:inMediaSelectionGroup:'
+    SwiftName: select(_:in:)
+    MethodKind: Instance
+  - Selector: 'addOutput:'
+    SwiftName: add(_:)
+    MethodKind: Instance
+  - Selector: 'removeOutput:'
+    SwiftName: remove(_:)
+    MethodKind: Instance
+  - Selector: 'addMediaDataCollector:'
+    SwiftName: add(_:)
+    MethodKind: Instance
+  - Selector: 'removeMediaDataCollector:'
+    SwiftName: remove(_:)
+    MethodKind: Instance
+- Name: AVSampleCursor
+  Methods:
+  - Selector: 'stepByDecodeTime:wasPinned:'
+    SwiftName: step(byDecodeTime:wasPinned:)
+    MethodKind: Instance
+  - Selector: 'samplesWithEarlierDecodeTimeStampsMayHaveLaterPresentationTimeStampsThanCursor:'
+    SwiftName: maySamplesWithEarlierDecodeTimeStampsHavePresentationTimeStamps(laterThan:)
+    MethodKind: Instance
+  - Selector: 'samplesWithLaterDecodeTimeStampsMayHaveEarlierPresentationTimeStampsThanCursor:'
+    SwiftName: maySamplesWithLaterDecodeTimeStampsHavePresentationTimeStamps(earlierThan:)
+    MethodKind: Instance
+- Name: AVVideoComposition
+  Methods:
+  - Selector: 'videoCompositionWithPropertiesOfAsset:'
+    SwiftName: init(propertiesOf:)
+    MethodKind: Instance
+  - Selector: 'videoCompositionWithAsset:applyingCIFiltersWithHandler:'
+    SwiftName: init(asset:filterApplier:)
+    MethodKind: Instance
+- Name: AVMutableVideoCompositionLayerInstruction
+  Methods:
+  - Selector: 'setTransformRampFromStartTransform:toEndTransform:timeRange:'
+    SwiftName: setTransformRamp(fromStart:toEnd:timeRange:)
+    MethodKind: Instance
+  - Selector: 'setOpacityRampFromStartOpacity:toEndOpacity:timeRange:'
+    SwiftName: setOpacityRamp(fromStartOpacity:toEndOpacity:timeRange:)
+    MethodKind: Instance
+  - Selector: 'setCropRectangleRampFromStartCropRectangle:toEndCropRectangle:timeRange:'
+    SwiftName: setCropRectangleRamp(fromStartCropRectangle:toEndCropRectangle:timeRange:)
+    MethodKind: Instance
+- Name: AVAssetReader
+  Methods:
+  - Selector: 'canAddOutput:'
+    SwiftName: canAdd(_:)
+    MethodKind: Instance
+  - Selector: 'addOutput:'
+    SwiftName: add(_:)
+    MethodKind: Instance
+- Name: AVAssetResourceLoadingRequest
+  Methods:
+  - Selector: 'finishLoadingWithError:'
+    SwiftName: finishLoading(with:)
+    MethodKind: Instance
+- Name: AVAssetTrack
+  Methods:
+  - Selector: 'makeSampleCursorWithPresentationTimeStamp:'
+    SwiftName: makeSampleCursor(presentationTimeStamp:)
+    MethodKind: Instance
+- Name: AVAssetWriter
+  Methods:
+  - Selector: 'initWithURL:fileType:error:'
+    SwiftName: init(outputURL:fileType:)
+    MethodKind: Instance
+  - Selector: 'canApplyOutputSettings:forMediaType:'
+    SwiftName: canApply(outputSettings:forMediaType:)
+    MethodKind: Instance
+  - Selector: 'canAddInput:'
+    SwiftName: canAdd(_:)
+    MethodKind: Instance
+  - Selector: 'addInput:'
+    SwiftName: add(_:)
+    MethodKind: Instance
+  - Selector: 'canAddInputGroup:'
+    SwiftName: canAdd(_:)
+    MethodKind: Instance
+  - Selector: 'addInputGroup:'
+    SwiftName: add(_:)
+    MethodKind: Instance
+- Name: AVMutableAudioMixInputParameters
+  Methods:
+  - Selector: 'setVolumeRampFromStartVolume:toEndVolume:timeRange:'
+    SwiftName: setVolumeRamp(fromStartVolume:toEndVolume:timeRange:)
+    MethodKind: Instance
+- Name: AVMutableMediaSelection
+  Methods:
+  - Selector: 'selectMediaOption:inMediaSelectionGroup:'
+    SwiftName: select(_:in:)
+    MethodKind: Instance
+- Name: AVMovie
+  Methods:
+  - Selector: 'movieHeaderWithFileType:error:'
+    SwiftName: makeMovieHeader(fileType:)
+    MethodKind: Instance
+  - Selector: 'movieWithURL:options:'
+    SwiftName: init(url:options:)
+    MethodKind: Class
+- Name: AVMutableMovieTrack
+  Methods:
+  - Selector: 'appendSampleBuffer:decodeTime:presentationTime:error:'
+    SwiftName: append(_:decodeTime:presentationTime:)
+    MethodKind: Instance
+- Name: AVQueuePlayer
+  Methods:
+  - Selector: 'canInsertItem:afterItem:'
+    SwiftName: canInsert(_:after:)
+    MethodKind: Instance
+  - Selector: 'insertItem:afterItem:'
+    SwiftName: insert(_:after:)
+    MethodKind: Instance
+  - Selector: 'removeItem:'
+    SwiftName: remove(_:)
+    MethodKind: Instance
+- Name: NSValue
+  Methods:
+  - Selector: 'valueWithCMTime:'
+    SwiftName: init(time:)
+    MethodKind: Class
+  - Selector: 'valueWithCMTimeRange:'
+    SwiftName: init(timeRange:)
+    MethodKind: Class
+  - Selector: 'valueWithCMTimeMapping:'
+    SwiftName: init(timeMapping:)
+    MethodKind: Class
+  Properties:
+  - Name: CMTimeValue
+    SwiftName: timeValue
+  - Name: CMTimeRangeValue
+    SwiftName: timeRangeValue
+  - Name: CMTimeMappingValue
+    SwiftName: timeMappingValue
+- Name: NSCoder
+  Methods:
+  - Selector: 'decodeCMTimeForKey:'
+    SwiftName: decodeTime(forKey:)
+    MethodKind: Instance
+  - Selector: 'decodeCMTimeRangeForKey:'
+    SwiftName: decodeTimeRange(forKey:)
+    MethodKind: Instance
+  - Selector: 'decodeCMTimeMappingForKey:'
+    SwiftName: decodeTimeMapping(forKey:)
+    MethodKind: Instance
+- Name: AVAsynchronousVideoCompositionRequest
+  Methods:
+  - Selector: 'finishWithError:'
+    SwiftName: finish(with:)
+    MethodKind: Instance
+- Name: AVAsynchronousCIImageFilteringRequest
+  Methods:
+  - Selector: 'finishWithError:'
+    SwiftName: finish(with:)
+    MethodKind: Instance
+Protocols:
+- Name: AVCaptureAudioDataOutputSampleBufferDelegate
+  Methods:
+  - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+    SwiftName: 'captureOutput(_:didOutput:from:)'
+    MethodKind: Instance
+- Name: AVCaptureDataOutputSynchronizerDelegate
+  Methods:
+  - Selector: 'dataOutputSynchronizer:didOutputSynchronizedDataCollection:'
+    SwiftName: 'dataOutputSynchronizer(_:didOutput:)'
+    MethodKind: Instance
+- Name: AVCaptureDepthDataOutputDelegate
+  Methods:
+  - Selector: 'depthDataOutput:didOutputDepthData:timestamp:connection:'
+    SwiftName: 'depthDataOutput(_:didOutput:timestamp:connection:)'
+    MethodKind: Instance
+- Name: AVCaptureFileOutputDelegate
+  Methods:
+  - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+    SwiftName: 'fileOutput(_:didOutputSampleBuffer:from:)'
+    MethodKind: Instance
+  - Selector: 'captureOutputShouldProvideSampleAccurateRecordingStart:'
+    SwiftName: 'fileOutputShouldProvideSampleAccurateRecordingStart(_:)'
+    MethodKind: Instance
+    Parameters:
+    - Position: 0
+      Nullability: N
+      Type: 'AVCaptureFileOutput *'
+- Name: AVCaptureFileOutputRecordingDelegate
+  Methods:
+  - Selector: 'captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:'
+    SwiftName: 'fileOutput(_:didFinishRecordingTo:from:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:'
+    SwiftName: 'fileOutput(_:didPauseRecordingTo:from:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:'
+    SwiftName: 'fileOutput(_:didResumeRecordingTo:from:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:'
+    SwiftName: 'fileOutput(_:didStartRecordingTo:from:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:'
+    SwiftName: 'fileOutput(_:willFinishRecordingTo:from:error:)'
+    MethodKind: Instance
+- Name: AVCaptureMetadataOutputObjectsDelegate
+  Methods:
+  - Selector: 'captureOutput:didOutputMetadataObjects:fromConnection:'
+    SwiftName: 'metadataOutput(_:didOutput:from:)'
+    MethodKind: Instance
+    Parameters:
+    - Position: 0
+      Nullability: N
+      Type: 'AVCaptureMetadataOutput *'
+- Name: AVCapturePhotoCaptureDelegate
+  Methods:
+  - Selector: 'captureOutput:didFinishProcessingPhoto:error:'
+    SwiftName: 'photoOutput(_:didFinishProcessingPhoto:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:willBeginCaptureForResolvedSettings:'
+    SwiftName: 'photoOutput(_:willBeginCaptureFor:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:willCapturePhotoForResolvedSettings:'
+    SwiftName: 'photoOutput(_:willCapturePhotoFor:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didCapturePhotoForResolvedSettings:'
+    SwiftName: 'photoOutput(_:didCapturePhotoFor:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didFinishCaptureForResolvedSettings:error:'
+    SwiftName: 'photoOutput(_:didFinishCaptureFor:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:'
+    SwiftName: 'photoOutput(_:didFinishProcessingLivePhotoToMovieFileAt:duration:photoDisplayTime:resolvedSettings:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+    SwiftName: 'photoOutput(_:didFinishProcessingPhoto:previewPhoto:resolvedSettings:bracketSettings:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+    SwiftName: 'photoOutput(_:didFinishProcessingRawPhoto:previewPhoto:resolvedSettings:bracketSettings:error:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:'
+    SwiftName: 'photoOutput(_:didFinishRecordingLivePhotoMovieForEventualFileAt:resolvedSettings:)'
+    MethodKind: Instance
+- Name: AVCaptureVideoDataOutputSampleBufferDelegate
+  Methods:
+  - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+    SwiftName: 'captureOutput(_:didOutput:from:)'
+    MethodKind: Instance
+  - Selector: 'captureOutput:didDropSampleBuffer:fromConnection:'
+    SwiftName: 'captureOutput(_:didDrop:from:)'
+    MethodKind: Instance
+- Name: AVVideoCompositing
+  Methods:
+  - Selector: 'startVideoCompositionRequest:'
+    SwiftName: startRequest(_:)
+    MethodKind: Instance
+- Name: AVVideoCompositionValidationHandling
+  Methods:
+  - Selector: 'videoComposition:shouldContinueValidatingAfterFindingEmptyTimeRange:'
+    SwiftName: videoComposition(_:shouldContinueValidatingAfterFindingEmptyTimeRange:)
+    MethodKind: Instance
+Functions:
+- Name: AVMakeRectWithAspectRatioInsideRect
+  SwiftName: AVMakeRect(aspectRatio:insideRect:)
+Enumerators:
+- Name: AVCaptureColorSpace_sRGB
+  SwiftName: sRGB
+- Name: AVCaptureColorSpace_P3_D65
+  SwiftName: P3_D65
+- Name: AVCaptureDeviceTransportControlsNotPlayingMode
+  SwiftName: notPlaying
+- Name: AVCaptureDeviceTransportControlsPlayingMode
+  SwiftName: playing
+- Name: AVMovieWritingAddMovieHeaderToDestination
+  SwiftName: addMovieHeaderToDestination
+- Name: AVMusicSequenceLoadSMF_ChannelsToTracks
+  SwiftName: smfChannelsToTracks
+Tags:
+- Name: AVCaptureAutoFocusRangeRestriction
+  SwiftName: AVCaptureDevice.AutoFocusRangeRestriction
+- Name: AVCaptureAutoFocusSystem
+  SwiftName: AVCaptureDeviceFormat.AutoFocusSystem
+- Name: AVCaptureDevicePosition
+  SwiftName: AVCaptureDevice.Position
+- Name: AVCaptureDeviceTransportControlsPlaybackMode
+  SwiftName: AVCaptureDevice.TransportControlsPlaybackMode
+- Name: AVCaptureExposureMode
+  SwiftName: AVCaptureDevice.ExposureMode
+- Name: AVCaptureFlashMode
+  SwiftName: AVCaptureDevice.FlashMode
+- Name: AVCaptureFocusMode
+  SwiftName: AVCaptureDevice.FocusMode
+- Name: AVCaptureLensStabilizationStatus
+  SwiftName: AVCaptureDevice.LensStabilizationStatus
+- Name: AVCaptureOutputDataDroppedReason
+  SwiftName: AVCaptureOutput.DataDroppedReason
+- Name: AVCaptureSessionInterruptionReason
+  SwiftName: AVCaptureSession.InterruptionReason
+- Name: AVCaptureTorchMode
+  SwiftName: AVCaptureDevice.TorchMode
+- Name: AVCaptureWhiteBalanceMode
+  SwiftName: AVCaptureDevice.WhiteBalanceMode
+- Name: AVError
+  NSErrorDomain: AVFoundationErrorDomain
+Typedefs:
+- Name: AVCaptureDeviceTransportControlsSpeed
+  SwiftName: AVCaptureDevice.TransportControlsSpeed
+- Name: AVCaptureDeviceType
+  SwiftName: AVCaptureDevice.DeviceType
+- Name: AVCaptureSessionPreset
+  SwiftName: AVCaptureSession.Preset
+- Name: AVCaptureWhiteBalanceChromaticityValues
+  SwiftName: AVCaptureDevice.WhiteBalanceChromaticityValues
+- Name: AVCaptureWhiteBalanceGains
+  SwiftName: AVCaptureDevice.WhiteBalanceGains
+- Name: AVCaptureWhiteBalanceTemperatureAndTintValues
+  SwiftName: AVCaptureDevice.WhiteBalanceTemperatureAndTintValues
+- Name: AVMetadataObjectType
+  SwiftName: AVMetadataObject.ObjectType
+- Name: AVPlayerWaitingReason
+  SwiftName: AVPlayer.WaitingReason
+- Name: AVTrackAssociationType
+  SwiftName: AVAssetTrack.AssociationType
+Globals:
+# AVCaptureDevice constants
+- Name: AVCaptureExposureDurationCurrent
+  SwiftName: AVCaptureDevice.currentExposureDuration
+- Name: AVCaptureExposureTargetBiasCurrent
+  SwiftName: AVCaptureDevice.currentExposureTargetBias
+- Name: AVCaptureISOCurrent
+  SwiftName: AVCaptureDevice.currentISO
+- Name: AVCaptureLensPositionCurrent
+  SwiftName: AVCaptureDevice.currentLensPosition
+- Name: AVCaptureMaxAvailableTorchLevel
+  SwiftName: AVCaptureDevice.maxAvailableTorchLevel
+- Name: AVCaptureWhiteBalanceGainsCurrent
+  SwiftName: AVCaptureDevice.currentWhiteBalanceGains
+
+# AVCaptureSessionPreset
+- Name: AVCaptureSessionPreset320x240
+  SwiftName: qvga320x240
+- Name: AVCaptureSessionPreset352x288
+  SwiftName: cif352x288
+- Name: AVCaptureSessionPreset640x480
+  SwiftName: vga640x480
+- Name: AVCaptureSessionPreset960x540
+  SwiftName: qHD960x540
+- Name: AVCaptureSessionPreset1280x720
+  SwiftName: hd1280x720
+- Name: AVCaptureSessionPreset1920x1080
+  SwiftName: hd1920x1080
+- Name: AVCaptureSessionPreset3840x2160
+  SwiftName: hd4K3840x2160
+- Name: AVCaptureSessionPresetiFrame960x540
+  SwiftName: iFrame960x540
+- Name: AVCaptureSessionPresetiFrame1280x720
+  SwiftName: iFrame1280x720
+
+# AVFileType
+- Name: AVFileType3GPP
+  SwiftName: mobile3GPP
+- Name: AVFileType3GPP2
+  SwiftName: mobile3GPP2
+- Name: AVFileTypeAC3
+  SwiftName: ac3
+- Name: AVFileTypeAIFC
+  SwiftName: aifc
+- Name: AVFileTypeAIFF
+  SwiftName: aiff
+- Name: AVFileTypeAMR
+  SwiftName: amr
+- Name: AVFileTypeAVCI
+  SwiftName: avci
+- Name: AVFileTypeAppleM4A
+  SwiftName: m4a
+- Name: AVFileTypeAppleM4V
+  SwiftName: m4v
+- Name: AVFileTypeCoreAudioFormat
+  SwiftName: caf
+- Name: AVFileTypeDNG
+  SwiftName: dng
+- Name: AVFileTypeEnhancedAC3
+  SwiftName: eac3
+- Name: AVFileTypeHEIC
+  SwiftName: heic
+- Name: AVFileTypeHEIF
+  SwiftName: heif
+- Name: AVFileTypeJPEG
+  SwiftName: jpg
+- Name: AVFileTypeMPEG4
+  SwiftName: mp4
+- Name: AVFileTypeMPEGLayer3
+  SwiftName: mp3
+- Name: AVFileTypeQuickTimeMovie
+  SwiftName: mov
+- Name: AVFileTypeSunAU
+  SwiftName: au
+- Name: AVFileTypeTIFF
+  SwiftName: tif
+- Name: AVFileTypeWAVE
+  SwiftName: wav
+
+# AVMetadataExtraAttributeKey
+- Name: AVMetadataExtraAttributeValueURIKey
+  SwiftName: valueURI
+- Name: AVMetadataExtraAttributeBaseURIKey
+  SwiftName: baseURI
+- Name: AVMetadataExtraAttributeInfoKey
+  SwiftName: info
+
+# AVMetadataFormat
+- Name: AVMetadataFormatiTunesMetadata
+  SwiftName: iTunesMetadata
+
+# AVMetadataIdentifieriTunesMetadata
+- Name: AVMetadataIdentifieriTunesMetadataAlbum
+  SwiftName: iTunesMetadataAlbum
+- Name: AVMetadataIdentifieriTunesMetadataArtist
+  SwiftName: iTunesMetadataArtist
+- Name: AVMetadataIdentifieriTunesMetadataUserComment
+  SwiftName: iTunesMetadataUserComment
+- Name: AVMetadataIdentifieriTunesMetadataCoverArt
+  SwiftName: iTunesMetadataCoverArt
+- Name: AVMetadataIdentifieriTunesMetadataCopyright
+  SwiftName: iTunesMetadataCopyright
+- Name: AVMetadataIdentifieriTunesMetadataReleaseDate
+  SwiftName: iTunesMetadataReleaseDate
+- Name: AVMetadataIdentifieriTunesMetadataEncodedBy
+  SwiftName: iTunesMetadataEncodedBy
+- Name: AVMetadataIdentifieriTunesMetadataPredefinedGenre
+  SwiftName: iTunesMetadataPredefinedGenre
+- Name: AVMetadataIdentifieriTunesMetadataUserGenre
+  SwiftName: iTunesMetadataUserGenre
+- Name:
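
A brief illustration of how a few of these apinotes renamings read at the Swift call site (assuming an iOS-style capture target, since several of these classes are unavailable on tvOS):

```swift
import AVFoundation
import CoreGraphics

// Sketch: Swift names defined by the apinotes above.
let device = AVCaptureDevice.default(for: .video)        // defaultDeviceWithMediaType:
AVCaptureDevice.requestAccess(for: .video) { granted in  // requestAccessForMediaType:completionHandler:
    // proceed with capture setup if `granted`
}
let fitted = AVMakeRect(aspectRatio: CGSize(width: 16, height: 9),
                        insideRect: CGRect(x: 0, y: 0, width: 100, height: 100))
                                                         // AVMakeRectWithAspectRatioInsideRect
```
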