AVFoundation watchOS xcode14.0 beta1
Manuel de la Pena edited this page Nov 6, 2022 · 3 revisions
# AVFoundation.framework https://github.com/xamarin/xamarin-macios/pull/16408
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h 2022-02-23 07:59:46.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h 2022-06-03 18:09:33.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -77,19 +77,19 @@
/* Indicates the duration of the asset. If @"providesPreciseDurationAndTiming" is NO, a best-available estimate of the duration is returned. The degree of precision preferred for timing-related properties can be set at initialization time for assets initialized with URLs. See AVURLAssetPreferPreciseDurationAndTimingKey for AVURLAsset below.
*/
-@property (nonatomic, readonly) CMTime duration;
+@property (nonatomic, readonly) CMTime duration AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.duration) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the natural rate at which the asset is to be played; often but not always 1.0
*/
-@property (nonatomic, readonly) float preferredRate;
+@property (nonatomic, readonly) float preferredRate AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.preferredRate) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the preferred volume at which the audible media of an asset is to be played; often but not always 1.0
*/
-@property (nonatomic, readonly) float preferredVolume;
+@property (nonatomic, readonly) float preferredVolume AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.preferredVolume) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the preferred transform to apply to the visual content of the asset for presentation or processing; the value is often but not always the identity transform
*/
-@property (nonatomic, readonly) CGAffineTransform preferredTransform;
+@property (nonatomic, readonly) CGAffineTransform preferredTransform AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.preferredTransform) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* The following property is deprecated. Instead, use the naturalSize and preferredTransform, as appropriate, of the receiver's video tracks. See -tracksWithMediaType: below.
*/
@@ -99,14 +99,24 @@
@property preferredDisplayCriteria
@abstract Guides to a display mode that is optimal for playing this particular asset.
*/
-@property (nonatomic, readonly) AVDisplayCriteria *preferredDisplayCriteria API_AVAILABLE(tvos(11.2)) API_UNAVAILABLE(ios) API_UNAVAILABLE(macos, watchos);
+@property (nonatomic, readonly) AVDisplayCriteria *preferredDisplayCriteria
+#if __swift__
+API_DEPRECATED("Use load(.preferredDisplayCriteria) instead", tvos(11.2, 16.0)) API_UNAVAILABLE(ios) API_UNAVAILABLE(macos, watchos);
+#else
+API_AVAILABLE(tvos(11.2)) API_UNAVAILABLE(ios) API_UNAVAILABLE(macos, watchos);
+#endif
/*!
@property minimumTimeOffsetFromLive
@abstract Indicates how close to the latest content in a live stream playback can be sustained.
@discussion For non-live assets this value is kCMTimeInvalid.
*/
-@property (nonatomic, readonly) CMTime minimumTimeOffsetFromLive API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+@property (nonatomic, readonly) CMTime minimumTimeOffsetFromLive
+#if __swift__
+API_DEPRECATED("Use load(.minimumTimeOffsetFromLive) instead", macos(10.15, 13.0), ios(13.0, 16.0), tvos(13.0, 16.0), watchos(6.0, 9.0));
+#else
+API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+#endif
@end
@@ -115,7 +125,7 @@
/* Indicates that the asset provides precise timing. See @"duration" above and AVURLAssetPreferPreciseDurationAndTimingKey below.
*/
-@property (nonatomic, readonly) BOOL providesPreciseDurationAndTiming;
+@property (nonatomic, readonly) BOOL providesPreciseDurationAndTiming AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.providesPreciseDurationAndTiming) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method cancelLoading
@@ -176,7 +186,7 @@
@property tracks
@abstract Provides the array of AVAssetTracks contained by the asset
*/
-@property (nonatomic, readonly) NSArray<AVAssetTrack *> *tracks;
+@property (nonatomic, readonly) NSArray<AVAssetTrack *> *tracks AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.tracks) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method trackWithTrackID:
@@ -186,7 +196,12 @@
@result An instance of AVAssetTrack; may be nil if no track of the specified trackID is available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (nullable AVAssetTrack *)trackWithTrackID:(CMPersistentTrackID)trackID;
+- (nullable AVAssetTrack *)trackWithTrackID:(CMPersistentTrackID)trackID
+#if __swift__
+API_DEPRECATED("Use loadTrack(withTrackID:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTrackWithTrackID:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTrackWithTrackID:completionHandler:
@@ -206,7 +221,12 @@
@result An NSArray of AVAssetTracks; may be empty if no tracks of the specified media type are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaType:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaType:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaType:completionHandler:
@@ -226,7 +246,12 @@
@result An NSArray of AVAssetTracks; may be empty if no tracks with the specified characteristic are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaCharacteristic:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaCharacteristic:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaCharacteristic:completionHandler:
@@ -246,7 +271,12 @@
@discussion
The value of this property is an NSArray of AVAssetTrackGroups, each representing a different grouping of tracks in the receiver.
*/
-@property (nonatomic, readonly) NSArray<AVAssetTrackGroup *> *trackGroups API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVAssetTrackGroup *> *trackGroups
+#if __swift__
+API_DEPRECATED("Use load(.trackGroups) instead", macos(10.9, 13.0), ios(7.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+#endif
@end
@@ -257,23 +287,33 @@
/* Indicates the creation date of the asset as an AVMetadataItem. May be nil. If a creation date has been stored by the asset in a form that can be converted to an NSDate, the dateValue property of the AVMetadataItem will provide an instance of NSDate. Otherwise the creation date is available only as a string value, via -[AVMetadataItem stringValue].
*/
-@property (nonatomic, readonly, nullable) AVMetadataItem *creationDate API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, nullable) AVMetadataItem *creationDate
+#if __swift__
+API_DEPRECATED("Use load(.creationDate) instead", macos(10.8, 13.0), ios(5.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+#endif
/* Provides access to the lyrics of the asset suitable for the current locale.
*/
-@property (nonatomic, readonly, nullable) NSString *lyrics;
+@property (nonatomic, readonly, nullable) NSString *lyrics AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.lyrics) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* Provides access to an array of AVMetadataItems for each common metadata key for which a value is available; items can be filtered according to language via +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:] and according to identifier via +[AVMetadataItem metadataItemsFromArray:filteredByIdentifier:].
*/
-@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.commonMetadata) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* Provides access to an array of AVMetadataItems for all metadata identifiers for which a value is available; items can be filtered according to language via +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:] and according to identifier via +[AVMetadataItem metadataItemsFromArray:filteredByIdentifier:].
*/
-@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata
+#if __swift__
+API_DEPRECATED("Use load(.metadata) instead", macos(10.10, 13.0), ios(8.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+#endif
/* Provides an NSArray of NSStrings, each representing a metadata format that's available to the asset (e.g. ID3, iTunes metadata, etc.). Metadata formats are defined in AVMetadataFormat.h.
*/
-@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.availableMetadataFormats) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method metadataForFormat:
@@ -283,7 +323,12 @@
@result An NSArray containing AVMetadataItems; may be empty if there is no metadata of the specified format.
@discussion Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
*/
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format
+#if __swift__
+API_DEPRECATED("Use loadMetadata(for:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadMetadataForFormat:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadMetadataForFormat:completionHandler:
@@ -304,7 +349,12 @@
/* array of NSLocale
*/
-@property (readonly) NSArray<NSLocale *> *availableChapterLocales API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (readonly) NSArray<NSLocale *> *availableChapterLocales
+#if __swift__
+API_DEPRECATED("Use load(.availableChapterLocales) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+#endif
/*!
@method chapterMetadataGroupsWithTitleLocale:containingItemsWithCommonKeys:
@@ -322,7 +372,12 @@
Further filtering of the metadata items in AVTimedMetadataGroups according to language can be accomplished using +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:]; filtering of the metadata items according to locale can be accomplished using +[AVMetadataItem metadataItemsFromArray:withLocale:].
*/
-- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys
+#if __swift__
+API_DEPRECATED("Use loadChapterMetadataGroups(withTitleLocale:containingItemsWithCommonKeys:) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadChapterMetadataGroupsWithTitleLocale:containingItemsWithCommonKeys:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.3, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadChapterMetadataGroupsWithTitleLocale:containingItemsWithCommonKeys:completionHandler:
@@ -359,7 +414,12 @@
Further filtering of the metadata items in AVTimedMetadataGroups according to language can be accomplished using +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:]; filtering of the metadata items according to locale can be accomplished using +[AVMetadataItem metadataItemsFromArray:withLocale:].
.
*/
-- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsBestMatchingPreferredLanguages:(NSArray<NSString *> *)preferredLanguages API_AVAILABLE(macos(10.8), ios(6.0), tvos(9.0), watchos(1.0));
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsBestMatchingPreferredLanguages:(NSArray<NSString *> *)preferredLanguages
+#if __swift__
+API_DEPRECATED("Use loadChapterMetadataGroups(bestMatchingPreferredLanguages:) instead", macos(10.8, 13.0), ios(6.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadChapterMetadataGroupsBestMatchingPreferredLanguages:completionHandler", macos(10.8, API_TO_BE_DEPRECATED), ios(6.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadChapterMetadataGroupsBestMatchingPreferredLanguages:completionHandler:
@@ -386,7 +446,12 @@
/* Provides an NSArray of NSStrings, each NSString indicating a media characteristic for which a media selection option is available.
*/
-@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions
+#if __swift__
+API_DEPRECATED("Use load(.availableMediaCharacteristicsWithMediaSelectionOptions) instead", macos(10.8, 13.0), ios(5.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+#endif
/*!
@method mediaSelectionGroupForMediaCharacteristic:
@@ -405,7 +470,12 @@
Filtering of the options in the returned AVMediaSelectionGroup according to playability, locale, and additional media characteristics can be accomplished using the category AVMediaSelectionOptionFiltering defined on AVMediaSelectionGroup.
*/
-- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if __swift__
+API_DEPRECATED("Use loadMediaSelectionGroup(for:) instead", macos(10.8, 13.0), ios(5.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadMediaSelectionGroupForMediaCharacteristic:completionHandler:", macos(10.8, API_TO_BE_DEPRECATED), ios(5.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadMediaSelectionGroupForMediaCharacteristic:completionHandler:
@@ -428,13 +498,23 @@
@property preferredMediaSelection
@abstract Provides an instance of AVMediaSelection with default selections for each of the receiver's media selection groups.
*/
-@property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0), watchos(2.0));
+@property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection
+#if __swift__
+API_DEPRECATED("Use load(.preferredMediaSelection) instead", macos(10.11, 13.0), ios(9.0, 16.0), tvos(9.0, 16.0), watchos(2.0, 9.0));
+#else
+API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0), watchos(2.0));
+#endif
/*!
@property allMediaSelections
@abstract Provides an array of all permutations of AVMediaSelection for this asset.
*/
-@property (nonatomic, readonly) NSArray <AVMediaSelection *> *allMediaSelections API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+@property (nonatomic, readonly) NSArray <AVMediaSelection *> *allMediaSelections
+#if __swift__
+API_DEPRECATED("Use load(.allMediaSelections) instead", macos(10.13, 13.0), ios(11.0, 16.0), tvos(11.0, 16.0), watchos(4.0, 9.0));
+#else
+API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+#endif
@end
@@ -446,7 +526,12 @@
@abstract Indicates whether or not the asset has protected content.
@discussion Assets containing protected content may not be playable without successful authorization, even if the value of the "playable" property is YES. See the properties in the AVAssetUsability category for details on how such an asset may be used. On OS X, clients can use the interfaces in AVPlayerItemProtectedContentAdditions.h to request authorization to play the asset.
*/
-@property (nonatomic, readonly) BOOL hasProtectedContent API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL hasProtectedContent
+#if __swift__
+API_DEPRECATED("Use load(.hasProtectedContent) instead", macos(10.7, 13.0), ios(4.2, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
@end
@@ -459,21 +544,36 @@
@discussion For QuickTime movie files and MPEG-4 files, the value of canContainFragments is YES if an 'mvex' box is present in the 'moov' box. For those types, the 'mvex' box signals the possible presence of later 'moof' boxes.
*/
-@property (nonatomic, readonly) BOOL canContainFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL canContainFragments
+#if __swift__
+API_DEPRECATED("Use load(.canContainFragments) instead", macos(10.11, 13.0), ios(9.0, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
/*!
@property containsFragments
@abstract Indicates whether the asset is extended by at least one fragment.
@discussion For QuickTime movie files and MPEG-4 files, the value of this property is YES if canContainFragments is YES and at least one 'moof' box is present after the 'moov' box.
*/
-@property (nonatomic, readonly) BOOL containsFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL containsFragments
+#if __swift__
+API_DEPRECATED("Use load(.containsFragments) instead", macos(10.11, 13.0), ios(9.0, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
/*!
@property overallDurationHint
@abstract Indicates the total duration of fragments that either exist now or may be appended in the future in order to extend the duration of the asset.
@discussion For QuickTime movie files and MPEG-4 files, the value of this property is obtained from the 'mehd' box of the 'mvex' box, if present. If no total fragment duration hint is available, the value of this property is kCMTimeInvalid.
*/
-@property (nonatomic, readonly) CMTime overallDurationHint API_AVAILABLE(macos(10.12.2), ios(10.2), tvos(10.2), watchos(3.2));
+@property (nonatomic, readonly) CMTime overallDurationHint
+#if __swift__
+API_DEPRECATED("Use load(.overallDurationHint) instead", macos(10.12.2, 13.0), ios(10.2, 16.0), tvos(10.2, 16.0), watchos(3.2, 9.0));
+#else
+API_AVAILABLE(macos(10.12.2), ios(10.2), tvos(10.2), watchos(3.2));
+#endif
@end
@@ -485,30 +585,60 @@
@abstract Indicates whether an AVPlayer can play the contents of the asset in a manner that meets user expectations.
@discussion A client can attempt playback when playable is NO, this however may lead to a substandard playback experience.
*/
-@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable
+#if __swift__
+API_DEPRECATED("Use load(.isPlayable) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+#endif
/* indicates whether an AVAssetExportSession can be used with the receiver for export
*/
-@property (nonatomic, readonly, getter=isExportable) BOOL exportable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isExportable) BOOL exportable
+#if __swift__
+API_DEPRECATED("Use load(.isExportable) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
/* indicates whether an AVAssetReader can be used with the receiver for extracting media data
*/
-@property (nonatomic, readonly, getter=isReadable) BOOL readable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isReadable) BOOL readable
+#if __swift__
+API_DEPRECATED("Use load(.isReadable) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
/* indicates whether the receiver can be used to build an AVMutableComposition
*/
-@property (nonatomic, readonly, getter=isComposable) BOOL composable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isComposable) BOOL composable
+#if __swift__
+API_DEPRECATED("Use load(.isComposable) instead", macos(10.7, 13.0), ios(4.3, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+#endif
/* indicates whether the receiver can be written to the saved photos album
*/
-@property (nonatomic, readonly, getter=isCompatibleWithSavedPhotosAlbum) BOOL compatibleWithSavedPhotosAlbum API_AVAILABLE(ios(5.0), tvos(9.0)) API_UNAVAILABLE(macos, watchos);
+@property (nonatomic, readonly, getter=isCompatibleWithSavedPhotosAlbum) BOOL compatibleWithSavedPhotosAlbum
+#if __swift__
+API_DEPRECATED("Use load(.isCompatibleWithSavedPhotosAlbum) instead", ios(5.0, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(macos, watchos);
+#else
+API_AVAILABLE(ios(5.0), tvos(9.0)) API_UNAVAILABLE(macos, watchos);
+#endif
/*!
@property compatibleWithAirPlayVideo
@abstract Indicates whether the asset is compatible with AirPlay Video.
@discussion YES if an AVPlayerItem initialized with the receiver can be played by an external device via AirPlay Video.
*/
-@property (nonatomic, readonly, getter=isCompatibleWithAirPlayVideo) BOOL compatibleWithAirPlayVideo API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isCompatibleWithAirPlayVideo) BOOL compatibleWithAirPlayVideo
+#if __swift__
+API_DEPRECATED("Use load(.isCompatibleWithAirPlayVideo) instead", macos(10.11, 13.0), ios(9.0, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
+#else
+API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif
@end
@@ -605,6 +735,25 @@
*/
AVF_EXPORT NSString *const AVURLAssetURLRequestAttributionKey API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
+/*!
+ @constant AVURLAssetHTTPUserAgentKey
+ @abstract
+ Specifies the value of the User-Agent header to add to HTTP requests made by this asset.
+ @discussion
+ Value is an NSString
+ Default value is the system's default User-Agent.
+*/
+AVF_EXPORT NSString *const AVURLAssetHTTPUserAgentKey API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @constant AVURLAssetPrimarySessionIdentifierKey
+ @abstract
+ Specifies a UUID to append as the value of the query parameter "_HLS_primary_id" to selected HTTP requests issued on behalf of the asset. Supported for HLS assets only.
+ @discussion
+ Value is an NSUUID. Its UUID string value will be used as the query parameter.
+ If you create AVURLAssets for the templateItems of AVPlayerInterstitialEvents and you want the instances of AVURLAsset that you create to be used during interstitial playback rather than equivalent AVURLAssets with the same URL, you must provide a value for this key that's equal to the httpSessionIdentifier of the primary AVPlayerItem's asset. See AVPlayerInterstitialEventController.h. This is especially useful if you require the use of a custom AVAssetResourceLoader delegate for interstitial assets.
+*/
+AVF_EXPORT NSString *const AVURLAssetPrimarySessionIdentifierKey API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@class AVURLAsset
@@ -672,10 +821,23 @@
*/
- (instancetype)initWithURL:(NSURL *)URL options:(nullable NSDictionary<NSString *, id> *)options NS_DESIGNATED_INITIALIZER;
-/* indicates the URL with which the instance of AVURLAsset was initialized
+/*!
+ @property URL
+ @abstract
+ Indicates the URL with which the instance of AVURLAsset was initialized.
*/
@property (nonatomic, readonly, copy) NSURL *URL;
+/*!
+ @property httpSessionIdentifier
+ @abstract
+ Provides the identifier that's automatically included in any HTTP request issued on behalf of this asset in the HTTP header field "X-Playback-Session-Id".
+ @discussion
+ The value is an NSUUID from which the UUID string can be obtained.
+ Note that copies of an AVURLAsset vend an equivalent httpSessionIdentifier.
+*/
+@property (nonatomic, readonly) NSUUID *httpSessionIdentifier API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
@end
@@ -719,7 +881,12 @@
Finds a track of the target with content that can be accommodated by the specified composition track.
The logical complement of -[AVMutableComposition mutableTrackCompatibleWithTrack:].
*/
-- (nullable AVAssetTrack *)compatibleTrackForCompositionTrack:(AVCompositionTrack *)compositionTrack;
+- (nullable AVAssetTrack *)compatibleTrackForCompositionTrack:(AVCompositionTrack *)compositionTrack
+#if __swift__
+API_DEPRECATED("Use findCompatibleTrack(for:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("findCompatibleTrackForCompositionTrack:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method findCompatibleTrackForCompositionTrack:completionHandler:
@@ -744,7 +911,12 @@
@abstract Provides an array of AVAssetVariants contained in the asset
@discussion Some variants may not be playable according to the current device configuration.
*/
-@property (nonatomic, readonly) NSArray<AVAssetVariant *> *variants API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
+@property (nonatomic, readonly) NSArray<AVAssetVariant *> *variants
+#if __swift__
+API_DEPRECATED("Use load(.variants) instead", macos(10.12, 13.0), ios(15.0, 16.0), tvos(15.0, 16.0), watchos(8.0, 9.0));
+#else
+API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
+#endif
@end
@@ -842,7 +1014,7 @@
@abstract The tracks in an asset.
@discussion The value of this property is an array of tracks the asset contains; the tracks are of type AVFragmentedAssetTrack.
*/
-@property (nonatomic, readonly) NSArray<AVFragmentedAssetTrack *> *tracks;
+@property (nonatomic, readonly) NSArray<AVFragmentedAssetTrack *> *tracks AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.tracks) instead", macos(10.11, 13.0), ios(12.0, 16.0), tvos(12.0, 16.0), watchos(6.0, 9.0));
@end
@@ -856,7 +1028,12 @@
@result An instance of AVFragmentedAssetTrack; may be nil if no track of the specified trackID is available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (nullable AVFragmentedAssetTrack *)trackWithTrackID:(CMPersistentTrackID)trackID;
+- (nullable AVFragmentedAssetTrack *)trackWithTrackID:(CMPersistentTrackID)trackID
+#if __swift__
+API_DEPRECATED("Use loadTrack(withTrackID:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTrackWithTrackID:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTrackWithTrackID:completionHandler:
@@ -876,7 +1053,12 @@
@result An NSArray of AVFragmentedAssetTracks; may be empty if no tracks of the specified media type are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaType:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaType:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaType:completionHandler:
@@ -896,7 +1078,12 @@
@result An NSArray of AVFragmentedAssetTracks; may be empty if no tracks with the specified characteristic are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaCharacteristic:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaCharacteristic:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaCharacteristic:completionHandler:
@@ -950,6 +1137,7 @@
/*!
@property mindingInterval
@abstract An NSTimeInterval indicating how often a check for additional fragments should be performed. The default interval is 10.0.
+ @discussion This property throws an exception if a value is set less than one millisecond (0.001) in duration.
*/
@property (nonatomic) NSTimeInterval mindingInterval;
@@ -964,6 +1152,7 @@
@abstract Adds a fragmented asset to the array of assets being minded.
@param asset
The fragmented asset to add to the minder.
+ @discussion This method throws an exception if the asset is not a supported type (AVFragmentedAsset, AVFragmentedMovie), or if the asset is already being minded by another fragment minder.
*/
- (void)addFragmentedAsset:(AVAsset<AVFragmentMinding> *)asset;
@@ -972,12 +1161,12 @@
@abstract Removes a fragmented asset from the array of assets being minded.
@param asset
The fragmented asset to remove from the minder.
+ @discussion This method throws an exception if the asset is not a supported type (AVFragmentedAsset, AVFragmentedMovie).
*/
- (void)removeFragmentedAsset:(AVAsset<AVFragmentMinding> *)asset;
@end
-API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVURLAsset (AVURLAssetContentKeyEligibility) <AVContentKeyRecipient>
/*!
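Most of the AVAsset.h changes above deprecate the synchronous getters (duration, tracks, metadata, and so on) for Swift callers and point to the async `load(...)` / `loadTracks(...)` replacements named in the deprecation messages; the same hunks introduce the `AVURLAssetHTTPUserAgentKey` and `AVURLAssetPrimarySessionIdentifierKey` option keys. A minimal Swift sketch of the replacement API is below; the URL and User-Agent string are placeholders, and the new keys require macOS 13 / iOS 16 / tvOS 16 / watchOS 9.

```swift
import AVFoundation

// Minimal sketch of the async replacements; the URL and User-Agent value are placeholders.
func inspect(assetAt url: URL) async throws {
    // New in this SDK: AVURLAssetHTTPUserAgentKey sets the User-Agent for HTTP requests.
    let asset = AVURLAsset(url: url, options: [AVURLAssetHTTPUserAgentKey: "MyApp/1.0"])

    // Replaces the Swift-deprecated synchronous `duration` and `isPlayable` getters.
    let (duration, isPlayable) = try await asset.load(.duration, .isPlayable)

    // Replaces -tracksWithMediaType:, deprecated in favor of loadTracks(withMediaType:).
    let videoTracks = try await asset.loadTracks(withMediaType: .video)

    print(duration.seconds, isPlayable, videoTracks.count)
}
```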
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h 2022-02-23 07:10:12.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h 2022-05-31 14:52:19.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -103,6 +103,22 @@
};
/*!
+ @typedef AVAssetTrackGroupOutputHandling
+ @abstract A bitfield type that specifies output handling policies for alternate tracks in a track group.
+
+ @constant AVAssetTrackGroupOutputHandlingNone
+ @abstract No specific processing directives are applied to alternate tracks. The output is produced without regard to alternate track group assignments in the original asset.
+ @constant AVAssetTrackGroupOutputHandlingPreserveAlternateTracks
+ @abstract Preserve alternate tracks via pass-through.
+ */
+typedef NS_OPTIONS(NSUInteger, AVAssetTrackGroupOutputHandling) {
+ AVAssetTrackGroupOutputHandlingNone = 0UL,
+ AVAssetTrackGroupOutputHandlingPreserveAlternateTracks = (1UL << 0),
+
+ AVAssetTrackGroupOutputHandlingDefaultPolicy = AVAssetTrackGroupOutputHandlingNone
+} API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!
@class AVAssetExportSession
@abstract An AVAssetExportSession creates a new timed media resource from the contents of an
@@ -173,7 +189,7 @@
Setting the value of this property to a file type that's not among the session's supported file types will result in an NSInvalidArgumentException. See supportedFileTypes. */
@property (nonatomic, copy, nullable) AVFileType outputFileType;
-/* Indicates the URL of the export session's output. You may use UTTypeCopyPreferredTagWithClass(outputFileType, kUTTagClassFilenameExtension) to obtain an appropriate path extension for the outputFileType you have specified. For more information about UTTypeCopyPreferredTagWithClass and kUTTagClassFilenameExtension, on iOS see <CoreServices/UTType.h> and on Mac OS X see <LaunchServices/UTType.h>. */
+/* Indicates the URL of the export session's output. You may use [[UTType typeWithIdentifier:outputFileType] preferredFilenameExtension] to obtain an appropriate path extension for the outputFileType you have specified. For more information, see <UniformTypeIdentifiers/UTType.h>. */
@property (nonatomic, copy, nullable) NSURL *outputURL;
/* indicates that the output file should be optimized for network use, e.g. that a QuickTime movie file should support "fast start" */
@@ -213,6 +229,7 @@
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionPresets)
/*!
@@ -239,7 +256,7 @@
The array is a complete list of the valid identifiers that can be used as arguments to
initWithAsset:presetName: with the specified asset.
*/
-+ (NSArray<NSString *> *)exportPresetsCompatibleWithAsset:(AVAsset *)asset;
++ (NSArray<NSString *> *)exportPresetsCompatibleWithAsset:(AVAsset *)asset API_DEPRECATED_WITH_REPLACEMENT("determineCompatibilityOfExportPreset:withAsset:outputFileType:completionHandler:", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0)) API_UNAVAILABLE(watchos);
/*!
@method determineCompatibilityOfExportPreset:withAsset:outputFileType:completionHandler:
@@ -257,6 +274,7 @@
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionFileTypes)
/* Indicates the types of files the target can write, according to the preset the target was initialized with.
@@ -274,6 +292,7 @@
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionDurationAndLength)
/* Specifies a time range to be exported from the source. The default timeRange of an export session is kCMTimeZero..kCMTimePositiveInfinity, meaning that the full duration of the asset will be exported. */
@@ -308,6 +327,7 @@
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionMetadata)
/* Specifies an NSArray of AVMetadataItems that are to be written to the output file by the export session.
@@ -322,6 +342,7 @@
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionMediaProcessing)
/* Indicates the processing algorithm used to manage audio pitch for scaled audio edits.
@@ -338,8 +359,21 @@
/* Indicates the custom video compositor instance used, if any */
@property (nonatomic, readonly, nullable) id <AVVideoCompositing> customVideoCompositor API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+/*!
+ @property audioTrackGroupHandling
+ @abstract Defines export policy for handling alternate audio tracks
+
+ @discussion
+ Specifies the handling of audio tracks that are members of the same alternate track group corresponding to an exported audio track in the source asset.
+ If no audio track group is present, the value of this property has no effect.
+ If necessary, use the trackGroups property of AVAsset to determine whether any audio track groups are present.
+ The AVAudioMix property is not allowed to be used when also specifying alternate track output handling. An exception will be thrown if both are specified.
+*/
+@property (nonatomic) AVAssetTrackGroupOutputHandling audioTrackGroupHandling API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
@end
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetExportSession (AVAssetExportSessionMultipass)
/*!
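The AVAssetExportSession.h changes add the `AVAssetTrackGroupOutputHandling` option set and the `audioTrackGroupHandling` property (unavailable on watchOS), and put explicit availability on the category declarations. A hedged Swift sketch of the new property follows, assuming the usual NS_OPTIONS import as `.preserveAlternateTracks`; the preset, file type, and output URL are placeholders.

```swift
import AVFoundation

// Sketch only: preset, file type, and output URL are placeholders.
func exportKeepingAlternateAudio(_ asset: AVAsset, to outputURL: URL) {
    guard let session = AVAssetExportSession(asset: asset,
                                             presetName: AVAssetExportPresetHighestQuality) else { return }
    session.outputURL = outputURL
    session.outputFileType = .mov

    // New property: pass alternate audio tracks (track groups) through to the output.
    // Per the header comment, do not combine this with an audioMix.
    session.audioTrackGroupHandling = .preserveAlternateTracks

    session.exportAsynchronously {
        print(session.status.rawValue, session.error?.localizedDescription ?? "done")
    }
}
```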
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h 2022-02-23 07:16:17.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h 2022-05-31 14:49:52.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2020 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -93,9 +93,17 @@
/* Specifies the aperture mode for the generated image. Default is AVAssetImageGeneratorApertureModeCleanAperture. */
@property (nonatomic, copy, nullable) AVAssetImageGeneratorApertureMode apertureMode;
-/* Specifies the video composition to use when extracting images from assets with multiple video tracks.
- If no videoComposition is specified, only the first enabled video track will be used.
- If a videoComposition is specified, the value of appliesPreferredTrackTransform is ignored. */
+/*!
+ @property videoComposition
+ @abstract Specifies the video composition to use when extracting images from assets with multiple video tracks.
+ @discussion If no videoComposition is specified, only the first enabled video track will be used.
+ If a videoComposition is specified, the value of appliesPreferredTrackTransform is ignored.
+ This property throws an exception if a video composition is set with any of the following property values:
+ - "renderScale" is not equal to one
+ - "renderSize" width or height is less than zero
+ - "frameDuration" is invalid or less than or equal to zero
+ - "sourceTrackIDForFrameTiming" is less than zero
+ */
@property (nonatomic, copy, nullable) AVVideoComposition *videoComposition;
/* Indicates the custom video compositor instance used, if any */
@@ -165,7 +173,7 @@
On iOS and tvOS, it is particularly important to avoid blocking. To preserve responsiveness, a synchronous request that blocks for too long (eg, a request to generate an image from an asset on a slow HTTP server) may lead to media services being reset.
*/
-- (nullable CGImageRef)copyCGImageAtTime:(CMTime)requestedTime actualTime:(nullable CMTime *)actualTime error:(NSError * _Nullable * _Nullable)outError CF_RETURNS_RETAINED;
+- (nullable CGImageRef)copyCGImageAtTime:(CMTime)requestedTime actualTime:(nullable CMTime *)actualTime error:(NSError * _Nullable * _Nullable)outError CF_RETURNS_RETAINED API_DEPRECATED_WITH_REPLACEMENT("generateCGImageAsynchronouslyForTime:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
/* error object indicates the reason for failure if the result is AVAssetImageGeneratorFailed */
typedef void (^AVAssetImageGeneratorCompletionHandler)(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error);
@@ -185,6 +193,20 @@
- (void)generateCGImagesAsynchronouslyForTimes:(NSArray<NSValue *> *)requestedTimes completionHandler:(AVAssetImageGeneratorCompletionHandler)handler NS_SWIFT_DISABLE_ASYNC;
/*!
+ @method generateCGImageAsynchronouslyForTime:completionHandler:
+ @abstract Returns a CGImageRef for an asset at or near the specified time.
+ @param requestedTime
+ A CMTime, specifying the asset time at which an image is requested.
+ @param handler
+ A block that will be called when the image request is complete.
+ @discussion The client will receive exactly one handler callback for requestedTime.
+ Changes to generator properties (snap behavior, maximum size, etc...) will not affect outstanding asynchronous image generation requests.
+ The generated image is not retained. Clients should retain the image if they wish it to persist after the completion handler returns.
+ If image generation succeeds, the `image` parameter to the completion handler will be non-NULL and the `error` parameter will be nil. If image generation fails or was cancelled, the `image` parameter will be NULL and the `error` parameter will describe what went wrong. For cancelled images, the returned error will be AVErrorOperationCancelled.
+*/
+- (void)generateCGImageAsynchronouslyForTime:(CMTime)requestedTime completionHandler:(void (^)(CGImageRef _Nullable image, CMTime actualTime, NSError * _Nullable error))handler NS_REFINED_FOR_SWIFT_ASYNC(2) API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!
@method cancelAllCGImageGeneration
@abstract Cancels all outstanding image generation requests.
@discussion Calls the handler block with AVAssetImageGeneratorCancelled for each image time in every previous invocation of -generateCGImagesAsynchronouslyForTimes:completionHandler:
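AVAssetImageGenerator.h deprecates the blocking `copyCGImageAtTime:actualTime:error:` in favor of a single-image asynchronous request. A Swift sketch of the replacement is below; the refined async spelling `image(at:)` is an assumption derived from the `NS_REFINED_FOR_SWIFT_ASYNC(2)` annotation on `generateCGImageAsynchronouslyForTime:completionHandler:`.

```swift
import AVFoundation
import CoreMedia

// Sketch: `image(at:)` is the assumed Swift projection of
// generateCGImageAsynchronouslyForTime:completionHandler:.
func thumbnail(for asset: AVAsset, atSeconds seconds: Double) async throws -> CGImage {
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true

    let time = CMTime(seconds: seconds, preferredTimescale: 600)
    let (image, actualTime) = try await generator.image(at: time)
    print("frame generated at", actualTime.seconds)
    return image
}
```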
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetPlaybackAssistant.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetPlaybackAssistant.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetPlaybackAssistant.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetPlaybackAssistant.h 2022-05-31 15:04:23.000000000 -0400
@@ -0,0 +1,67 @@
+#if !__has_include(<AVFCore/AVAssetPlaybackAssistant.h>)
+/*
+ File: AVAssetPlaybackAssistant.h
+
+ Framework: AVFoundation
+
+ Copyright 2021 Apple Inc. All rights reserved.
+
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+
+@class AVAsset;
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NSString *AVAssetPlaybackConfigurationOption NS_STRING_ENUM API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @constant AVAssetPlaybackConfigurationOptionStereoVideo
+ @abstract Indicates whether or not the asset can render stereo video.
+ @discussion Clients may use this property to determine whether to configure stereo video rendering.
+*/
+AVF_EXPORT AVAssetPlaybackConfigurationOption const AVAssetPlaybackConfigurationOptionStereoVideo API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @constant AVAssetPlaybackConfigurationOptionStereoMultiviewVideo
+ @abstract Indicates whether or not the asset can render stereo video and is also in a multiview compression format.
+ @discussion Clients may use this property to determine whether to configure stereo video rendering.
+*/
+AVF_EXPORT AVAssetPlaybackConfigurationOption const AVAssetPlaybackConfigurationOptionStereoMultiviewVideo API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @class AVAssetPlaybackAssistant
+ @abstract AVAssetPlaybackAssistant provides playback information for an asset.
+*/
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0))
+@interface AVAssetPlaybackAssistant : NSObject
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method assetPlaybackAssistantWithAsset:
+ @abstract Returns an instance of AVAssetPlaybackAssistant for inspection of an AVAsset object.
+ @param asset
+ An instance of AVAsset.
+ @result An instance of AVAssetPlaybackAssistant.
+*/
++ (instancetype)assetPlaybackAssistantWithAsset:(AVAsset *)asset;
+
+/*!
+ @method loadPlaybackConfigurationOptionsWithCompletionHandler:
+ @abstract Calls the completionHandler with information about the asset.
+ @param completionHandler
+ Called with an array of AVAssetPlaybackConfigurationOption values describing capabilities of the asset.
+ @discussion completionHandler is called when all of the AVAssetPlaybackConfigurationOption values have been loaded. If AVAssetPlaybackAssistant encounters failures when inspecting the contents of the asset, it will return no AVAssetPlaybackConfigurationOptions associated with those contents.
+*/
+- (void)loadPlaybackConfigurationOptionsWithCompletionHandler:(void (^)(NSArray<AVAssetPlaybackConfigurationOption> *playbackConfigurationOptions))completionHandler NS_SWIFT_ASYNC_NAME(getter:playbackConfigurationOptions());
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#else
+#import <AVFCore/AVAssetPlaybackAssistant.h>
+#endif
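AVAssetPlaybackAssistant.h is entirely new in this SDK and exposes a small asynchronous inspection API. A Swift sketch follows; the `AVAssetPlaybackAssistant(asset:)` spelling is assumed from the `+assetPlaybackAssistantWithAsset:` factory plus `AV_INIT_UNAVAILABLE`, and the async getter comes from the `NS_SWIFT_ASYNC_NAME` annotation in the header.

```swift
import AVFoundation

// Sketch: checks whether an asset advertises stereo (or multiview stereo) video.
func supportsStereoVideo(_ asset: AVAsset) async -> Bool {
    let assistant = AVAssetPlaybackAssistant(asset: asset)   // assumed Swift spelling of the factory

    // loadPlaybackConfigurationOptionsWithCompletionHandler: surfaces in Swift as this async getter.
    let options = await assistant.playbackConfigurationOptions
    return options.contains(.stereoVideo) || options.contains(.stereoMultiviewVideo)
}
```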
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReader.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReader.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReader.h 2022-02-23 07:10:12.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReader.h 2022-05-31 15:03:40.000000000 -0400
@@ -135,7 +135,7 @@
@discussion
The intersection of the value of timeRange and CMTimeRangeMake(kCMTimeZero, asset.duration) will determine the time range of the asset from which media data will be read. The default value of timeRange is CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity).
- This property cannot be set after reading has started.
+ This property throws an exception if a value is set after reading has started.
*/
@property (nonatomic) CMTimeRange timeRange;
@@ -175,7 +175,7 @@
@discussion
Outputs are created with a reference to one or more AVAssetTrack objects. These tracks must be owned by the asset returned by the receiver's asset property.
- Outputs cannot be added after reading has started.
+ This method throws an exception if the output has already been added to an AVAssetReader or if reading has started (`status` has progressed beyond AVAssetReaderStatusUnknown).
*/
- (void)addOutput:(AVAssetReaderOutput *)output;
@@ -191,6 +191,8 @@
This method validates the entire collection of settings for outputs for tracks, for audio mixing, and for video composition and initiates reading from the receiver's asset.
If this method returns NO, clients can determine the nature of the failure by checking the value of the status and error properties.
+
+ This method throws an exception if reading has already started (`status` has progressed beyond AVAssetReaderStatusUnknown).
*/
- (BOOL)startReading;
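The AVAssetReader.h edits replace the old "cannot be set/added after reading has started" wording with explicit notes that these operations throw exceptions. The ordering those comments describe is sketched in Swift below; the five-second range and nil output settings are arbitrary illustrative choices.

```swift
import AVFoundation
import CoreMedia

// Sketch of the documented ordering: configure timeRange and add outputs before
// startReading(); doing either afterwards now raises an exception.
func readLeadingSamples(of asset: AVAsset, track: AVAssetTrack) throws {
    let reader = try AVAssetReader(asset: asset)
    let output = AVAssetReaderTrackOutput(track: track, outputSettings: nil)

    reader.timeRange = CMTimeRange(start: .zero,
                                   duration: CMTime(seconds: 5, preferredTimescale: 600))
    reader.add(output)                                   // must precede startReading()

    guard reader.startReading() else {
        throw reader.error ?? CocoaError(.featureUnsupported)
    }
    while let sample = output.copyNextSampleBuffer() {
        print(CMSampleBufferGetPresentationTimeStamp(sample).seconds)
    }
}
```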
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h 2022-02-23 07:10:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h 2022-05-31 15:02:34.000000000 -0400
@@ -64,6 +64,8 @@
When the value of this property is YES, the AVAssetReaderOutput will always vend a buffer with copied data to the client. Data in such buffers can be freely modified by the client. When the value of this property is NO, the buffers vended to the client may not be copied. Such buffers may still be referenced by other entities. The result of modifying a buffer whose data hasn't been copied is undefined. Requesting buffers whose data hasn't been copied when possible can lead to performance improvements.
The default value is YES.
+
+ This property throws an exception if a value is set after reading has started (the asset reader has progressed beyond AVAssetReaderStatusUnknown).
*/
@property (nonatomic) BOOL alwaysCopiesSampleData API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -77,6 +79,8 @@
@discussion
The client is responsible for calling CFRelease on the returned CMSampleBuffer object when finished with it. This method will return NULL if there are no more sample buffers available for the receiver within the time range specified by its AVAssetReader's timeRange property, or if there is an error that prevents the AVAssetReader from reading more media data. When this method returns NULL, clients should check the value of the associated AVAssetReader's status property to determine why no more samples could be read.
+
+ This method throws an exception if this output is not added to an instance of AVAssetReader (using -addOutput:) and -startReading is not called on that asset reader.
*/
- (nullable CMSampleBufferRef)copyNextSampleBuffer CF_RETURNS_RETAINED;
@@ -95,7 +99,7 @@
The default value is NO, which means that the asset reader output may not be reconfigured once reading has begun. When the value of this property is NO, AVAssetReader may be able to read media data more efficiently, particularly when multiple asset reader outputs are attached.
- This property may not be set after -startReading has been called on the attached asset reader.
+ This property throws an exception if a value is set after reading has started (the asset reader has progressed beyond AVAssetReaderStatusUnknown).
*/
@property (nonatomic) BOOL supportsRandomAccess API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -112,11 +116,21 @@
This method is often used in conjunction with AVAssetWriter multi-pass (see AVAssetWriterInput category AVAssetWriterInputMultiPass). In this usage, the caller will invoke -copyNextSampleBuffer until that method returns NULL and then ask the AVAssetWriterInput for a set of time ranges from which it thinks media data should be re-encoded. These time ranges are then given to this method to set up the asset reader output for the next pass.
- The time ranges set here override the time range set on AVAssetReader.timeRange. Just as with that property, for each time range in the array the intersection of that time range and CMTimeRangeMake(kCMTimeZero, asset.duration) will take effect. If the start times of each time range in the array are not strictly increasing or if two or more time ranges in the array overlap, an NSInvalidArgumentException will be raised. It is an error to include a time range with a non-numeric start time or duration (see CMTIME_IS_NUMERIC), unless the duration is kCMTimePositiveInfinity.
+ The time ranges set here override the time range set on AVAssetReader.timeRange. Just as with that property, for each time range in the array the intersection of that time range and CMTimeRangeMake(kCMTimeZero, asset.duration) will take effect.
If this method is invoked after the status of the attached AVAssetReader has become AVAssetReaderStatusFailed or AVAssetReaderStatusCancelled, no change in status will occur and the result of the next call to -copyNextSampleBuffer will be NULL.
- If this method is invoked before all media data has been read (i.e. -copyNextSampleBuffer has not yet returned NULL), an exception will be thrown. This method may not be called before -startReading has been invoked on the attached asset reader.
+ This method throws an exception if the following conditions are not honored:
+ - each item in time ranges must be an NSValue
+ - the start of each time range must be numeric - see CMTIME_IS_NUMERIC
+ - the duration of each time range must be nonnegative and numeric, or kCMTimePositiveInfinity
+ - the start of each time range must be greater than or equal to the end of the previous time range
+ - start times must be strictly increasing
+ - time ranges must not overlap
+ - cannot be called before -startReading has been invoked on the attached asset reader
+ - cannot be called until all samples of media data have been read (i.e. copyNextSampleBuffer returns NULL and the asset reader has not entered a failure state)
+ - cannot be called without setting "supportsRandomAccess" to YES
+ - cannot be called after calling -markConfigurationAsFinal
*/
- (void)resetForReadingTimeRanges:(NSArray<NSValue *> *)timeRanges API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -221,6 +235,12 @@
ProRes encoded media can contain up to 12bits/ch. If your source is ProRes encoded and you wish to preserve more than 8bits/ch during decompression then use one of the following pixel formats: kCVPixelFormatType_4444AYpCbCr16, kCVPixelFormatType_422YpCbCr16, kCVPixelFormatType_422YpCbCr10, or kCVPixelFormatType_64ARGB. AVAssetReader does not support scaling with any of these high bit depth pixel formats. If you use them then do not specify kCVPixelBufferWidthKey or kCVPixelBufferHeightKey in your outputSettings dictionary. If you plan to append these sample buffers to an AVAssetWriterInput then note that only the ProRes encoders support these pixel formats.
ProRes 4444 encoded media can contain a mathematically lossless alpha channel. To preserve the alpha channel during decompression use a pixel format with an alpha component such as kCVPixelFormatType_4444AYpCbCr16 or kCVPixelFormatType_64ARGB. To test whether your source contains an alpha channel check that the track's format description has kCMFormatDescriptionExtension_Depth and that its value is 32.
+
+ This method throws an exception for any of the following reasons:
+ - the output settings dictionary contains an unsupported key mentioned above
+ - the output settings dictionary does not contain any recognized key
+ - output settings are not compatible with track's media type
+ - track output settings would cause the output to yield compressed samples
*/
- (instancetype)initWithTrack:(AVAssetTrack *)track outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings NS_DESIGNATED_INITIALIZER;
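A hedged Swift sketch of the high-bit-depth ProRes case mentioned above; `videoTrack` is an assumed input, and no width/height keys are included, per the restriction described:

```swift
import AVFoundation
import CoreVideo

// Hypothetical helper: 10-bit 4:2:2 output for ProRes sources, without scaling keys.
func makeHighBitDepthOutput(for videoTrack: AVAssetTrack) -> AVAssetReaderTrackOutput {
    let settings: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_422YpCbCr10
        // Do not add kCVPixelBufferWidthKey / kCVPixelBufferHeightKey with this format.
    ]
    return AVAssetReaderTrackOutput(track: videoTrack, outputSettings: settings)
}
```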
@@ -253,6 +273,10 @@
Constants for various time pitch algorithms, e.g. AVAudioTimePitchAlgorithmSpectral, are defined in AVAudioProcessingSettings.h. An NSInvalidArgumentException will be raised if this property is set to a value other than the constants defined in that file.
The default value is AVAudioTimePitchAlgorithmSpectral.
+
+ This property throws an exception for any of the following reasons:
+ - a value is set after reading has started
+ - a value is set other than AVAudioTimePitchAlgorithmSpectral, AVAudioTimePitchAlgorithmTimeDomain, or AVAudioTimePitchAlgorithmVarispeed.
*/
@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
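Illustrative Swift sketch (not from the diff), assuming `audioTrack` exists; only the three constants named above are accepted, and the property must be set before reading starts:

```swift
import AVFoundation

// Hypothetical helper: configure a track output's time pitch algorithm up front.
func configureAudioOutput(track audioTrack: AVAssetTrack) -> AVAssetReaderTrackOutput {
    let output = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
    output.audioTimePitchAlgorithm = .timeDomain   // .spectral and .varispeed are also allowed
    return output
}
```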
@@ -316,6 +340,12 @@
For non-nil values of audioSettings, the audio settings dictionary must contain values for keys in AVAudioSettings.h (linear PCM only). Initialization will fail if the audio settings cannot be used with the specified tracks. AVSampleRateConverterAudioQualityKey is not supported.
A value of nil for audioSettings configures the output to return samples in a convenient uncompressed format, with sample rate and other properties determined according to the properties of the specified audio tracks as well as other considerations that may vary according to device capabilities, operating system version, and other factors. Therefore if you wish to perform any processing on the output, you must examine the CMAudioFormatDescription of the CMSampleBuffers that are provided in order to ensure that your processing is appropriately configured for the output format.
+
+ This method throws an exception for any of the following reasons:
+ - an audio track does not have media type AVMediaTypeAudio
+ - an audio track belongs to a different AVAsset
+ - the audio settings contains an AVSampleRateConverterAudioQualityKey
+ - the output would be compressed
*/
- (instancetype)initWithAudioTracks:(NSArray<AVAssetTrack *> *)audioTracks audioSettings:(nullable NSDictionary<NSString *, id> *)audioSettings NS_DESIGNATED_INITIALIZER;
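A small Swift sketch of the nil-audioSettings case described above; `reader` and `audioTracks` are assumed, and the actual output format is read from the buffer's format description:

```swift
import AVFoundation
import CoreMedia

// Hypothetical helper: with nil audioSettings the framework chooses the format,
// so inspect each buffer before processing it.
func describeFirstBuffer(reader: AVAssetReader, audioTracks: [AVAssetTrack]) {
    let output = AVAssetReaderAudioMixOutput(audioTracks: audioTracks, audioSettings: nil)
    if reader.canAdd(output) { reader.add(output) }
    reader.startReading()
    if let buffer = output.copyNextSampleBuffer(),
       let description = CMSampleBufferGetFormatDescription(buffer),
       let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(description)?.pointee {
        print("sample rate:", asbd.mSampleRate, "channels:", asbd.mChannelsPerFrame)
    }
}
```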
@@ -347,7 +377,10 @@
@discussion
The value of this property is an AVAudioMix that can be used to specify how the volume of audio samples read from each source track will change over the timeline of the source asset.
- This property cannot be set after reading has started.
+ This property throws an exception for any of the following reasons:
+ - an audio mix is set after reading has started (the asset reader has progressed beyond AVAssetReaderStatusUnknown)
+ - setting an audio mix containing a track that was not used to create the receiver
+ - an audio mix is set containing an invalid audio time pitch algorithm
*/
@property (nonatomic, copy, nullable) AVAudioMix *audioMix;
@@ -425,11 +458,18 @@
A value of nil for videoSettings configures the output to return samples in a convenient uncompressed format, with properties determined according to the properties of the specified video tracks. Initialization will fail if the video settings cannot be used with the specified tracks.
- AVAssetReaderVideoCompositionOutput can only produce uncompressed output. This means that the video settings dictionary must follow the rules for uncompressed video output, as laid out in AVVideoSettings.h. In addition, the following keys are not supported:
+ AVAssetReaderVideoCompositionOutput can only produce uncompressed output. This means that the video settings dictionary must follow the rules for uncompressed video output, as laid out in AVVideoSettings.h.
- AVVideoCleanApertureKey
- AVVideoPixelAspectRatioKey
- AVVideoScalingModeKey
+ This method throws an exception for any of the following reasons:
+ - any video track is not of media type AVMediaTypeVideo
+ - any video track is not part of this asset reader output's AVAsset
+ - track output settings would cause the output to yield compressed samples
+ - video settings does not follow the rules for uncompressed video output (AVVideoSettings.h)
+ - video settings contains any of the following keys:
+ - AVVideoCleanApertureKey
+ - AVVideoPixelAspectRatioKey
+ - AVVideoScalingModeKey
+ - AVVideoDecompressionPropertiesKey
*/
- (instancetype)initWithVideoTracks:(NSArray<AVAssetTrack *> *)videoTracks videoSettings:(nullable NSDictionary<NSString *, id> *)videoSettings NS_DESIGNATED_INITIALIZER;
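A hedged Swift sketch of an uncompressed video-composition output that avoids the unsupported keys listed above; `asset` and `videoTracks` are assumed inputs:

```swift
import AVFoundation
import CoreVideo

// Hypothetical helper: BGRA output through a video composition built from the asset.
func makeCompositionOutput(asset: AVAsset, videoTracks: [AVAssetTrack]) -> AVAssetReaderVideoCompositionOutput {
    let settings: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ]
    let output = AVAssetReaderVideoCompositionOutput(videoTracks: videoTracks, videoSettings: settings)
    output.videoComposition = AVVideoComposition(propertiesOf: asset)
    return output
}
```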
@@ -461,7 +501,7 @@
@discussion
The value of this property is an AVVideoComposition that can be used to specify the visual arrangement of video frames read from each source track over the timeline of the source asset.
- This property cannot be set after reading has started.
+ This property throws an exception if a value is set after reading has started.
*/
@property (nonatomic, copy, nullable) AVVideoComposition *videoComposition;
@@ -526,6 +566,8 @@
It is an error to create a timed metadata group adaptor with an asset reader output that does not vend metadata. It is also an error to create a timed metadata group adaptor with an asset reader output whose asset reader has already started reading, or an asset reader output that already has been used to initialize another timed metadata group adaptor.
Clients should not mix calls to -[AVAssetReaderTrackOutput copyNextSampleBuffer] and -[AVAssetReaderOutputMetadataAdaptor nextTimedMetadataGroup]. Once an AVAssetReaderTrackOutput instance has been used to initialize an AVAssetReaderOutputMetadataAdaptor, calling -copyNextSampleBuffer on that instance will result in an exception being thrown.
+
+ This method throws an exception if the track's output was used to initialize another adaptor or if the track output's asset reader has already started reading.
*/
- (instancetype)initWithAssetReaderTrackOutput:(AVAssetReaderTrackOutput *)trackOutput NS_DESIGNATED_INITIALIZER;
@@ -550,6 +592,8 @@
Unlike -[AVAssetReaderTrackOutput copyNextSampleBuffer], this method returns an autoreleased object.
Before calling this method, you must ensure that the output which underlies the receiver is attached to an AVAssetReader via a prior call to -addOutput: and that -startReading has been called on the asset reader.
+
+ This method throws an exception if the track output is not attached to an asset reader and reading has not yet begun.
*/
- (nullable AVTimedMetadataGroup *)nextTimedMetadataGroup;
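For illustration only, a Swift sketch of the adaptor usage constraints described above; `reader` and `metadataTrack` are assumed to exist:

```swift
import AVFoundation

// Hypothetical helper: create the adaptor before startReading() and do not mix
// copyNextSampleBuffer() with nextTimedMetadataGroup() on the same output.
func readTimedMetadata(reader: AVAssetReader, metadataTrack: AVAssetTrack) {
    let output = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
    reader.add(output)
    let adaptor = AVAssetReaderOutputMetadataAdaptor(assetReaderTrackOutput: output)
    reader.startReading()
    while let group = adaptor.nextTimedMetadataGroup() {
        print(group.timeRange.start.seconds, group.items.count)
    }
}
```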
@@ -610,6 +654,8 @@
An instance of AVCaption representing the next caption.
@discussion
The method returns the next caption group.
+
+ This method throws an exception if the track output is not attached to an asset reader and reading has not yet begun.
*/
- (nullable AVCaptionGroup *)nextCaptionGroup;
@@ -633,6 +679,7 @@
@abstract
Category of AVAssetReaderOutputCaptionAdaptor for caption validation handling
*/
+API_AVAILABLE(macos(12.0)) API_UNAVAILABLE(ios, tvos, watchos)
@interface AVAssetReaderOutputCaptionAdaptor (AVAssetReaderCaptionValidation)
/*!
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h 2022-02-23 07:56:31.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h 2022-05-31 15:02:35.000000000 -0400
@@ -332,6 +332,13 @@
*/
@property (nonatomic, copy, nullable) NSDate *renewalDate API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+/*!
+ @property entireLengthAvailableOnDemand
+ @abstract Indicates whether asset data loading can expect data to be produced immediately.
+ @discussion Before you finish loading an AVAssetResourceLoadingRequest, if its contentInformationRequest is not nil, you may set this property to YES to indicate that all asset data can be produced immediately, e.g., because the data is fully cached, or because the custom URL scheme ultimately refers to files on local storage. This allows significant data flow optimizations. For backward compatibility, this property defaults to NO.
+*/
+@property (nonatomic, getter=isEntireLengthAvailableOnDemand) BOOL entireLengthAvailableOnDemand API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
@end
/*!
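To show where the new entireLengthAvailableOnDemand flag fits, a hypothetical Swift resource-loader delegate (not part of the diff); `cachedData` and `typeIdentifier` are assumed to describe fully cached media:

```swift
import AVFoundation

final class CachedResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate {
    let cachedData: Data
    let typeIdentifier: String

    init(cachedData: Data, typeIdentifier: String) {
        self.cachedData = cachedData
        self.typeIdentifier = typeIdentifier
    }

    func resourceLoader(_ resourceLoader: AVAssetResourceLoader,
                        shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
        if let info = loadingRequest.contentInformationRequest {
            info.contentType = typeIdentifier
            info.contentLength = Int64(cachedData.count)
            info.isEntireLengthAvailableOnDemand = true   // defaults to false for backward compatibility
        }
        loadingRequest.dataRequest?.respond(with: cachedData)
        loadingRequest.finishLoading()
        return true
    }
}
```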
@@ -407,6 +414,7 @@
@end
+API_AVAILABLE(macos(10.9), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetResourceLoadingRequest (AVAssetResourceLoadingRequestContentKeyRequestSupport)
/*!
@@ -447,6 +455,7 @@
*/
AVF_EXPORT NSString *const AVAssetResourceLoadingRequestStreamingContentKeyRequestRequiresPersistentKey API_DEPRECATED_WITH_REPLACEMENT("-[AVPersistableContentKeyRequest persistableContentKeyFromKeyVendorResponse:options:error:]", macos(10.14, API_TO_BE_DEPRECATED), ios(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
+API_AVAILABLE(macos(10.9), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetResourceLoadingRequest (AVAssetResourceLoadingRequestDeprecated)
/*!
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h 2022-02-23 07:13:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h 2022-05-31 14:49:51.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -59,23 +59,33 @@
each of which indicates the format of media samples referenced by the track;
a track that presents uniform media, e.g. encoded according to the same encoding settings,
will provide an array with a count of 1 */
-@property (nonatomic, readonly) NSArray *formatDescriptions;
+@property (nonatomic, readonly) NSArray *formatDescriptions AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.formatDescriptions) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* Indicates whether the receiver is playable in the current environment; if YES, an AVPlayerItemTrack of an AVPlayerItem initialized with the receiver's asset can be enabled for playback. */
-@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable
+#if __swift__
+API_DEPRECATED("Use load(.isPlayable) instead", macos(10.8, 13.0), ios(5.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+#endif
/* Indicates whether the receiver is decodable in the current environment; if YES, the track can be decoded even though decoding may be too slow for real time playback. */
-@property (nonatomic, readonly, getter=isDecodable) BOOL decodable API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+@property (nonatomic, readonly, getter=isDecodable) BOOL decodable
+#if __swift__
+API_DEPRECATED("Use load(.isDecodable) instead", macos(10.13, 13.0), ios(11.0, 16.0), tvos(11.0, 16.0), watchos(4.0, 9.0));
+#else
+API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+#endif
/* indicates whether the track is enabled according to state stored in its container or construct;
note that its presentation state can be changed from this default via AVPlayerItemTrack */
-@property (nonatomic, readonly, getter=isEnabled) BOOL enabled;
+@property (nonatomic, readonly, getter=isEnabled) BOOL enabled AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.isEnabled) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates whether the track references sample data only within its storage container */
-@property (nonatomic, readonly, getter=isSelfContained) BOOL selfContained;
+@property (nonatomic, readonly, getter=isSelfContained) BOOL selfContained AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.isSelfContained) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the total number of bytes of sample data required by the track */
-@property (nonatomic, readonly) long long totalSampleDataLength;
+@property (nonatomic, readonly) long long totalSampleDataLength AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.totalSampleDataLength) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method hasMediaCharacteristic:
@@ -85,7 +95,7 @@
as defined above.
@result YES if the track references media with the specified characteristic, otherwise NO.
*/
-- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.mediaCharacteristics) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
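The Swift-only deprecations above point at the asynchronous property loading API; a minimal sketch, assuming an existing `track` and an async context:

```swift
import AVFoundation

// Batch-load the properties whose synchronous getters are deprecated for Swift.
func describe(track: AVAssetTrack) async throws {
    let (formatDescriptions, isPlayable, timeRange) = try await track.load(
        .formatDescriptions, .isPlayable, .timeRange)
    print(formatDescriptions.count, isPlayable, timeRange.duration.seconds)
}
```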
@@ -94,13 +104,13 @@
/* Indicates the timeRange of the track within the overall timeline of the asset;
a track with CMTIME_COMPARE_INLINE(timeRange.start, >, kCMTimeZero) will initially present an empty interval. */
-@property (nonatomic, readonly) CMTimeRange timeRange;
+@property (nonatomic, readonly) CMTimeRange timeRange AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.timeRange) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates a timescale in which time values for the track can be operated upon without extraneous numerical conversion */
-@property (nonatomic, readonly) CMTimeScale naturalTimeScale;
+@property (nonatomic, readonly) CMTimeScale naturalTimeScale AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.naturalTimeScale) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the estimated data rate of the media data referenced by the track, in units of bits per second */
-@property (nonatomic, readonly) float estimatedDataRate;
+@property (nonatomic, readonly) float estimatedDataRate AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.estimatedDataRate) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
@@ -109,11 +119,11 @@
/* indicates the language associated with the track, as an ISO 639-2/T language code;
may be nil if no language is indicated */
-@property (nonatomic, readonly, nullable) NSString *languageCode;
+@property (nonatomic, readonly, nullable) NSString *languageCode AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.languageCode) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the language tag associated with the track, as an IETF BCP 47 (RFC 4646) language identifier;
may be nil if no language tag is indicated */
-@property (nonatomic, readonly, nullable) NSString *extendedLanguageTag;
+@property (nonatomic, readonly, nullable) NSString *extendedLanguageTag AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.extendedLanguageTag) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
@@ -121,11 +131,11 @@
@interface AVAssetTrack (AVAssetTrackPropertiesForVisualCharacteristic)
/* indicates the natural dimensions of the media data referenced by the track as a CGSize */
-@property (nonatomic, readonly) CGSize naturalSize;
+@property (nonatomic, readonly) CGSize naturalSize AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.naturalSize) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the transform specified in the track's storage container as the preferred transformation of the visual media data for display purposes;
its value is often but not always CGAffineTransformIdentity */
-@property (nonatomic, readonly) CGAffineTransform preferredTransform;
+@property (nonatomic, readonly) CGAffineTransform preferredTransform AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.preferredTransform) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
@@ -133,10 +143,15 @@
@interface AVAssetTrack (AVAssetTrackPropertiesForAudibleCharacteristic)
/* indicates the volume specified in the track's storage container as the preferred volume of the audible media data */
-@property (nonatomic, readonly) float preferredVolume;
+@property (nonatomic, readonly) float preferredVolume AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.preferredVolume) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates whether this audio track has dependencies (e.g. kAudioFormatMPEGD_USAC) */
-@property (nonatomic, readonly) BOOL hasAudioSampleDependencies API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+@property (nonatomic, readonly) BOOL hasAudioSampleDependencies
+#if __swift__
+API_DEPRECATED("Use load(.hasAudioSampleDependencies) instead", macos(10.15, 13.0), ios(13.0, 16.0), tvos(13.0, 16.0), watchos(6.0, 9.0));
+#else
+API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+#endif
@end
@@ -148,16 +163,26 @@
@abstract For tracks that carry a full frame per media sample, indicates the frame rate of the track in units of frames per second.
@discussion For field-based video tracks that carry one field per media sample, the value of this property is the field rate, not the frame rate.
*/
-@property (nonatomic, readonly) float nominalFrameRate;
+@property (nonatomic, readonly) float nominalFrameRate AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.nominalFrameRate) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* indicates the minimum duration of the track's frames; the value will be kCMTimeInvalid if the minimum frame duration is not known or cannot be calculated */
-@property (nonatomic, readonly) CMTime minFrameDuration API_AVAILABLE(macos(10.10), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) CMTime minFrameDuration
+#if __swift__
+API_DEPRECATED("Use load(.minFrameDuration) instead", macos(10.10, 13.0), ios(7.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.10), ios(7.0), tvos(9.0), watchos(1.0));
+#endif
/*!
@property requiresFrameReordering
@abstract Indicates whether samples in the track may have different values for their presentation and decode timestamps.
*/
-@property (nonatomic, readonly) BOOL requiresFrameReordering API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) BOOL requiresFrameReordering
+#if __swift__
+API_DEPRECATED("Use load(.requiresFrameReordering) instead", macos(10.10, 13.0), ios(8.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+#endif
@end
@@ -166,7 +191,7 @@
/* Provides an array of AVAssetTrackSegments with time mappings from the timeline of the track's media samples to the timeline of the track.
Empty edits, i.e. timeRanges for which no media data is available to be presented, have a value of AVAssetTrackSegment.empty equal to YES. */
-@property (nonatomic, copy, readonly) NSArray<AVAssetTrackSegment *> *segments;
+@property (nonatomic, copy, readonly) NSArray<AVAssetTrackSegment *> *segments AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.segments) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method segmentForTrackTime:
@@ -176,7 +201,12 @@
@result An AVAssetTrackSegment.
@discussion If the trackTime does not map to a sample presentation time (e.g. it's outside the track's timeRange), the segment closest in time to the specified trackTime is returned.
*/
-- (nullable AVAssetTrackSegment *)segmentForTrackTime:(CMTime)trackTime;
+- (nullable AVAssetTrackSegment *)segmentForTrackTime:(CMTime)trackTime
+#if __swift__
+API_DEPRECATED("Use loadSegment(forTrackTime:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadSegmentForTrackTime:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadSegmentForTrackTime:completionHandler:
@@ -196,7 +226,12 @@
The trackTime for which a sample presentation time is requested.
@result A CMTime; will be invalid if the trackTime is out of range
*/
-- (CMTime)samplePresentationTimeForTrackTime:(CMTime)trackTime;
+- (CMTime)samplePresentationTimeForTrackTime:(CMTime)trackTime
+#if __swift__
+API_DEPRECATED("Use loadSamplePresentationTime(forTrackTime:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadSamplePresentationTimeForTrackTime:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadSamplePresentationTimeForTrackTime:completionHandler:
@@ -216,15 +251,20 @@
// high-level access to selected metadata of common interest
/* provides access to an array of AVMetadataItems for each common metadata key for which a value is available */
-@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.commonMetadata) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* Provides access to an array of AVMetadataItems for all metadata identifiers for which a value is available; items can be filtered according to language via +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:] and according to identifier via +[AVMetadataItem metadataItemsFromArray:filteredByIdentifier:].
*/
-@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata
+#if __swift__
+API_DEPRECATED("Use load(.metadata) instead", macos(10.10, 13.0), ios(8.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+#endif
/* provides an NSArray of NSStrings, each representing a format of metadata that's available for the track (e.g. QuickTime userdata, etc.)
Metadata formats are defined in AVMetadataItem.h. */
-@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.availableMetadataFormats) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method metadataForFormat:
@@ -234,7 +274,12 @@
@result An NSArray containing AVMetadataItems.
@discussion Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
*/
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format
+#if __swift__
+API_DEPRECATED("Use loadMetadata(for:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadMetadataForFormat:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadMetadataForFormat:completionHandler:
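A short Swift sketch of the async replacement that the deprecation message names, assuming `track` exists and the call is made from an async context:

```swift
import AVFoundation

func dumpMetadata(of track: AVAssetTrack) async throws {
    for format in try await track.load(.availableMetadataFormats) {
        let items = try await track.loadMetadata(for: format)
        print(format.rawValue, items.count)
    }
}
```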
@@ -318,8 +363,12 @@
/* Provides an NSArray of NSStrings, each representing a type of track association that the receiver has with one or more of the other tracks of the asset (e.g. AVTrackAssociationTypeChapterList, AVTrackAssociationTypeTimecode, etc.).
Track association types are defined immediately above. */
-@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
-
+@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes
+#if __swift__
+API_DEPRECATED("Use load(.availableTrackAssociationTypes) instead", macos(10.9, 13.0), ios(7.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+#endif
/*!
@method associatedTracksOfType:
@abstract Provides an NSArray of AVAssetTracks, one for each track associated with the receiver with the specified type of track association.
@@ -328,7 +377,12 @@
@result An NSArray containing AVAssetTracks; may be empty if there is no associated tracks of the specified type.
@discussion Becomes callable without blocking when the key @"availableTrackAssociationTypes" has been loaded.
*/
-- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType
+#if __swift__
+API_DEPRECATED("Use loadAssociatedTracks(ofType:) instead", macos(10.9, 13.0), ios(7.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadAssociatedTracksOfType:completionHandler:", macos(10.9, API_TO_BE_DEPRECATED), ios(7.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadAssociatedTracksOfType:completionHandler:
@@ -348,7 +402,12 @@
@interface AVAssetTrack (AVAssetTrackSampleCursorProvision)
/* Indicates whether the receiver can provide instances of AVSampleCursor for traversing its media samples and discovering information about them. */
-@property (nonatomic, readonly) BOOL canProvideSampleCursors API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
+@property (nonatomic, readonly) BOOL canProvideSampleCursors
+#if __swift__
+API_DEPRECATED("Use load(.canProvideSampleCursors) instead", macos(10.10, 13.0)) API_UNAVAILABLE(ios, tvos, watchos);
+#else
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0));
+#endif
/*!
@method makeSampleCursorWithPresentationTimeStamp:
@@ -360,7 +419,7 @@
If the receiver's asset has a value of NO for providesPreciseDurationAndTiming, and it is prohibitively expensive to locate the precise sample at the desired timestamp, the sample cursor may be approximately positioned.
This method will return nil if there are no samples in the track.
*/
-- (nullable AVSampleCursor *)makeSampleCursorWithPresentationTimeStamp:(CMTime)presentationTimeStamp API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
+- (nullable AVSampleCursor *)makeSampleCursorWithPresentationTimeStamp:(CMTime)presentationTimeStamp API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@method makeSampleCursorAtFirstSampleInDecodeOrder:
@@ -368,7 +427,7 @@
@result An instance of AVSampleCursor.
@discussion This method will return nil if there are no samples in the track.
*/
-- (nullable AVSampleCursor *)makeSampleCursorAtFirstSampleInDecodeOrder API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
+- (nullable AVSampleCursor *)makeSampleCursorAtFirstSampleInDecodeOrder API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@method makeSampleCursorAtLastSampleInDecodeOrder:
@@ -376,7 +435,7 @@
@result An instance of AVSampleCursor.
@discussion This method will return nil if there are no samples in the track.
*/
-- (nullable AVSampleCursor *)makeSampleCursorAtLastSampleInDecodeOrder API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
+- (nullable AVSampleCursor *)makeSampleCursorAtLastSampleInDecodeOrder API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0));
@end
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h 2022-02-23 07:16:16.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h 2022-06-03 18:07:15.000000000 -0400
@@ -107,7 +107,7 @@
@discussion
Writing will fail if a file already exists at the specified URL.
- UTIs for container formats that can be written are declared in AVMediaFormat.h.
+ This method throws an exception if the output file type is not declared in AVMediaFormat.h.
*/
- (nullable instancetype)initWithURL:(NSURL *)outputURL fileType:(AVFileType)outputFileType error:(NSError * _Nullable * _Nullable)outError NS_DESIGNATED_INITIALIZER;
@@ -126,7 +126,7 @@
Clients may use +typeWithIdentifier: with a UTI to create an instance of UTType. See <UniformTypeIdentifiers/UTType.h>.
- UTIs for container formats that can be output are declared in AVMediaFormat.h.
+ This method throws an exception if the output content type UTI for container format is not declared in AVMediaFormat.h.
*/
- (instancetype)initWithContentType:(UTType *)outputContentType NS_DESIGNATED_INITIALIZER API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0)) API_UNAVAILABLE(watchos);
@@ -135,7 +135,7 @@
@abstract
The location of the file for which the instance of AVAssetWriter was initialized for writing.
@discussion
- You may use UTTypeCopyPreferredTagWithClass(outputFileType, kUTTagClassFilenameExtension) to obtain an appropriate path extension for the outputFileType you have specified. For more information about UTTypeCopyPreferredTagWithClass and kUTTagClassFilenameExtension, on iOS see <CoreServices/UTType.h> and on Mac OS X see <LaunchServices/UTType.h>.
+ You may use [[UTType typeWithIdentifier:outputFileType] preferredFilenameExtension] to obtain an appropriate path extension for the outputFileType you have specified. For more information, see <UniformTypeIdentifiers/UTType.h>.
*/
@property (nonatomic, copy, readonly) NSURL *outputURL;
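A Swift sketch of the updated guidance above, using UniformTypeIdentifiers instead of UTTypeCopyPreferredTagWithClass; the "mov" fallback and names are illustrative assumptions:

```swift
import AVFoundation
import UniformTypeIdentifiers

// Derive a path extension for the chosen output file type.
func outputURL(in directory: URL, baseName: String, fileType: AVFileType) -> URL {
    let ext = UTType(fileType.rawValue)?.preferredFilenameExtension ?? "mov"
    return directory.appendingPathComponent(baseName).appendingPathExtension(ext)
}
```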
@@ -269,6 +269,14 @@
Inputs are created with a media type and output settings. These both must be compatible with the receiver.
Inputs cannot be added after writing has started.
+
+ This method throws an exception if any of the following conditions are satisfied:
+ - the input's media type is not allowed for this asset writer
+ - writing uncompressed video in a specific format
+ - passthrough* to files (other than AVFileTypeQuickTimeMovie) is missing a format hint in the AVAssetWriterInput initializer
+ - passthrough* is not supported for this media/file type combination (for example, AVFileTypeWAVE only supports AVMediaTypeAudio)
+
+ *Passthrough is indicated when the input's output settings are nil.
*/
- (void)addInput:(AVAssetWriterInput *)input;
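For context, a hedged Swift sketch of the passthrough case referenced above: with nil outputSettings and a non-QuickTime container, a source format hint is required; `sourceFormat` is assumed to describe the buffers that will be appended:

```swift
import AVFoundation
import CoreMedia

func makeWriter(outputURL: URL, sourceFormat: CMFormatDescription) throws -> AVAssetWriter {
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
    let input = AVAssetWriterInput(mediaType: .video,
                                   outputSettings: nil,          // passthrough
                                   sourceFormatHint: sourceFormat)
    if writer.canAdd(input) {
        writer.add(input)       // inputs cannot be added after writing has started
    }
    return writer
}
```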
@@ -326,6 +334,8 @@
It is an error to append samples outside of a sample-writing session. To append more samples after invoking -endSessionAtSourceTime:, you must first start a new session using -startSessionAtSourceTime:.
NOTE: Multiple sample-writing sessions are currently not supported. It is an error to call -startSessionAtSourceTime: a second time after calling -endSessionAtSourceTime:.
+
+ This method throws an exception if the session is ended without first starting it.
*/
- (void)endSessionAtSourceTime:(CMTime)endTime;
@@ -380,7 +390,7 @@
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriter (AVAssetWriterFileTypeSpecificProperties)
/*!
@@ -456,6 +466,7 @@
@class AVAssetWriterInputGroup;
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriter (AVAssetWriterInputGroups)
/*!
@@ -470,6 +481,10 @@
@discussion
If outputFileType specifies a container format that does not support mutually exclusive relationships among tracks, or if the specified instance of AVAssetWriterInputGroup contains inputs with media types that cannot be related, the group cannot be added to the AVAssetWriter.
+
+ This method throws an exception if any of the following conditions are satisfied:
+ - this writer's output file type does not support mutually exclusive relationships among tracks (allowed types are AVFileTypeQuickTimeMovie, AVFileTypeAppleM4A, AVFileTypeAppleM4V, AVFileType3GPP [iPhone only], AVFileTypeMPEG4)
+ - any AVAssetWriterInput in the input group is also present in an input group already added
*/
- (BOOL)canAddInputGroup:(AVAssetWriterInputGroup *)inputGroup API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -574,6 +589,7 @@
@protocol AVAssetWriterDelegate;
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriter (AVAssetWriterSegmentation)
/*!
@@ -634,8 +650,8 @@
Closes the current segment and outputs it to the -assetWriter:didOutputSegmentData:segmentType:segmentReport: or -assetWriter:didOutputSegmentData:segmentType: delegate method.
@discussion
- Use this method only when the value of preferredOutputSegmentInterval property is set to kCMTimeIndefinite.
- */
+ This method throws an exception if the delegate method to output segment data is not implemented, or if the value of the preferredOutputSegmentInterval property is not kCMTimeIndefinite.
+ */
- (void)flushSegment API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0)) API_UNAVAILABLE(watchos);
@end
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h 2022-02-23 10:57:32.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h 2022-05-31 14:49:49.000000000 -0400
@@ -85,7 +85,13 @@
@discussion
A version of +assetWriterInputWithMediaType:outputSettings: that includes the ability to hint at the format of media data that will be appended to the new instance of AVAssetWriterInput. When a source format hint is provided, the outputSettings dictionary is not required to be fully specified. For AVMediaTypeAudio, this means that AVFormatIDKey is the only required key. For AVMediaTypeVideo, this means that AVVideoCodecKey is the only required key. Values for the remaining keys will be chosen by the asset writer input, with consideration given to the attributes of the source format. To guarantee successful file writing, clients who specify a format hint should ensure that subsequently-appended buffers are of the specified format.
- An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
+ This method throws an exception for any of the following reasons:
+ - the media type of the format description does not match the media type passed into this method
+ - the width and height of video format hint are not positive
+ - the output settings do not match the supplied media type
+ - for video inputs, the output settings do not contain a required key (AVVideoCodecKey, AVVideoWidthKey, AVVideoHeightKey)
+ - the output scaling mode is AVVideoScalingModeFit
+ - the output settings contain AVSampleRateConverterAudioQualityKey or AVVideoDecompressionPropertiesKey
*/
+ (instancetype)assetWriterInputWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint API_AVAILABLE(macos(10.8), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -109,6 +115,13 @@
For AVMediaTypeAudio the following keys are not currently supported in the outputSettings dictionary: AVSampleRateConverterAudioQualityKey. When using this initializer, an audio settings dictionary must be fully specified, meaning that it must contain AVFormatIDKey, AVSampleRateKey, and AVNumberOfChannelsKey. If no other channel layout information is available, a value of 1 for AVNumberOfChannelsKey will result in mono output and a value of 2 will result in stereo output. If AVNumberOfChannelsKey specifies a channel count greater than 2, the dictionary must also specify a value for AVChannelLayoutKey. For kAudioFormatLinearPCM, all relevant AVLinearPCM*Key keys must be included, and for kAudioFormatAppleLossless, AVEncoderBitDepthHintKey keys must be included. See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.
For AVMediaTypeVideo, any output settings dictionary must request a compressed video format. This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h. When using this initializer, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey. See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys. On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecTypeH264 and AVVideoCodecTypeJPEG. AVVideoCodecTypeH264 is not supported on iPhone 3G. For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
+
+ This method throws an exception for any of the following reasons:
+ - the media type of the format description does not match the media type passed into this method
+ - the output settings do not match the supplied media type
+ - for video inputs, the output settings do not contain a required key (AVVideoCodecKey, AVVideoWidthKey, AVVideoHeightKey)
+ - the output scaling mode is AVVideoScalingModeFit
+ - the output settings contain AVSampleRateConverterAudioQualityKey or AVVideoDecompressionPropertiesKey
*/
- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
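A minimal Swift sketch of a fully specified audio settings dictionary, as required above when no source format hint is supplied; the AAC/44.1 kHz/stereo choice is just an example:

```swift
import AVFoundation
import AudioToolbox

let aacSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVSampleRateKey: 44_100,
    AVNumberOfChannelsKey: 2
]
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: aacSettings)
```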
@@ -129,7 +142,13 @@
@discussion
A version of -initWithMediaType:outputSettings: that includes the ability to hint at the format of media data that will be appended to the new instance of AVAssetWriterInput. When a source format hint is provided, the outputSettings dictionary is not required to be fully specified. For AVMediaTypeAudio, this means that AVFormatIDKey is the only required key. For AVMediaTypeVideo, this means that AVVideoCodecKey is the only required key. Values for the remaining keys will be chosen by the asset writer input, with consideration given to the attributes of the source format. To guarantee successful file writing, clients who specify a format hint should ensure that subsequently-appended buffers are of the specified format.
- An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
+ This method throws an exception for any of the following reasons:
+ - the media type of the format description does not match the media type passed into this method
+ - the width and height of video format hint are not positive
+ - the output settings do not match the supplied media type
+ - for video inputs, the output settings do not contain a required key (AVVideoCodecKey, AVVideoWidthKey, AVVideoHeightKey)
+ - the output scaling mode is AVVideoScalingModeFit
+ - the output settings contain AVSampleRateConverterAudioQualityKey or AVVideoDecompressionPropertiesKey
*/
- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint API_AVAILABLE(macos(10.8), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos) NS_DESIGNATED_INITIALIZER;
@@ -139,7 +158,7 @@
The media type of the samples that can be appended to the receiver.
@discussion
- The value of this property is one of the media type strings defined in AVMediaFormat.h.
+ The value of this property is one of the media types defined in AVMediaFormat.h.
*/
@property (nonatomic, readonly) AVMediaType mediaType;
@@ -246,6 +265,8 @@
When using a push-style buffer source, it is generally better to immediately append each buffer to the AVAssetWriterInput, directly via -[AVAssetWriter appendSampleBuffer:], as it is received. Using this strategy, it is often possible to avoid having to queue up buffers in between the buffer source and the AVAssetWriterInput. Note that many of these push-style buffer sources also produce buffers in real-time, in which case the client should set expectsMediaDataInRealTime to YES.
Before calling this method, you must ensure that the receiver is attached to an AVAssetWriter via a prior call to -addInput: and that -startWriting has been called on the asset writer.
+
+ This method throws an exception if called more than once.
*/
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
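An illustrative Swift sketch of the pull model described above (call it only once); `nextSampleBuffer` is an assumed caller-supplied source and the writer is assumed to have been started:

```swift
import AVFoundation
import CoreMedia

func drive(input: AVAssetWriterInput, nextSampleBuffer: @escaping () -> CMSampleBuffer?) {
    let queue = DispatchQueue(label: "asset-writer-input")
    input.requestMediaDataWhenReady(on: queue) {
        while input.isReadyForMoreMediaData {
            guard let buffer = nextSampleBuffer() else {
                input.markAsFinished()          // no more media data for this input
                return
            }
            if !input.append(buffer) {
                return                          // appending failed; check writer.status / error
            }
        }
    }
}
```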
@@ -286,6 +307,8 @@
As of OS X 10.10 and iOS 8.0, this method can be used to add sample buffers that reference existing data in a file instead of containing media data to be appended to the file. This can be used to generate tracks that are not self-contained. In order to append such a sample reference to the track create a CMSampleBufferRef with a NULL dataBuffer and dataReady set to true and set the kCMSampleBufferAttachmentKey_SampleReferenceURL and kCMSampleBufferAttachmentKey_SampleReferenceByteOffset attachments on the sample buffer. Further documentation on how to create such a "sample reference" sample buffer can be found in the description of the kCMSampleBufferAttachmentKey_SampleReferenceURL and kCMSampleBufferAttachmentKey_SampleReferenceByteOffset attachment keys in the CMSampleBuffer documentation.
Before calling this method, you must ensure that the receiver is attached to an AVAssetWriter via a prior call to -addInput: and that -startWriting has been called on the asset writer. It is an error to invoke this method before starting a session (via -[AVAssetWriter startSessionAtSourceTime:]) or after ending a session (via -[AVAssetWriter endSessionAtSourceTime:]).
+
+ This method throws an exception if the sample buffer's media type does not match the asset writer input's media type.
*/
- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@@ -305,7 +328,7 @@
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputLanguageProperties)
/*!
@@ -317,6 +340,8 @@
Also see extendedLanguageTag below.
This property cannot be set after writing on the receiver's AVAssetWriter has started.
+
+ This property throws an exception if a language code is set which does not conform to the ISO 639-2/T language codes.
*/
@property (nonatomic, copy, nullable) NSString *languageCode API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -328,13 +353,15 @@
@discussion
Extended language tags are normally set only when an ISO 639-2/T language code by itself is ambiguous, as in cases in which media data should be distinguished not only by language but also by the regional dialect in use or the writing system employed.
- This property cannot be set after writing on the receiver's AVAssetWriter has started.
+ This property cannot be set after writing on the receiver's AVAssetWriter has started.
+
+ This property throws an exception if an extended language tag is set which does not conform to the IETF BCP 47 (RFC 4646) language identifiers.
*/
@property (nonatomic, copy, nullable) NSString *extendedLanguageTag API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputPropertiesForVisualCharacteristic)
/*!
@@ -363,7 +390,7 @@
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputPropertiesForAudibleCharacteristic)
/*!
@@ -380,7 +407,7 @@
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputFileTypeSpecificProperties)
/*!
@@ -392,6 +419,8 @@
When an input group is added to an AVAssetWriter (see -[AVAssetWriter addInputGroup:]), the value of marksOutputTrackAsEnabled will automatically be set to YES for the default input and set to NO for all of the other inputs in the group. In this case, if a new value is set on this property then an exception will be raised.
This property cannot be set after writing on the receiver's AVAssetWriter has started.
+
+ This property throws an exception if a value is set on an asset writer input that is contained in an input group.
*/
@property (nonatomic) BOOL marksOutputTrackAsEnabled API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -404,6 +433,8 @@
The default value is 0, which indicates that the receiver should choose a convenient value, if applicable. It is an error to set a value other than 0 if the receiver has media type AVMediaTypeAudio.
This property cannot be set after writing has started.
+
+ This property throws an exception if a value is set on an asset writer input with media type AVMediaTypeAudio.
*/
@property (nonatomic) CMTimeScale mediaTimeScale API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -417,9 +448,11 @@
A "chunk" contains one or more samples. The total duration of the samples in a chunk is no greater than this preferred chunk duration, or the duration of a single sample if the sample's duration is greater than this preferred chunk duration.
- The default value is kCMTimeInvalid, which means that the receiver will choose an appropriate default value. It is an error to set a chunk duration that is negative or non-numeric.
+ The default value is kCMTimeInvalid, which means that the receiver will choose an appropriate default value.
This property cannot be set after -startWriting has been called on the receiver.
+
+ This property throws an exception if a duration is set which is non-numeric or non-positive (see CMTIME_IS_NUMERIC).
*/
@property (nonatomic) CMTime preferredMediaChunkDuration API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -490,7 +523,7 @@
@end
-
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputTrackAssociations)
/*!
@@ -522,6 +555,8 @@
If the type of association requires tracks of specific media types that don't match the media types of the inputs, or if the output file type does not support track associations, an NSInvalidArgumentException is raised.
Track associations cannot be added after writing on the receiver's AVAssetWriter has started.
+
+ This method throws an exception if the input and track association type cannot be added (see -canAddTrackAssociationWithTrackOfInput:type:).
*/
- (void)addTrackAssociationWithTrackOfInput:(AVAssetWriterInput *)input type:(NSString *)trackAssociationType API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -530,6 +565,7 @@
@class AVAssetWriterInputPassDescription;
+API_AVAILABLE(macos(10.7), ios(4.1), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVAssetWriterInput (AVAssetWriterInputMultiPass)
/*!
@@ -604,6 +640,8 @@
When all media data has been appended for the current request, call -markCurrentPassAsFinished to begin the process of determining whether an additional pass is warranted. If an additional pass is warranted, the block passed to this method will be invoked to begin the next pass. If no additional passes are needed, the block passed to this method will be invoked one final time so the client can invoke -markAsFinished in response to the value of currentPassDescription becoming nil.
Before calling this method, you must ensure that the receiver is attached to an AVAssetWriter via a prior call to -addInput: and that -startWriting has been called on the asset writer.
+
+ This method throws an exception if called more than once.
*/
- (void)respondToEachPassDescriptionOnQueue:(dispatch_queue_t)queue usingBlock:(dispatch_block_t)block API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -694,8 +732,8 @@
Pixel buffer attributes keys for the pixel buffer pool are defined in <CoreVideo/CVPixelBuffer.h>. To specify the pixel format type, the pixelBufferAttributes dictionary should contain a value for kCVPixelBufferPixelFormatTypeKey. For example, use [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] for 8-bit-per-channel BGRA. See the discussion under appendPixelBuffer:withPresentationTime: for advice on choosing a pixel format.
Clients that do not need a pixel buffer pool for allocating buffers should set sourcePixelBufferAttributes to nil.
-
- It is an error to initialize an instance of AVAssetWriterInputPixelBufferAdaptor with a sample buffer input that is already attached to another instance of AVAssetWriterInputPixelBufferAdaptor.
+
+ This method throws an exception if the input is already attached to another asset writer input pixel buffer adaptor or if the input has already started writing (the asset writer has progressed beyond AVAssetWriterStatusUnknown).
*/
+ (instancetype)assetWriterInputPixelBufferAdaptorWithAssetWriterInput:(AVAssetWriterInput *)input sourcePixelBufferAttributes:(nullable NSDictionary<NSString *, id> *)sourcePixelBufferAttributes;
@@ -717,8 +755,8 @@
Pixel buffer attributes keys for the pixel buffer pool are defined in <CoreVideo/CVPixelBuffer.h>. To specify the pixel format type, the pixelBufferAttributes dictionary should contain a value for kCVPixelBufferPixelFormatTypeKey. For example, use [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] for 8-bit-per-channel BGRA. See the discussion under appendPixelBuffer:withPresentationTime: for advice on choosing a pixel format.
Clients that do not need a pixel buffer pool for allocating buffers should set sourcePixelBufferAttributes to nil.
-
- It is an error to initialize an instance of AVAssetWriterInputPixelBufferAdaptor with an asset writer input that is already attached to another instance of AVAssetWriterInputPixelBufferAdaptor. It is also an error to initialize an instance of AVAssetWriterInputPixelBufferAdaptor with an asset writer input whose asset writer has progressed beyond AVAssetWriterStatusUnknown.
+
+ This method throws an exception if the input is already attached to another asset writer input pixel buffer adaptor or if the input has already started writing (the asset writer has progressed beyond AVAssetWriterStatusUnknown).
*/
- (instancetype)initWithAssetWriterInput:(AVAssetWriterInput *)input sourcePixelBufferAttributes:(nullable NSDictionary<NSString *, id> *)sourcePixelBufferAttributes NS_DESIGNATED_INITIALIZER;
@@ -750,6 +788,8 @@
The value of this property will be NULL before -[AVAssetWriter startWriting] is called on the associated AVAssetWriter object.
This property is key value observable.
+
+ This property throws an exception if a pixel buffer pool cannot be created with this asset writer input pixel buffer adaptor's source pixel buffer attributes (must specify width, height, and either pixel format or pixel format description).
*/
@property (nonatomic, readonly, nullable) CVPixelBufferPoolRef pixelBufferPool;
@@ -779,6 +819,8 @@
If you are working with high bit depth sources the following yuv pixel formats are recommended when encoding to ProRes: kCVPixelFormatType_4444AYpCbCr16, kCVPixelFormatType_422YpCbCr16, and kCVPixelFormatType_422YpCbCr10. When working in the RGB domain kCVPixelFormatType_64ARGB is recommended. Scaling and color matching are not currently supported when using AVAssetWriter with any of these high bit depth pixel formats. Please make sure that your track's output settings dictionary specifies the same width and height as the buffers you will be appending. Do not include AVVideoScalingModeKey or AVVideoColorPropertiesKey.
Before calling this method, you must ensure that the input that underlies the receiver is attached to an AVAssetWriter via a prior call to -addInput: and that -startWriting has been called on the asset writer. It is an error to invoke this method before starting a session (via -[AVAssetWriter startSessionAtSourceTime:]) or after ending a session (via -[AVAssetWriter endSessionAtSourceTime:]).
+
+ This method throws an exception if the presentation time is non-numeric (see CMTIME_IS_NUMERIC) or if "readyForMoreMediaData" is NO.
*/
- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime;
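A hedged Swift sketch tying together the adaptor notes above: the adaptor is created before startWriting() (creating it later throws) and the pool is only available afterwards; `writer` and `videoInput` are assumed:

```swift
import AVFoundation
import CoreVideo

func appendOneFrame(writer: AVAssetWriter, videoInput: AVAssetWriterInput) {
    let attributes: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        kCVPixelBufferWidthKey as String: 1280,
        kCVPixelBufferHeightKey as String: 720
    ]
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput,
                                                       sourcePixelBufferAttributes: attributes)
    guard writer.startWriting() else { return }
    writer.startSession(atSourceTime: .zero)

    var pixelBuffer: CVPixelBuffer?
    if let pool = adaptor.pixelBufferPool {                       // nil before startWriting()
        _ = CVPixelBufferPoolCreatePixelBuffer(nil, pool, &pixelBuffer)
    }
    if let buffer = pixelBuffer, videoInput.isReadyForMoreMediaData {
        _ = adaptor.append(buffer, withPresentationTime: .zero)   // time must be numeric
    }
}
```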
@@ -813,7 +855,11 @@
@discussion
The instance of AVAssetWriterInput passed in to this method must have been created with a format hint indicating all possible combinations of identifier (or, alternatively, key and keySpace), dataType, and extendedLanguageTag that will be appended to the metadata adaptor. It is an error to append metadata items not represented in the input's format hint.
- It is an error to initialize an instance of AVAssetWriterInputMetadataAdaptor with an asset writer input that is already attached to another instance of AVAssetWriterInputMetadataAdaptor. It is also an error to initialize an instance of AVAssetWriterInputMetadataAdaptor with an asset writer input whose asset writer has progressed beyond AVAssetWriterStatusUnknown.
+ This method throws an exception for any of the following reasons:
+ - input is already attached to another instance of AVAssetWriterInputMetadataAdaptor
+ - input's asset writer has already started writing (progressed beyond AVAssetWriterStatusUnknown)
+ - input's asset writer does not carry a source format hint
+ - input's source format hint media subtype is not kCMMetadataFormatType_Boxed
*/
+ (instancetype)assetWriterInputMetadataAdaptorWithAssetWriterInput:(AVAssetWriterInput *)input;
@@ -830,7 +876,11 @@
@discussion
The instance of AVAssetWriterInput passed in to this method must have been created with a format hint indicating all possible combinations of identifier (or, alternatively, key and keySpace), dataType, and extendedLanguageTag that will be appended to the metadata adaptor. It is an error to append metadata items not represented in the input's format hint. For help creating a suitable format hint, see -[AVTimedMetadataGroup copyFormatDescription].
- It is an error to initialize an instance of AVAssetWriterInputMetadataAdaptor with an asset writer input that is already attached to another instance of AVAssetWriterInputMetadataAdaptor. It is also an error to initialize an instance of AVAssetWriterInputMetadataAdaptor with an asset writer input whose asset writer has progressed beyond AVAssetWriterStatusUnknown.
+ This method throws an exception for any of the following reasons:
+ - input is already attached to another instance of AVAssetWriterInputMetadataAdaptor
+ - input's asset writer has already started writing (progressed beyond AVAssetWriterStatusUnknown)
+ - input's asset writer does not carry a source format hint
+ - input's source format hint media subtype is not kCMMetadataFormatType_Boxed
*/
- (instancetype)initWithAssetWriterInput:(AVAssetWriterInput *)input NS_DESIGNATED_INITIALIZER;
@@ -857,6 +907,8 @@
The timing of the metadata items in the output asset will correspond to the timeRange of the AVTimedMetadataGroup, regardless of the values of the time and duration properties of the individual items.
Before calling this method, you must ensure that the input that underlies the receiver is attached to an AVAssetWriter via a prior call to -addInput: and that -startWriting has been called on the asset writer. It is an error to invoke this method before starting a session (via -[AVAssetWriter startSessionAtSourceTime:]) or after ending a session (via -[AVAssetWriter endSessionAtSourceTime:]).
+
+ This method throws an exception if the attached asset writer input has not been added to an asset writer or -startWriting has not been called on that asset writer.
*/
- (BOOL)appendTimedMetadataGroup:(AVTimedMetadataGroup *)timedMetadataGroup;
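Continuing the sketch above (all names hypothetical): once the owning asset writer has started writing and a session is open, groups can be appended.

// Sketch only: assumes -startWriting and -startSessionAtSourceTime: were already called
// on the asset writer that owns metadataInput.
AVTimedMetadataGroup *group =
    [[AVTimedMetadataGroup alloc] initWithItems:@[item]
                                      timeRange:CMTimeRangeMake(CMTimeMake(1, 1), CMTimeMake(2, 1))];
if (![metadataAdaptor appendTimedMetadataGroup:group]) {
    // Inspect the owning AVAssetWriter's status and error to diagnose the failure.
}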
@@ -886,6 +938,13 @@
@method initWithAssetWriterInput:
@abstract
Creates a new caption adaptor for writing to the specified asset writer input.
+
+ @discussion
+ This method throws an exception for any of the following reasons:
+ - input is nil
+ - the input's media type is not supported (should use text or closed caption)
+ - the input is already attached to an asset writer caption adaptor
+ - the input has already started writing
*/
- (instancetype)initWithAssetWriterInput:(AVAssetWriterInput *)input;
@@ -909,7 +968,7 @@
The start time of each caption's timeRange property must be numeric (see CMTIME_IS_NUMERIC) and must be at least as large as the start time of any previous caption (including any captions present in a group appended via -appendCaptionGroup:). In other words, the sequence of captions appended using this method must have monotonically increasing start times.
- The duration of each caption's timeRange property must either be numeric.
+ The duration of each caption's timeRange property must be numeric.
*/
- (BOOL)appendCaption:(AVCaption *)caption;
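A hedged sketch of the caption path (not part of the diff; `captionInput` is a placeholder AVAssetWriterInput with a text or closed-caption media type, attached to a writer that has already started writing):

// Sketch only: start times must be numeric and monotonically non-decreasing across appends.
AVAssetWriterInputCaptionAdaptor *captionAdaptor =
    [[AVAssetWriterInputCaptionAdaptor alloc] initWithAssetWriterInput:captionInput];

AVCaption *caption =
    [[AVCaption alloc] initWithText:@"Hello, world"
                          timeRange:CMTimeRangeMake(CMTimeMake(0, 600), CMTimeMake(1200, 600))];
[captionAdaptor appendCaption:caption];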
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsynchronousKeyValueLoading.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsynchronousKeyValueLoading.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsynchronousKeyValueLoading.h 2022-02-15 02:25:30.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsynchronousKeyValueLoading.h 2022-06-03 18:09:30.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2018 Apple Inc. All rights reserved.
+ Copyright 2010-2021 Apple Inc. All rights reserved.
*/
@@ -19,7 +19,7 @@
AVKeyValueStatusLoaded = 2,
AVKeyValueStatusFailed = 3,
AVKeyValueStatusCancelled = 4
-};
+} AVF_DEPRECATED_FOR_SWIFT_ONLY("Use AVAsyncProperty.Status instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@protocol AVAsynchronousKeyValueLoading
@@ -54,7 +54,7 @@
The sole exception to this general rule is in usage on Mac OS X on the desktop, where it may be acceptable to block in cases in which the client is preparing objects for use on background threads or in operation queues. On iOS, values should always be loaded asynchronously prior to calling getters for the values, in any usage scenario.
*/
-- (AVKeyValueStatus)statusOfValueForKey:(NSString *)key error:(NSError * _Nullable * _Nullable)outError;
+- (AVKeyValueStatus)statusOfValueForKey:(NSString *)key error:(NSError * _Nullable * _Nullable)outError AVF_DEPRECATED_FOR_SWIFT_ONLY("Use status(of:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/*!
@method loadValuesAsynchronouslyForKeys:completionHandler:
@@ -64,7 +64,7 @@
@param handler
The block to be invoked when loading succeeds, fails, or is cancelled.
*/
-- (void)loadValuesAsynchronouslyForKeys:(NSArray<NSString *> *)keys completionHandler:(nullable void (^)(void))handler;
+- (void)loadValuesAsynchronouslyForKeys:(NSArray<NSString *> *)keys completionHandler:(nullable void (^)(void))handler AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(_:) instead. For non-deprecated properties that do not have an AVAsyncProperty equivalent, continue to query these properties synchronously", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
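These deprecations apply only to the Swift projection; the Objective-C calls remain supported. A minimal sketch of the existing pattern (`movieURL` is a placeholder):

// Sketch only: Objective-C continues to use asynchronous key loading; Swift callers
// are pointed at load(_:) / status(of:) instead.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:movieURL options:nil];
[asset loadValuesAsynchronouslyForKeys:@[@"duration"] completionHandler:^{
    NSError *error = nil;
    if ([asset statusOfValueForKey:@"duration" error:&error] == AVKeyValueStatusLoaded) {
        CMTime duration = asset.duration; // safe to read once the key reports loaded
    }
}];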
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h 2022-02-23 07:16:16.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h 2022-06-03 18:07:15.000000000 -0400
@@ -196,12 +196,14 @@
/*
@method setVolumeRampFromStartVolume:toEndVolume:timeRange:
@abstract Sets a volume ramp to apply during the specified timeRange.
+ @discussion This method throws an exception if the time range's start or duration is not numeric.
*/
- (void)setVolumeRampFromStartVolume:(float)startVolume toEndVolume:(float)endVolume timeRange:(CMTimeRange)timeRange;
/*
@method setVolume:atTime:
@abstract Sets the value of the audio volume at a specific time.
+ @discussion This method throws an exception if the time is not numeric.
*/
- (void)setVolume:(float)volume atTime:(CMTime)time;
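For context (not part of the diff), a minimal sketch of the calls whose time arguments must be numeric; `audioTrack` is a placeholder AVAssetTrack:

// Sketch only: both calls throw if handed non-numeric CMTime values.
AVMutableAudioMixInputParameters *params =
    [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
[params setVolume:1.0 atTime:kCMTimeZero];
[params setVolumeRampFromStartVolume:1.0
                         toEndVolume:0.0
                           timeRange:CMTimeRangeMake(CMTimeMake(9, 1), CMTimeMake(1, 1))];

AVMutableAudioMix *mix = [AVMutableAudioMix audioMix];
mix.inputParameters = @[params];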
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h 2022-02-23 07:14:12.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h 2022-05-31 15:04:24.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2017 Apple Inc. All rights reserved.
+ Copyright 2010-2021 Apple Inc. All rights reserved.
*/
@@ -22,6 +22,13 @@
#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE; \
+ (instancetype)new NS_UNAVAILABLE;
+// Some API is deprecated in Swift only. They remain available in Objective-C.
+#if defined(__swift__)
+#define AVF_DEPRECATED_FOR_SWIFT_ONLY(...) API_DEPRECATED(__VA_ARGS__)
+#else
+#define AVF_DEPRECATED_FOR_SWIFT_ONLY(...)
+#endif
+
#ifndef __has_feature
#define __has_feature(FEATURE) 0
#endif
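To illustrate the new macro (a sketch only; MyAsset and legacyValue are invented names, not SDK API): a declaration annotated this way is untouched in Objective-C builds and only picks up the API_DEPRECATED attribute when the header is processed for Swift.

// Illustration only, assuming AVFoundation has been imported so the macro is visible.
// In Objective-C the macro expands to nothing; when __swift__ is defined it expands to
// API_DEPRECATED with the given message and version ranges.
@interface MyAsset : NSObject
@property (nonatomic, readonly) float legacyValue
    AVF_DEPRECATED_FOR_SWIFT_ONLY("Use the async property instead", watchos(1.0, 9.0));
@end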
@@ -201,6 +208,20 @@
#endif
+// Removing Main Actor annotations
+#ifndef AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+ #if TARGET_OS_TV
+ #define AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER (__TV_OS_VERSION_MIN_REQUIRED >= 160000)
+ #elif TARGET_OS_WATCH
+ #define AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER (__WATCH_OS_VERSION_MIN_REQUIRED >= 90000)
+ #elif TARGET_OS_IPHONE
+ #define AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER (__IPHONE_OS_VERSION_MIN_REQUIRED >= 160000)
+ #elif TARGET_OS_MAC
+ #define AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER (__MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
+ #else
+ #define AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER 0
+ #endif
+#endif // AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
#else
#import <AVFCore/AVBase.h>
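The diff does not show how the deployment-target macro above is consumed, but the accompanying comment ("Removing Main Actor annotations") suggests it gates Swift-actor attributes on the minimum deployment target. A purely hypothetical sketch of that pattern (AVF_SWIFT_UI_ACTOR is an invented name, not SDK API):

// Hypothetical illustration only: keep the main-actor annotation when deploying to
// pre-2022 OS releases, and drop it when the deployment target is new enough.
#if AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
    #define AVF_SWIFT_UI_ACTOR
#else
    #define AVF_SWIFT_UI_ACTOR NS_SWIFT_UI_ACTOR
#endif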
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h 2022-02-23 07:59:47.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h 2022-05-31 15:03:43.000000000 -0400
@@ -26,7 +26,7 @@
@discussion
Instances of AVCaptureAudioDataOutput produce audio sample buffers suitable for processing using other media APIs. Applications can access the sample buffers with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureAudioDataOutput : AVCaptureOutput
{
@private
@@ -115,7 +115,7 @@
@abstract
Defines an interface for delegates of AVCaptureAudioDataOutput to receive captured audio sample buffers.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureAudioDataOutputSampleBufferDelegate <NSObject>
@optional
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h 2022-02-23 10:57:37.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h 2022-06-03 18:09:33.000000000 -0400
@@ -24,13 +24,13 @@
AVCaptureDataOutputSynchronizer synchronizes the delivery of data from multiple capture data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, AVCaptureAudioDataOutput) to a single delegate callback.
@discussion
- AVCaptureDataOutputSynchronizer is initialized with an array of data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, or AVCaptureAudioDataOutput) from which you'd like to receive a single, synchronized delegate callback. The first output in the array acts as the master data output and determines when the synchronized callback is delivered. When data is received for the master data output, it is held until all other data outputs have received data with an equal or later presentation time stamp, or it has been determined that there is no data for a particular output at the master data output's pts. Once all other outputs are ready, a single delegate callback is sent with all the data aligned with the master data output's data. Separate delegate callbacks are sent for any other data received with presentation time stamps earlier than the next master data output time.
+ AVCaptureDataOutputSynchronizer is initialized with an array of data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, or AVCaptureAudioDataOutput) from which you'd like to receive a single, synchronized delegate callback. The first output in the array acts as the primary data output and determines when the synchronized callback is delivered. When data is received for the primary data output, it is held until all other data outputs have received data with an equal or later presentation time stamp, or it has been determined that there is no data for a particular output at the primary data output's pts. Once all other outputs are ready, a single delegate callback is sent with all the data aligned with the primary data output's data. Separate delegate callbacks are sent for any other data received with presentation time stamps earlier than the next primary data output time.
- For instance, if you specify a video data output as your first (master) output and a metadata output for detected faces as your second output, your data callback will not be called until there is face data ready for a video frame, or it is assured that there is no face metadata for that particular video frame.
+ For instance, if you specify a video data output as your first (primary) output and a metadata output for detected faces as your second output, your data callback will not be called until there is face data ready for a video frame, or it is assured that there is no face metadata for that particular video frame.
Note that the AVCaptureDataOutputSynchronizer overrides each data output's -setSampleBufferDelegate:queue:, -setDepthDataDelegate:queue:, or -setMetadataObjectsDelegate:queue: method call. -[AVCaptureVideoDataOutput alwaysDiscardsLateVideoFrames] and -[AVCaptureDepthDataOutput alwaysDiscardsLateDepthData] properties are honored.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDataOutputSynchronizer : NSObject
{
@private
@@ -45,7 +45,7 @@
Instantiates an AVCaptureDataOutputSynchronizer from one or more capture data outputs.
@param dataOutputs
- An array of capture data outputs where the first is the master.
+ An array of capture data outputs where the first is the primary output.
@result
A newly initialized AVCaptureDataOutputSynchronizer instance.
*/
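A hedged sketch of the primary-output behaviour described above (these capture classes are unavailable on watchOS; the code assumes a controller class that adopts AVCaptureDataOutputSynchronizerDelegate and keeps `synchronizer` and `videoDataOutput` properties):

// Sketch only: the first output in the array is the primary output that drives delivery.
- (void)configureSynchronizerWithVideoOutput:(AVCaptureVideoDataOutput *)videoDataOutput
                              metadataOutput:(AVCaptureMetadataOutput *)faceMetadataOutput
                                       queue:(dispatch_queue_t)callbackQueue {
    self.videoDataOutput = videoDataOutput;
    self.synchronizer = [[AVCaptureDataOutputSynchronizer alloc]
                            initWithDataOutputs:@[videoDataOutput, faceMetadataOutput]];
    [self.synchronizer setDelegate:self queue:callbackQueue];
}

- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)collection {
    AVCaptureSynchronizedSampleBufferData *videoData = (AVCaptureSynchronizedSampleBufferData *)
        [collection synchronizedDataForCaptureOutput:self.videoDataOutput];
    if (videoData != nil && !videoData.sampleBufferWasDropped) {
        // Process videoData.sampleBuffer together with any synchronized face metadata.
    }
}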
@@ -98,7 +98,7 @@
@end
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureDataOutputSynchronizerDelegate <NSObject>
@required
@@ -133,7 +133,7 @@
@discussion
 AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate method delivers a collection of AVCaptureSynchronizedData objects which can be iterated using each AVCaptureOutput as a key. AVCaptureSynchronizedDataCollection supports object subscripting and fast enumeration of the data outputs as keys.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSynchronizedDataCollection : NSObject <NSFastEnumeration>
{
@private
@@ -195,7 +195,7 @@
@discussion
AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedData: delegate callback delivers a dictionary of key/value pairs, with the keys being the AVCaptureOutput instances returning data, and the values being concrete subclasses of AVCaptureSynchronizedData.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSynchronizedData : NSObject
{
@private
@@ -229,7 +229,7 @@
@discussion
Synchronized sample buffer data is valid for the duration of AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedData: delegate callback. To extend the sample buffer data beyond the callback, you must CFRetain it, and later call CFRelease when you're done with it.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSynchronizedSampleBufferData : AVCaptureSynchronizedData
{
@private
@@ -281,7 +281,7 @@
@discussion
A single AVCaptureMetadataOutput may be configured to deliver multiple kinds of metadata objects (such as QRCodes and detected faces). AVCaptureSynchronizedMetadataObjectData's -metadataObjects array may contain multiple AVMetadataObject subclasses, depending on how the AVCaptureMetadataOutput was configured. All synchronized metadata objects share a common timestamp.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSynchronizedMetadataObjectData : AVCaptureSynchronizedData
{
@private
@@ -313,7 +313,7 @@
@discussion
Depth data, like video, may be dropped if not serviced in a timely fashion.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSynchronizedDepthData : AVCaptureSynchronizedData
{
@private
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h 2022-02-23 07:56:28.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h 2022-05-31 15:04:21.000000000 -0400
@@ -28,7 +28,7 @@
AVCaptureDepthDataOutput always provides depth data in the format expressed by its source's -[AVCaptureDevice activeDepthDataFormat] property. If you wish to receive depth data in another format, you may choose from the -[AVCaptureDevice activeFormat]'s -[AVCaptureDeviceFormat supportedDepthDataFormats], and set it using -[AVCaptureDevice setActiveDepthDataFormat:].
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDepthDataOutput : AVCaptureOutput
{
@private
@@ -106,7 +106,7 @@
@abstract
Defines an interface for delegates of AVCaptureDepthDataOutput to receive captured depth data and be notified of late depth data that were dropped.
*/
-API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureDepthDataOutputDelegate <NSObject>
@optional
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h 2022-02-23 10:57:35.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h 2022-05-31 15:02:34.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
#import <AVFoundation/AVBase.h>
@@ -25,7 +25,7 @@
@discussion
The notification object is an AVCaptureDevice instance representing the device that became available.
*/
-AVF_EXPORT NSString *const AVCaptureDeviceWasConnectedNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureDeviceWasConnectedNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceWasDisconnectedNotification
@@ -35,7 +35,7 @@
@discussion
The notification object is an AVCaptureDevice instance representing the device that became unavailable.
*/
-AVF_EXPORT NSString *const AVCaptureDeviceWasDisconnectedNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureDeviceWasDisconnectedNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceSubjectAreaDidChangeNotification
@@ -45,7 +45,7 @@
@discussion
Clients may observe the AVCaptureDeviceSubjectAreaDidChangeNotification to know when an instance of AVCaptureDevice has detected a substantial change to the video subject area. This notification is only sent if you first set subjectAreaChangeMonitoringEnabled to YES.
*/
-AVF_EXPORT NSString *const AVCaptureDeviceSubjectAreaDidChangeNotification API_AVAILABLE(ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureDeviceSubjectAreaDidChangeNotification API_AVAILABLE(ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
#pragma mark - AVCaptureDevice
@@ -65,7 +65,7 @@
Instances of AVCaptureDevice can be used to provide media data to an AVCaptureSession by creating an AVCaptureDeviceInput with the device and adding that to the capture session.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice : NSObject
{
@private
@@ -403,10 +403,10 @@
AVCaptureDevicePositionUnspecified = 0,
AVCaptureDevicePositionBack = 1,
AVCaptureDevicePositionFront = 2,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDevicePosition)
/*!
@@ -428,7 +428,7 @@
@discussion
The AVCaptureDeviceType string constants are intended to be used in combination with the AVCaptureDeviceDiscoverySession class to obtain a list of devices matching certain search criteria.
*/
-typedef NSString *AVCaptureDeviceType NS_TYPED_ENUM API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+typedef NSString *AVCaptureDeviceType NS_TYPED_ENUM API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeExternalUnknown
@@ -440,19 +440,19 @@
@constant AVCaptureDeviceTypeBuiltInMicrophone
A built-in microphone.
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInMicrophone API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInMicrophone API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeBuiltInWideAngleCamera
A built-in wide angle camera device. These devices are suitable for general purpose use.
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInWideAngleCamera API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInWideAngleCamera API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeBuiltInTelephotoCamera
A built-in camera device with a longer focal length than a wide angle camera. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInTelephotoCamera API_AVAILABLE(ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInTelephotoCamera API_AVAILABLE(ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeBuiltInUltraWideCamera
@@ -477,7 +477,7 @@
Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDualCamera API_AVAILABLE(ios(10.2), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDualCamera API_AVAILABLE(ios(10.2), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeBuiltInDualWideCamera
@@ -518,7 +518,7 @@
@constant AVCaptureDeviceTypeBuiltInTrueDepthCamera
A device that consists of two cameras, one YUV and one Infrared. The infrared camera provides high quality depth information that is synchronized and perspective corrected to frames produced by the YUV camera. While the resolution of the depth data and YUV frames may differ, their field of view and aspect ratio always match. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession or -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInTrueDepthCamera API_AVAILABLE(ios(11.1), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInTrueDepthCamera API_AVAILABLE(ios(11.1), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureDeviceTypeBuiltInLiDARDepthCamera
@@ -527,13 +527,19 @@
AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInLiDARDepthCamera API_AVAILABLE(ios(15.4), macCatalyst(15.4)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
+ @constant AVCaptureDeviceTypeDeskViewCamera
+ A distortion corrected cut out from an ultra wide camera, made to approximate an overhead camera pointing at a desk. Supports multicam operation.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeDeskViewCamera API_AVAILABLE(macos(13.0)) API_UNAVAILABLE(ios, macCatalyst, tvos) API_UNAVAILABLE(watchos);
+
+/*!
@constant AVCaptureDeviceTypeBuiltInDuoCamera
A deprecated synonym for AVCaptureDeviceTypeBuiltInDualCamera. Please use AVCaptureDeviceTypeBuiltInDualCamera instead.
*/
-AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDuoCamera API_DEPRECATED("Use AVCaptureDeviceTypeBuiltInDualCamera instead.", ios(10.0, 10.2)) API_UNAVAILABLE(macos) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDuoCamera API_DEPRECATED("Use AVCaptureDeviceTypeBuiltInDualCamera instead.", ios(10.0, 10.2)) API_UNAVAILABLE(macos) API_UNAVAILABLE(tvos, watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceType)
/*!
@@ -546,6 +552,12 @@
*/
@property(nonatomic, readonly) AVCaptureDeviceType deviceType API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos);
+@end
+
+
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
+@interface AVCaptureDevice (AVCaptureDefaultDevice)
+
/*!
@method defaultDeviceWithDeviceType:mediaType:position:
@abstract
@@ -568,7 +580,37 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(13.0), macCatalyst(16.0)) API_UNAVAILABLE(ios, tvos, watchos)
+@interface AVCaptureDevice (PreferredCamera)
+
+/*!
+ @property userPreferredCamera
+ @abstract
+ Settable property that specifies a user preferred camera.
+
+ @discussion
+ Setting this property allows an application to persist its user’s preferred camera across app launches and reboots. The property internally maintains a short history, so if your user’s most recent preferred camera is not currently connected, it still reports the next best choice. This property always returns a device that is present. If no camera is available nil is returned. Setting the property to nil has no effect.
+*/
+@property(class, readwrite, nullable) AVCaptureDevice *userPreferredCamera API_AVAILABLE(macos(13.0), macCatalyst(16.0)) API_UNAVAILABLE(ios, tvos, watchos);
+
+/*!
+ @property systemPreferredCamera
+ @abstract
+ Specifies the best camera to use as determined by the system.
+
+ @discussion
+ Apple chooses the default value. This property incorporates userPreferredCamera as well as other factors, such as camera suspension and Apple cameras appearing that should be automatically chosen. The property may change spontaneously, such as when the preferred camera goes away. This property always returns a device that is present. If no camera is available nil is returned.
+
+ Applications that adopt this API should always key-value observe this property and update their AVCaptureSession’s input device to reflect changes to the systemPreferredCamera. The application can still offer users the ability to pick a camera by calling -setUserPreferredCamera:, which will cause the systemPreferredCamera API to put the user’s choice first until either another Apple-preferred device becomes available or the machine is rebooted (after which it reverts to its original behavior of returning the internally determined best camera to use).
+
+ If the application wishes to offer users a fully manual camera selection mode in addition to automatic camera selection, it is recommended to call setUserPreferredCamera: each time the user makes a camera selection, but ignore key-value observer updates to systemPreferredCamera while in manual selection mode.
+*/
+@property(class, readonly, nullable) AVCaptureDevice *systemPreferredCamera API_AVAILABLE(macos(13.0), macCatalyst(16.0)) API_UNAVAILABLE(ios, tvos, watchos);
+
+@end
+
+
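A hedged Objective-C sketch of the adoption pattern described above (macOS-only per the availability annotations; `chosenDevice` and the session-reconfiguration code are placeholders, and the class property is observed on the assumption that it is KVO-compliant as the discussion states):

// Sketch only: record explicit user picks, then follow the system's choice via KVO.
AVCaptureDevice.userPreferredCamera = chosenDevice;

[AVCaptureDevice addObserver:self
                  forKeyPath:@"systemPreferredCamera"
                     options:NSKeyValueObservingOptionNew
                     context:NULL];
// In -observeValueForKeyPath:..., swap the AVCaptureSession's video input to
// AVCaptureDevice.systemPreferredCamera, unless the app is in a manual-selection mode.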
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceSystemPressure)
/*!
@@ -635,7 +677,7 @@
AVCapturePrimaryConstituentDeviceRestrictedSwitchingBehaviorConditionExposureModeChanged = 1 << 2,
} NS_SWIFT_NAME(AVCaptureDevice.PrimaryConstituentDeviceRestrictedSwitchingBehaviorConditions) API_AVAILABLE(macos(12.0), ios(15.0), macCatalyst(15.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceVirtual)
/*!
@@ -773,10 +815,10 @@
AVCaptureFlashModeOff = 0,
AVCaptureFlashModeOn = 1,
AVCaptureFlashModeAuto = 2,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceFlash)
/*!
@@ -855,17 +897,17 @@
AVCaptureTorchModeOff = 0,
AVCaptureTorchModeOn = 1,
AVCaptureTorchModeAuto = 2,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureMaxAvailableTorchLevel
A special value that may be passed to -setTorchModeWithLevel:error: to set the torch to the maximum level currently available. Under thermal duress, the maximum available torch level may be less than 1.0.
*/
-AVF_EXPORT const float AVCaptureMaxAvailableTorchLevel API_AVAILABLE(macos(10.15), ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT const float AVCaptureMaxAvailableTorchLevel API_AVAILABLE(macos(10.15), ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceTorch)
/*!
@@ -962,7 +1004,7 @@
AVCaptureFocusModeLocked = 0,
AVCaptureFocusModeAutoFocus = 1,
AVCaptureFocusModeContinuousAutoFocus = 2,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@@ -981,10 +1023,10 @@
AVCaptureAutoFocusRangeRestrictionNone = 0,
AVCaptureAutoFocusRangeRestrictionNear = 1,
AVCaptureAutoFocusRangeRestrictionFar = 2,
-} API_AVAILABLE(ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceFocus)
/*!
@@ -1128,7 +1170,7 @@
@constant AVCaptureLensPositionCurrent
A special value that may be passed as the lensPosition parameter of setFocusModeLockedWithLensPosition:completionHandler: to indicate that the caller does not wish to specify a value for the lensPosition property, and that it should instead be set to its current value. Note that the device may be adjusting lensPosition at the time of the call, in which case the value at which lensPosition is locked may differ from the value obtained by querying the lensPosition property.
*/
-AVF_EXPORT const float AVCaptureLensPositionCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos);
+AVF_EXPORT const float AVCaptureLensPositionCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@method setFocusModeLockedWithLensPosition:completionHandler:
@@ -1177,10 +1219,10 @@
AVCaptureExposureModeAutoExpose = 1,
AVCaptureExposureModeContinuousAutoExposure = 2,
AVCaptureExposureModeCustom API_AVAILABLE(macos(10.15), ios(8.0), macCatalyst(14.0)) = 3,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceExposure)
/*!
@@ -1304,13 +1346,13 @@
@constant AVCaptureExposureDurationCurrent
A special value that may be passed as the duration parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the exposureDuration property, and that it should instead be set to its current value. Note that the device may be adjusting exposureDuration at the time of the call, in which case the value to which exposureDuration is set may differ from the value obtained by querying the exposureDuration property.
*/
-AVF_EXPORT const CMTime AVCaptureExposureDurationCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT const CMTime AVCaptureExposureDurationCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureISOCurrent
A special value that may be passed as the ISO parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the ISO property, and that it should instead be set to its current value. Note that the device may be adjusting ISO at the time of the call, in which case the value to which ISO is set may differ from the value obtained by querying the ISO property.
*/
-AVF_EXPORT const float AVCaptureISOCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT const float AVCaptureISOCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@method setExposureModeCustomWithDuration:ISO:completionHandler:
@@ -1373,7 +1415,7 @@
@constant AVCaptureExposureTargetBiasCurrent
A special value that may be passed as the bias parameter of setExposureTargetBias:completionHandler: to indicate that the caller does not wish to specify a value for the exposureTargetBias property, and that it should instead be set to its current value.
*/
-AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@method setExposureTargetBias:completionHandler:
@@ -1393,7 +1435,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceToneMapping)
/*!
@@ -1438,7 +1480,7 @@
AVCaptureWhiteBalanceModeLocked = 0,
AVCaptureWhiteBalanceModeAutoWhiteBalance = 1,
AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@@ -1450,7 +1492,7 @@
float redGain;
float greenGain;
float blueGain;
-} AVCaptureWhiteBalanceGains API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} AVCaptureWhiteBalanceGains API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@@ -1461,7 +1503,7 @@
typedef struct {
float x;
float y;
-} AVCaptureWhiteBalanceChromaticityValues API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} AVCaptureWhiteBalanceChromaticityValues API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@@ -1472,10 +1514,10 @@
typedef struct {
float temperature;
float tint;
-} AVCaptureWhiteBalanceTemperatureAndTintValues API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} AVCaptureWhiteBalanceTemperatureAndTintValues API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceWhiteBalance)
/*!
@@ -1557,7 +1599,7 @@
@constant AVCaptureWhiteBalanceGainsCurrent
A special value that may be passed as a parameter of setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains, and that gains should instead be locked at their value at the moment that white balance is locked.
*/
-AVF_EXPORT const AVCaptureWhiteBalanceGains AVCaptureWhiteBalanceGainsCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT const AVCaptureWhiteBalanceGains AVCaptureWhiteBalanceGainsCurrent API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@method setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:
@@ -1637,7 +1679,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceSubjectAreaChangeMonitoring)
/*!
@@ -1653,7 +1695,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceLowLightBoost)
/*!
@@ -1689,19 +1731,19 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceVideoZoom)
/*!
@property videoZoomFactor
@abstract
Controls zoom level of image outputs
-
+
@discussion
Applies a centered crop for all image outputs, scaling as necessary to maintain output dimensions. Minimum value of 1.0 yields full field of view, increasing values will increase magnification, up to a maximum value specified in the activeFormat's videoMaxZoomFactor property. Modifying the zoom factor will cancel any active rampToVideoZoomFactor:withRate:, and snap directly to the assigned value. Assigning values outside the acceptable range will generate an NSRangeException. Clients can key value observe the value of this property.
-
+
-setVideoZoomFactor: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-
+
@seealso -[AVCaptureDeviceFormat videoMaxZoomFactor] and -[AVCaptureDeviceFormat videoZoomFactorUpscaleThreshold]
*/
@property(nonatomic) CGFloat videoZoomFactor API_AVAILABLE(ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos);
@@ -1776,10 +1818,10 @@
AVAuthorizationStatusRestricted = 1,
AVAuthorizationStatusDenied = 2,
AVAuthorizationStatusAuthorized = 3,
-} API_AVAILABLE(macos(10.14), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.14), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceAuthorization)
/*!
@@ -1847,7 +1889,7 @@
} API_AVAILABLE(macos(10.7)) API_UNAVAILABLE(ios, macCatalyst, watchos, tvos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceTransportControls)
/*!
@@ -1903,7 +1945,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceHighDynamicRangeSupport)
/*!
@@ -1945,11 +1987,11 @@
typedef NS_ENUM(NSInteger, AVCaptureColorSpace) {
AVCaptureColorSpace_sRGB = 0,
AVCaptureColorSpace_P3_D65 = 1,
- AVCaptureColorSpace_HLG_BT2020 API_AVAILABLE(ios(14.1), macCatalyst(14.1)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos) = 2,
-} API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+ AVCaptureColorSpace_HLG_BT2020 API_AVAILABLE(ios(14.1), macCatalyst(14.1)) API_UNAVAILABLE(macos, tvos) = 2,
+} API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceColorSpaceSupport)
/*!
@@ -1965,7 +2007,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceDepthSupport)
/*!
@@ -2003,9 +2045,9 @@
@property minAvailableVideoZoomFactor
@abstract
Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
-
+
@discussion
- On non-virtual devices the minAvailableVideoZoomFactor is always 1.0. On a virtual device the minAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMinZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value smaller than 1.0, an NSRangeException is thrown. Setting the videoZoomFactor to a value greater than or equal to 1.0, but lower than minAvailableVideoZoomFactor results in the value being clamped to the minAvailableVideoZoomFactor. Clients can key value observe the value of this property.
+ On non-virtual devices the minAvailableVideoZoomFactor is always 1.0. On a virtual device the minAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMinZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value smaller than 1.0, an NSRangeException is thrown. Setting the videoZoomFactor to a value greater than or equal to 1.0, but lower than minAvailableVideoZoomFactor results in the value being clamped to the minAvailableVideoZoomFactor. Clients can key value observe the value of this property.
*/
@property(nonatomic, readonly) CGFloat minAvailableVideoZoomFactor API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos);
@@ -2013,7 +2055,7 @@
@property maxAvailableVideoZoomFactor
@abstract
Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
-
+
@discussion
On non-virtual devices the maxAvailableVideoZoomFactor is always equal to the activeFormat.videoMaxZoomFactor. On a virtual device the maxAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMaxZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value greater than activeFormat.videoMaxZoomFactor, an NSRangeException is thrown. Setting the videoZoomFactor to a value less than or equal to activeFormat.videoMaxZoomFactor, but greater than maxAvailableVideoZoomFactor results in the value being clamped to the maxAvailableVideoZoomFactor. Clients can key value observe the value of this property.
*/
@@ -2022,7 +2064,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceGeometricDistortionCorrection)
/*!
@@ -2048,7 +2090,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceCalibration)
/*!
@@ -2075,7 +2117,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureDeviceCenterStage)
/*!
@@ -2166,6 +2208,38 @@
@end
+API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
+@interface AVCaptureDevice (AVCaptureDeviceContinuityCamera)
+
+/*!
+ @property continuityCamera
+ @abstract
+ A property that reports YES if the receiver is a Continuity Camera.
+
+ @discussion
+ Access this property to discover if the receiver is a Continuity Camera (external iPhone webcam).
+*/
+@property(nonatomic, readonly, getter=isContinuityCamera) BOOL continuityCamera API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
+
+@end
+
+
+API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
+@interface AVCaptureDevice (AVCaptureDeviceDeskViewCamera)
+
+/*!
+ @property companionDeskViewCamera
+ @abstract
+ A reference to the Desk View Camera that is associated with and derived from this camera.
+
+ @discussion
+ The companionDeskViewCamera property allows you to discover if the receiver has a paired Desk View Camera which derives its desk framing from the receiver's ultra wide frame. In the presence of multiple Continuity Cameras, this property allows you to pair a particular Continuity Camera with its associated Desk View Camera.
+*/
+@property(nonatomic, readonly, nullable) AVCaptureDevice *companionDeskViewCamera API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
+
+@end
+
+
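A small hedged sketch of the pairing these two categories describe (`device` is a placeholder AVCaptureDevice discovered elsewhere):

// Sketch only: pair a Continuity Camera with its derived Desk View Camera, if any.
if (device.isContinuityCamera) {
    AVCaptureDevice *deskViewCamera = device.companionDeskViewCamera;
    if (deskViewCamera != nil) {
        // Create a separate AVCaptureDeviceInput for deskViewCamera as needed.
    }
}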
API_AVAILABLE(macos(12.0), ios(15.0), macCatalyst(15.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDevice (AVCaptureMicrophoneMode)
@@ -2254,7 +2328,7 @@
@discussion
This class allows clients to discover devices by providing certain search criteria. The objective of this class is to help find devices by device type and optionally by media type or position and allow you to key-value observe changes to the returned devices list.
*/
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceDiscoverySession : NSObject
AV_INIT_UNAVAILABLE
@@ -2313,7 +2387,7 @@
@discussion
An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat wraps a CMFormatDescription and expresses a range of valid video frame rates as an NSArray of AVFrameRateRange objects. AVFrameRateRange expresses min and max frame rate as a rate in frames per second and duration (CMTime). An AVFrameRateRange object is immutable. Its values do not change for the life of the object.
*/
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVFrameRateRange : NSObject
{
@private
@@ -2385,9 +2459,9 @@
AVCaptureVideoStabilizationModeOff = 0,
AVCaptureVideoStabilizationModeStandard = 1,
AVCaptureVideoStabilizationModeCinematic = 2,
- AVCaptureVideoStabilizationModeCinematicExtended API_AVAILABLE(ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos) = 3,
+ AVCaptureVideoStabilizationModeCinematicExtended API_AVAILABLE(ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) = 3,
AVCaptureVideoStabilizationModeAuto = -1,
-} API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@@ -2406,7 +2480,7 @@
AVCaptureAutoFocusSystemNone = 0,
AVCaptureAutoFocusSystemContrastDetection = 1,
AVCaptureAutoFocusSystemPhaseDetection = 2,
-} API_AVAILABLE(macos(10.15), ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.15), ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
#pragma mark - AVCaptureDeviceFormat
@@ -2422,7 +2496,7 @@
@discussion
An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat is a thin wrapper around a CMFormatDescription, and can carry associated device format information that doesn't go in a CMFormatDescription, such as min and max frame rate. An AVCaptureDeviceFormat object is immutable. Its values do not change for the life of the object.
*/
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceFormat : NSObject
{
@private
@@ -2646,7 +2720,7 @@
@property videoMinZoomFactorForDepthDataDelivery
@abstract
Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
-
+
@discussion
Virtual devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns 1.0.
*/
@@ -2656,7 +2730,7 @@
@property videoMaxZoomFactorForDepthDataDelivery
@abstract
Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
-
+
@discussion
Virtual devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns videoMaxZoomFactor.
*/
@@ -2685,7 +2759,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceFormat (AVCaptureDeviceFormatDepthDataAdditions)
/*
@@ -2701,7 +2775,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceFormat (AVCaptureDeviceFormatMultiCamAdditions)
/*!
@@ -2717,7 +2791,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceFormat (AVCaptureDeviceFormatGeometricDistortionCorrection)
/*!
@@ -2733,7 +2807,7 @@
@end
-API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceFormat (AVCaptureDeviceFormatCenterStage)
/*!
@@ -2805,6 +2879,57 @@
@end
+API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
+@interface AVCaptureDevice (AVCaptureDeviceStudioLight)
+
+/*!
+ @property studioLightEnabled
+ @abstract
+ A class property indicating whether the Studio Light feature is currently enabled in Control Center.
+
+ @discussion
+ This property changes to reflect the Studio Light state in Control Center. It is key-value observable.
+ */
+@property(class, getter=isStudioLightEnabled) BOOL studioLightEnabled API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @property studioLightActive
+ @abstract
+ Indicates whether Studio Light is currently active on a particular AVCaptureDevice.
+
+ @discussion
+ This readonly property returns YES when Studio Light is currently active on the receiver. When active, the subject's face is artificially lit to simulate the presence of a studio light near the camera.
+ */
+@property(nonatomic, readonly, getter=isStudioLightActive) BOOL studioLightActive API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+@end
+
+API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
+@interface AVCaptureDeviceFormat (AVCaptureDeviceFormatStudioLight)
+
+/*!
+ @property studioLightSupported
+ @abstract
+ Indicates whether the format supports the Studio Light feature.
+
+ @discussion
+ This property returns YES if the format supports Studio Light (artificial re-lighting of the subject's face). See +AVCaptureDevice.studioLightEnabled.
+ */
+@property(nonatomic, readonly, getter=isStudioLightSupported) BOOL studioLightSupported API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!
+ @property videoFrameRateRangeForStudioLight
+ @abstract
+ Indicates the minimum / maximum frame rates available when studioLight is YES.
+
+ @discussion
+ Devices may support a limited frame rate range when Studio Light is active. If this device format does not support Studio Light, this property returns nil.
+ */
+@property(nonatomic, readonly, nullable) AVFrameRateRange *videoFrameRateRangeForStudioLight API_AVAILABLE(macos(13.0), ios(16.0), macCatalyst(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+@end
+
+
#pragma mark - AVCaptureDeviceInputSource
@class AVCaptureDeviceInputSourceInternal;
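The Studio Light additions above introduce a class-level enable state (mirroring Control Center), a per-device active flag, and per-format support / frame-rate queries. A minimal sketch of how an app might read them follows; it is not part of the diff, and since the API is marked unavailable on watchOS it assumes an iOS 16 / macOS 13 deployment target.

```objc
// Minimal sketch (not from the diff): querying the new Studio Light API
// surfaced in the iOS 16 / macOS 13 SDKs. Availability checks are assumed
// to be satisfied by the deployment target.
#import <AVFoundation/AVFoundation.h>

static void LogStudioLightState(AVCaptureDevice *device)
{
    // Class property reflecting the user's Control Center setting (key-value observable).
    BOOL enabledInControlCenter = AVCaptureDevice.studioLightEnabled;

    // Per-format support and the constrained frame-rate range while the effect is active.
    AVCaptureDeviceFormat *format = device.activeFormat;
    if (format.studioLightSupported) {
        AVFrameRateRange *range = format.videoFrameRateRangeForStudioLight;
        NSLog(@"Studio Light supported; frame rate range while active: %@", range);
    }

    // Whether the effect is currently being applied on this particular device.
    NSLog(@"Enabled in Control Center: %d, active on device: %d",
          enabledInControlCenter, device.studioLightActive);
}
```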
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h 2022-02-23 07:56:27.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h 2022-05-31 14:52:19.000000000 -0400
@@ -26,13 +26,13 @@
AVCaptureFileOutput is an abstract subclass of AVCaptureOutput that provides an interface for writing captured media to files.
@discussion
- This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on Mac OS X, the output file can by changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
+ This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on macOS, the output file can by changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
- On Mac OS X, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
+ On macOS, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureFileOutput : AVCaptureOutput
{
@private
@@ -72,11 +72,11 @@
@discussion
The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
- Clients need not call stopRecording before calling this method while another recording is in progress. On Mac OS X, if this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
+ Clients need not call stopRecording before calling this method while another recording is in progress. On macOS, if this method is invoked while an existing output file was already being recorded, no media samples will be discarded between the old file and the new file.
When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included to the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
- On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
+ On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:. Use -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
*/
@@ -92,7 +92,7 @@
When recording is stopped either by calling this method, by changing files using startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be included to the file will be written in the background. Therefore, before using the file, clients must wait until the delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
- On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
+ On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
*/
- (void)stopRecording;
@@ -114,7 +114,7 @@
@discussion
This property indicates recording to the file returned by outputFileURL has been previously paused using the pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new samples can be written to the same file in the future by calling resumeRecording.
*/
-@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused __IOS_PROHIBITED __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused API_UNAVAILABLE(ios, tvos);
/*!
@method pauseRecording
@@ -124,9 +124,9 @@
@discussion
This method causes the receiver to stop writing captured samples to the current output file returned by outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
- On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
+ On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
*/
-- (void)pauseRecording __IOS_PROHIBITED __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+- (void)pauseRecording API_UNAVAILABLE(ios, tvos);
/*!
@method resumeRecording
@@ -136,9 +136,9 @@
@discussion
This method causes the receiver to resume writing captured samples to the current output file returned by outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple media segments that are not contiguous in time to a single file.
- On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
+ On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
*/
-- (void)resumeRecording __IOS_PROHIBITED __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+- (void)resumeRecording API_UNAVAILABLE(ios, tvos);
/*!
@property recordedDuration
@@ -198,7 +198,7 @@
@abstract
Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureFileOutputRecordingDelegate <NSObject>
@optional
@@ -328,7 +328,7 @@
The AVCaptureFileOutput instance with which the delegate is associated.
@discussion
- In apps linked before Mac OS X 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after Mac OS X 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
+ In apps linked before macOS 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after macOS 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
*/
- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)output API_AVAILABLE(macos(10.8)) API_UNAVAILABLE(ios, macCatalyst, watchos, tvos);
@@ -372,9 +372,9 @@
AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie files.
@discussion
- AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (Mac OS X), and specify an interval at which movie fragments should be written.
+ AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (macOS), and specify an interval at which movie fragments should be written.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureMovieFileOutput : AVCaptureFileOutput
{
@private
@@ -456,7 +456,7 @@
The connection delivering the media to be encoded.
@discussion
- See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. [NSDictionary dictionary], means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
+ See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. +[NSDictionary dictionary]), means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
On iOS, your outputSettings dictionary may only contain keys listed in - supportedOutputSettingsKeysForConnection:. If you specify any other key, an NSInvalidArgumentException will be thrown. Further restrictions may be imposed on the AVVideoCodecTypeKey. Its value should be present in the -availableVideoCodecTypes array. If AVVideoCompressionPropertiesKey is specified, you must also specify a valid value for AVVideoCodecKey. On iOS versions prior to 12.0, the only settable key for video connections is AVVideoCodecTypeKey. On iOS 12.0 and later, video connections gain support for AVVideoCompressionPropertiesKey.
@@ -597,7 +597,7 @@
When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included to the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
- On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
+ On macOS, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
*/
- (void)startRecordingToOutputFileURL:(NSURL *)outputFileURL outputFileType:(AVFileType)fileType recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
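The AVCaptureFileOutput changes above are mostly wording ("Mac OS X" to "macOS") and availability-macro modernization, but the recording flow they describe still hinges on a delegate, since files finish writing in the background. A minimal sketch, with a hypothetical controller class and output path:

```objc
// Minimal sketch (not part of the diff): the record-to-file flow described
// in the header comments above. Session configuration and error handling are
// elided; the class name and file path are hypothetical.
#import <AVFoundation/AVFoundation.h>

@interface RecordingController : NSObject <AVCaptureFileOutputRecordingDelegate>
@end

@implementation RecordingController

- (void)startRecordingWithOutput:(AVCaptureMovieFileOutput *)output
{
    NSURL *url = [NSURL fileURLWithPath:@"/tmp/capture.mov"]; // hypothetical path
    [output startRecordingToOutputFileURL:url recordingDelegate:self];
}

// Called once all data has been written to the file (or an error occurred).
- (void)captureOutput:(AVCaptureFileOutput *)output
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray<AVCaptureConnection *> *)connections
                error:(NSError *)error
{
    NSLog(@"Finished recording to %@ (error: %@)", outputFileURL, error);
}

@end
```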
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h 2022-02-23 07:16:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h 2022-05-31 15:02:32.000000000 -0400
@@ -33,7 +33,7 @@
@discussion
Concrete instances of AVCaptureInput representing input sources such as cameras can be added to instances of AVCaptureSession using the -[AVCaptureSession addInput:] method. An AVCaptureInput vends one or more streams of media data. For example, input devices can provide both audio and video data. Each media stream provided by an input is represented by an AVCaptureInputPort object. Within a capture session, connections are made between AVCaptureInput instances and AVCaptureOutput instances via AVCaptureConnection objects that define the mapping between a set of AVCaptureInputPort objects and a single AVCaptureOutput.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureInput : NSObject
{
@private
@@ -63,7 +63,7 @@
@discussion
The notification object is the AVCaptureInputPort instance whose format description changed.
*/
-AVF_EXPORT NSString *const AVCaptureInputPortFormatDescriptionDidChangeNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureInputPortFormatDescriptionDidChangeNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
#pragma mark - AVCaptureInputPort
@@ -78,7 +78,7 @@
@discussion
Instances of AVCaptureInputPort cannot be created directly. An AVCaptureInput exposes its input ports via its ports property. Input ports provide information about the format of their media data via the mediaType and formatDescription properties, and allow clients to control the flow of data via the enabled property. Input ports are used by an AVCaptureConnection to define the mapping between inputs and outputs in an AVCaptureSession.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureInputPort : NSObject
{
@private
@@ -177,7 +177,7 @@
@discussion
Instances of AVCaptureDeviceInput are input sources for AVCaptureSession that provide media data from devices connected to the system, represented by instances of AVCaptureDevice.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureDeviceInput : AVCaptureInput
{
@private
@@ -411,7 +411,7 @@
@discussion
Instances of AVCaptureMetadataInput are input sources for AVCaptureSession that provide AVMetadataItems to an AVCaptureSession. AVCaptureMetadataInputs present one and only one AVCaptureInputPort, which currently may only be connected to an AVCaptureMovieFileOutput. The metadata supplied over the input port is provided by the client, and must conform to a client-supplied CMFormatDescription. The AVMetadataItems are supplied in an AVTimedMetadataGroup.
*/
-API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureMetadataInput : AVCaptureInput
{
@private
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h 2022-02-23 07:59:47.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h 2022-05-31 14:52:22.000000000 -0400
@@ -25,7 +25,7 @@
@discussion
Instances of AVCaptureMetadataOutput emit arrays of AVMetadataObject instances (see AVMetadataObject.h), such as detected faces. Applications can access the metadata objects with the captureOutput:didOutputMetadataObjects:fromConnection: delegate method.
*/
-API_AVAILABLE(ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(13.0), ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureMetadataOutput : AVCaptureOutput
{
@private
@@ -115,7 +115,7 @@
@abstract
Defines an interface for delegates of AVCaptureMetadataOutput to receive emitted objects.
*/
-API_AVAILABLE(ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(13.0), ios(6.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureMetadataOutputObjectsDelegate <NSObject>
@optional
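With the availability change above, AVCaptureMetadataOutput and its delegate protocol gain macos(13.0) in addition to iOS / Mac Catalyst. A minimal sketch of wiring the output into a session, assuming a configured AVCaptureSession with a camera input already exists (the receiver class name is hypothetical):

```objc
// Minimal sketch (assumption: `session` already has a camera input attached).
#import <AVFoundation/AVFoundation.h>

@interface MetadataReceiver : NSObject <AVCaptureMetadataOutputObjectsDelegate>
@end

@implementation MetadataReceiver

- (void)attachToSession:(AVCaptureSession *)session
{
    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    if ([session canAddOutput:output]) {
        [session addOutput:output];
        [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        // metadataObjectTypes may only include types the output currently offers.
        if ([output.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
            output.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];
        }
    }
}

- (void)captureOutput:(AVCaptureOutput *)output
didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"Detected %lu metadata objects", (unsigned long)metadataObjects.count);
}

@end
```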
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h 2022-02-23 07:16:18.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h 2022-05-31 15:03:43.000000000 -0400
@@ -31,7 +31,7 @@
Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and -[AVCaptureSession addOutputWithNoConnections:] methods.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureOutput : NSObject
{
@private
@@ -136,7 +136,7 @@
AVCaptureOutputDataDroppedReasonLateData = 1,
AVCaptureOutputDataDroppedReasonOutOfBuffers = 2,
AVCaptureOutputDataDroppedReasonDiscontinuity = 3,
-} API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h 2022-02-23 07:16:17.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h 2022-05-31 14:49:51.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2016-2021 Apple Inc. All rights reserved.
+ Copyright 2016-2022 Apple Inc. All rights reserved.
*/
#import <AVFoundation/AVCaptureOutputBase.h>
@@ -46,7 +46,7 @@
AVCapturePhotoOutput implicitly supports wide color photo capture, following the activeColorSpace of the source AVCaptureDevice. If the source device's activeColorSpace is AVCaptureColorSpace_P3_D65, photos are encoded with wide color information, unless you've specified an output format of '420v', which does not support wide color.
*/
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhotoOutput : AVCaptureOutput
{
@private
@@ -298,7 +298,7 @@
AVCapturePhotoQualityPrioritizationSpeed = 1,
AVCapturePhotoQualityPrioritizationBalanced = 2,
AVCapturePhotoQualityPrioritizationQuality = 3,
-} API_AVAILABLE(ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
+} API_AVAILABLE(macos(13.0), ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@property maxPhotoQualityPrioritization
@@ -312,7 +312,7 @@
Setting the maxPhotoQualityPrioritization to .quality will turn on optical image stabilization if the -isHighPhotoQualitySupported of the source device's -activeFormat is true.
*/
-@property(nonatomic) AVCapturePhotoQualityPrioritization maxPhotoQualityPrioritization API_AVAILABLE(ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
+@property(nonatomic) AVCapturePhotoQualityPrioritization maxPhotoQualityPrioritization API_AVAILABLE(macos(13.0), ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@property stillImageStabilizationSupported
@@ -416,7 +416,7 @@
@discussion
This property supersedes AVCaptureDevice's isFlashModeSupported: It returns an array of AVCaptureFlashMode constants. To test whether a particular flash mode is supported, use NSArray's containsObject API: [photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]. This property is key-value observable.
*/
-@property(nonatomic, readonly) NSArray<NSNumber *> *supportedFlashModes API_UNAVAILABLE(macos);
+@property(nonatomic, readonly) NSArray<NSNumber *> *supportedFlashModes API_AVAILABLE(macos(13.0));
/*!
@property autoRedEyeReductionSupported
@@ -586,7 +586,7 @@
@class AVCapturePhoto;
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhotoOutput (AVCapturePhotoOutputDepthDataDeliverySupport)
/*!
@@ -666,7 +666,7 @@
In the event of an error, all expected callbacks are fired with an appropriate error.
*/
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@protocol AVCapturePhotoCaptureDelegate <NSObject>
@optional
@@ -850,7 +850,7 @@
@discussion
To take a picture, a client instantiates and configures an AVCapturePhotoSettings object, then calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:, passing the settings and a delegate to be informed when events relating to the photo capture occur. Since AVCapturePhotoSettings has no reference to the AVCapturePhotoOutput instance with which it will be used, minimal validation occurs while you configure an AVCapturePhotoSettings instance. The bulk of the validation is executed when you call AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:.
*/
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhotoSettings : NSObject <NSCopying>
{
@private
@@ -1011,7 +1011,7 @@
@discussion
flashMode takes the place of the deprecated AVCaptureDevice -flashMode API. Setting AVCaptureDevice.flashMode has no effect on AVCapturePhotoOutput, which only pays attention to the flashMode specified in your AVCapturePhotoSettings. The default value is AVCaptureFlashModeOff. Flash modes are defined in AVCaptureDevice.h. If you specify a flashMode of AVCaptureFlashModeOn, it wins over autoStillImageStabilizationEnabled=YES. When the device becomes very hot, the flash becomes temporarily unavailable until the device cools down (see AVCaptureDevice's -flashAvailable). While the flash is unavailable, AVCapturePhotoOutput's -supportedFlashModes property still reports AVCaptureFlashModeOn and AVCaptureFlashModeAuto as being available, thus allowing you to specify a flashMode of AVCaptureModeOn. You should always check the AVCaptureResolvedPhotoSettings provided to you in the AVCapturePhotoCaptureDelegate callbacks, as the resolved flashEnabled property will tell you definitively if the flash is being used.
*/
-@property(nonatomic) AVCaptureFlashMode flashMode API_UNAVAILABLE(macos);
+@property(nonatomic) AVCaptureFlashMode flashMode API_AVAILABLE(macos(13.0));
/*!
@property autoRedEyeReductionEnabled
@@ -1031,7 +1031,7 @@
@discussion
Default value is AVCapturePhotoQualityPrioritizationBalanced. The AVCapturePhotoOutput is capable of applying a variety of techniques to improve photo quality (reduce noise, preserve detail in low light, freeze motion, etc), depending on the source device's activeFormat. Some of these techniques can take significant processing time before the photo is returned to your delegate callback. The photoQualityPrioritization property allows you to specify your preferred quality vs speed of delivery. By default, speed and quality are considered to be of equal importance. When you specify AVCapturePhotoQualityPrioritizationSpeed, you indicate that speed should be prioritized at the expense of quality. Likewise, when you choose AVCapturePhotoQualityPrioritizationQuality, you signal your willingness to prioritize the very best quality at the expense of speed, and your readiness to wait (perhaps significantly) longer for the photo to be returned to your delegate.
*/
-@property(nonatomic) AVCapturePhotoQualityPrioritization photoQualityPrioritization API_AVAILABLE(ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
+@property(nonatomic) AVCapturePhotoQualityPrioritization photoQualityPrioritization API_AVAILABLE(macos(13.0), ios(13.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@property autoStillImageStabilizationEnabled
@@ -1095,7 +1095,7 @@
Starting in iOS 14.5 if you disable geometric distortion correction, the high resolution photo emitted by AVCapturePhotoOutput may be is smaller depending on the format.
*/
-@property(nonatomic, getter=isHighResolutionPhotoEnabled) BOOL highResolutionPhotoEnabled API_UNAVAILABLE(macos);
+@property(nonatomic, getter=isHighResolutionPhotoEnabled) BOOL highResolutionPhotoEnabled API_AVAILABLE(macos(13.0));
/*!
@property depthDataDeliveryEnabled
@@ -1307,7 +1307,7 @@
When you request a bracketed capture, your AVCapturePhotoCaptureDelegate's -captureOutput:didFinishProcessing{Photo | RawPhoto}... callbacks are called back bracketSettings.count times and provided with the corresponding AVCaptureBracketedStillImageSettings object from your request.
*/
-API_AVAILABLE(ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhotoBracketSettings : AVCapturePhotoSettings
{
@private
@@ -1395,7 +1395,7 @@
@discussion
When you initiate a photo capture request using -capturePhotoWithSettings:delegate:, some of your settings are not yet certain. For instance, auto flash and auto still image stabilization allow the AVCapturePhotoOutput to decide just in time whether to employ flash or still image stabilization, depending on the current scene. Once the request is issued, AVCapturePhotoOutput begins the capture, resolves the uncertain settings, and in its first callback informs you of its choices through an AVCaptureResolvedPhotoSettings object. This same object is presented to all the callbacks fired for a particular photo capture request. Its uniqueID property matches that of the AVCapturePhotoSettings instance you used to initiate the photo request.
*/
-API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(10.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureResolvedPhotoSettings : NSObject
{
@private
@@ -1568,7 +1568,7 @@
@discussion
Beginning in iOS 11, AVCapturePhotoOutput's AVCapturePhotoCaptureDelegate supports a simplified callback for delivering image data, namely -captureOutput:didFinishingProcessingPhoto:error:. This callback presents each image result for your capture request as an AVCapturePhoto object, an immutable wrapper from which various properties of the photo capture may be queried, such as the photo's preview pixel buffer, metadata, depth data, camera calibration data, and image bracket specific properties. AVCapturePhoto can wrap file-containerized photo results, such as HEVC encoded image data, containerized in the HEIC file format. CMSampleBufferRef, on the other hand, may only be used to express non file format containerized photo data. For this reason, the AVCapturePhotoCaptureDelegate protocol methods that return CMSampleBuffers have been deprecated in favor of -captureOutput:didFinishingProcessingPhoto:error:. A AVCapturePhoto wraps a single image result. For instance, if you've requested a bracketed capture of 3 images, your callback is called 3 times, each time delivering an AVCapturePhoto.
*/
-API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhoto : NSObject
{
@private
@@ -1716,7 +1716,7 @@
@protocol AVCapturePhotoFileDataRepresentationCustomizer;
-API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhoto (AVCapturePhotoConversions)
/*!
@@ -1810,9 +1810,9 @@
AVCaptureLensStabilizationStatusActive = 2,
AVCaptureLensStabilizationStatusOutOfRange = 3,
AVCaptureLensStabilizationStatusUnavailable = 4,
-} API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.15), ios(11.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCapturePhoto (AVCapturePhotoBracketedCapture)
/*!
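Several AVCapturePhotoOutput members that were previously iOS-only (supportedFlashModes, flashMode, photoQualityPrioritization, highResolutionPhotoEnabled) gain macos(13.0) availability in the hunks above. A minimal sketch exercising them, assuming a photo output that is already attached to a running session:

```objc
// Minimal sketch (assumption: `photoOutput` is attached to a running
// AVCaptureSession). It uses the properties whose availability was extended
// to macOS 13 in the diff above.
#import <AVFoundation/AVFoundation.h>

static void CapturePhoto(AVCapturePhotoOutput *photoOutput,
                         id<AVCapturePhotoCaptureDelegate> delegate)
{
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];

    // Flash availability is queried on the output, not the device.
    if ([photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
        settings.flashMode = AVCaptureFlashModeAuto;
    }

    // Request the best quality the output permits; requests are clamped by
    // maxPhotoQualityPrioritization, so using that value is always valid.
    settings.photoQualityPrioritization = photoOutput.maxPhotoQualityPrioritization;

    [photoOutput capturePhotoWithSettings:settings delegate:delegate];
}
```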
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h 2022-02-23 07:16:14.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSession.h 2022-05-31 15:04:21.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
#import <AVFoundation/AVBase.h>
@@ -23,7 +23,7 @@
@discussion
The notification object is the AVCaptureSession instance that encountered a runtime error. The userInfo dictionary contains an NSError for the key AVCaptureSessionErrorKey.
*/
-AVF_EXPORT NSString *const AVCaptureSessionRuntimeErrorNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionRuntimeErrorNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionErrorKey
@@ -33,7 +33,7 @@
@discussion
AVCaptureSessionErrorKey may be found in the userInfo dictionary provided with an AVCaptureSessionRuntimeErrorNotification. The NSError associated with the notification gives greater detail on the nature of the error, and in some cases recovery suggestions.
*/
-AVF_EXPORT NSString *const AVCaptureSessionErrorKey API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionErrorKey API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionDidStartRunningNotification
@@ -43,7 +43,7 @@
@discussion
Clients may observe the AVCaptureSessionDidStartRunningNotification to know when an instance of AVCaptureSession starts running.
*/
-AVF_EXPORT NSString *const AVCaptureSessionDidStartRunningNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionDidStartRunningNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionDidStopRunningNotification
@@ -53,7 +53,7 @@
@discussion
Clients may observe the AVCaptureSessionDidStopRunningNotification to know when an instance of AVCaptureSession stops running. An AVCaptureSession instance may stop running automatically due to external system conditions, such as the device going to sleep, or being locked by a user.
*/
-AVF_EXPORT NSString *const AVCaptureSessionDidStopRunningNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionDidStopRunningNotification API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionWasInterruptedNotification
@@ -65,7 +65,7 @@
Beginning in iOS 9.0, the AVCaptureSessionWasInterruptedNotification userInfo dictionary contains an AVCaptureSessionInterruptionReasonKey indicating the reason for the interruption.
*/
-AVF_EXPORT NSString *const AVCaptureSessionWasInterruptedNotification API_AVAILABLE(macos(10.14), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionWasInterruptedNotification API_AVAILABLE(macos(10.14), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@@ -90,7 +90,7 @@
AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient = 3,
AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps = 4,
AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableDueToSystemPressure API_AVAILABLE(ios(11.1), macCatalyst(14.0)) = 5,
-} API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@@ -101,7 +101,7 @@
@discussion
AVCaptureSessionInterruptionReasonKey may be found in the userInfo dictionary provided with an AVCaptureSessionWasInterruptedNotification. The NSNumber associated with the notification tells you why the interruption occurred.
*/
-AVF_EXPORT NSString *const AVCaptureSessionInterruptionReasonKey API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionInterruptionReasonKey API_AVAILABLE(ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionInterruptionSystemPressureStateKey
@@ -111,7 +111,7 @@
@discussion
This key is only present when the AVCaptureSessionInterruptionReasonKey equals AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableDueToSystemPressure.
*/
-AVF_EXPORT NSString *const AVCaptureSessionInterruptionSystemPressureStateKey API_AVAILABLE(ios(11.1), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionInterruptionSystemPressureStateKey API_AVAILABLE(ios(11.1), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionInterruptionEndedNotification
@@ -121,7 +121,7 @@
@discussion
Clients may observe the AVCaptureSessionInterruptionEndedNotification to know when an instance of AVCaptureSession ceases to be interrupted, for example, when a phone call ends, and hardware resources needed to run the session are again available. When appropriate, the AVCaptureSession instance that was previously stopped in response to an interruption will automatically restart once the interruption ends.
*/
-AVF_EXPORT NSString *const AVCaptureSessionInterruptionEndedNotification API_AVAILABLE(macos(10.14), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT NSString *const AVCaptureSessionInterruptionEndedNotification API_AVAILABLE(macos(10.14), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@enum AVCaptureVideoOrientation
@@ -142,7 +142,7 @@
AVCaptureVideoOrientationPortraitUpsideDown = 2,
AVCaptureVideoOrientationLandscapeRight = 3,
AVCaptureVideoOrientationLandscapeLeft = 4,
-} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+} API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
#pragma mark - AVCaptureSession
@@ -159,7 +159,7 @@
@discussion
To perform a real-time capture, a client may instantiate AVCaptureSession and add appropriate AVCaptureInputs, such as AVCaptureDeviceInput, and outputs, such as AVCaptureMovieFileOutput. [AVCaptureSession startRunning] starts the flow of data from the inputs to the outputs, and [AVCaptureSession stopRunning] stops the flow. A client may set the sessionPreset property to customize the quality level or bitrate of the output.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureSession : NSObject
{
@private
@@ -264,6 +264,12 @@
@discussion
An AVCaptureOutput instance can only be added to a session using -addOutput: if -canAddOutput: returns YES.
+
+ On iOS and Mac Catalyst, some limitations to adding combinations of different types of outputs apply:
+ - A maximum of one output of each type may be added. For applications linked on or after iOS 16.0, this restriction no longer applies to AVCaptureVideoDataOutputs. When adding more than one AVCaptureVideoDataOutput, AVCaptureSession.hardwareCost must be taken into account.
+ - A session cannot contain both an AVCaptureStillImageOutput and an AVCapturePhotoOutput at the same time.
+ - Prior to iOS 16.0, an AVCaptureVideoDataOutput and an AVCaptureMovieFileOutput may be added to the same session, but only one may have its connection active. When both have their connections enabled, the AVCaptureMovieFileOutput "wins" and the AVCaptureVideoDataOutput's connection becomes inactive. For applications linked on or after iOS 16.0, this restriction has been lifted. When adding multiple AVCaptureVideoDataOutputs or a combination of AVCaptureVideoDataOutputs and an AVCaptureMovieFileOutput, AVCaptureSession.hardwareCost must be taken into account.
+ - Similarly, prior to iOS 16.0, an AVCaptureAudioDataOutput and an AVCaptureMovieFileOutput may be added to the same session, but only one may have its connection active. When both have their connections enabled, the AVCaptureMovieFileOutput "wins" and the AVCaptureAudioDataOutput's connection becomes inactive. For applications linked on or after iOS 16.0, this restriction has been lifted.
*/
- (BOOL)canAddOutput:(AVCaptureOutput *)output;
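The expanded -canAddOutput: discussion above notes that, for apps linked on or after iOS 16.0, more than one AVCaptureVideoDataOutput may be added provided the hardware budget allows it. A minimal sketch of the pattern, treating -canAddOutput: as the authoritative check:

```objc
// Minimal sketch (assumption: app linked against the iOS 16 SDK and
// `session` already has one video data output).
#import <AVFoundation/AVFoundation.h>

static void AddSecondVideoDataOutput(AVCaptureSession *session)
{
    AVCaptureVideoDataOutput *secondOutput = [[AVCaptureVideoDataOutput alloc] init];
    if ([session canAddOutput:secondOutput]) {
        [session addOutput:secondOutput];
    } else {
        NSLog(@"Session rejected a second video data output for this configuration");
    }
}
```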
@@ -409,6 +415,36 @@
@property(nonatomic, readonly, getter=isInterrupted) BOOL interrupted API_AVAILABLE(ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos);
/*!
+ @property multitaskingCameraAccessSupported
+ @abstract
+ Returns whether the session can be configured to use the camera while multitasking.
+
+ @discussion
+ This property can be used to determine whether multitaskingCameraAccessEnabled may be set to YES. When this property changes from YES to NO, multitaskingCameraAccessEnabled also reverts to NO.
+
+ This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isMultitaskingCameraAccessSupported) BOOL multitaskingCameraAccessSupported API_AVAILABLE(ios(16.0)) API_UNAVAILABLE(macos, macCatalyst, tvos, watchos);
+
+/*!
+ @property multitaskingCameraAccessEnabled
+ @abstract
+ Indicates whether the session is configured to use the camera while multitasking.
+
+ @discussion
+ The default value is NO. This property may only be set if -isMultitaskingCameraAccessSupported returns YES. This property must be set before the session starts running.
+
+ AVCaptureSessions that are configured to use the camera while multitasking will not be interrupted with AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps.
+
+ For applications that have the com.apple.developer.avfoundation.multitasking-camera-access entitlement, this property defaults to YES if -isMultitaskingCameraAccessSupported returns YES.
+
+ To learn about best practices for using the camera while multitasking, refer to the Accessing the Camera While Multitasking article on developer.apple.com. See https://developer.apple.com/documentation/avkit/accessing_the_camera_while_multitasking\.
+
+ This property is key-value observable.
+ */
+@property(nonatomic, getter=isMultitaskingCameraAccessEnabled) BOOL multitaskingCameraAccessEnabled API_AVAILABLE(ios(16.0)) API_UNAVAILABLE(macos, macCatalyst, tvos, watchos);
+
+/*!
@property usesApplicationAudioSession
@abstract
Indicates whether the receiver will use the application's AVAudioSession for recording.
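The new multitaskingCameraAccess properties added in the hunk above follow a supported-then-enable pattern, and enabling must happen before the session starts running. A minimal sketch:

```objc
// Minimal sketch of the opt-in pattern described above (iOS 16 only).
// multitaskingCameraAccessEnabled may only be set when the supported
// property returns YES, and must be set before startRunning.
#import <AVFoundation/AVFoundation.h>

static void EnableMultitaskingCameraAccessIfPossible(AVCaptureSession *session)
{
    if (session.multitaskingCameraAccessSupported) {
        session.multitaskingCameraAccessEnabled = YES;
    }
    [session startRunning];
}
```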
@@ -486,6 +522,25 @@
*/
@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMClockRef masterClock API_DEPRECATED_WITH_REPLACEMENT("synchronizationClock", macos(10.9, 12.3), ios(7.0, 15.4), macCatalyst(14.0, 15.4)) API_UNAVAILABLE(tvos);
+/*!
+ @property hardwareCost
+ @abstract
+ Indicates the percentage of the session's available hardware budget currently in use.
+
+ @discussion
+ The value of this property is a float from 0.0 => 1.0 indicating how much of the session's available hardware is in use as a percentage, given the currently connected inputs and outputs and the features for which you've opted in. When your hardwareCost is greater than 1.0, the capture session cannot run your desired configuration due to hardware constraints, so you receive an AVCaptureSessionRuntimeErrorNotification when attempting to start it running. Default value is 0.
+
+ Contributors to hardwareCost include:
+ - Whether the source devices' active formats use the full sensor (4:3) or a crop (16:9). Cropped formats require lower hardware bandwidth, and therefore lower the cost.
+ - The max frame rate supported by the source devices' active formats. The higher the max frame rate, the higher the cost.
+ - Whether the source devices' active formats are binned or not. Binned formats require substantially less hardware bandwidth, and therefore result in a lower cost.
+ - The number of sources configured to deliver streaming disparity / depth via AVCaptureDepthDataOutput. The higher the number of cameras configured to produce depth, the higher the cost.
+ In order to reduce hardwareCost, consider picking a sensor-cropped activeFormat, or a binned format. You may also use AVCaptureDeviceInput's videoMinFrameDurationOverride property to artificially limit the max frame rate (which is the reciprocal of the min frame duration) of a source device to a lower value. By doing so, you only pay the hardware cost for the max frame rate you intend to use.
+
+ AVCaptureMultiCamSessions always computes this hardwareCost. AVCaptureSessions only computes a non-zero hardwareCost when multiple AVCaptureVideoDataOutputs or an AVCaptureMovieFileOutput and one or more AVCaptureVideoDataOutputs are added to the session.
+ */
+@property(nonatomic, readonly) float hardwareCost API_AVAILABLE(ios(16.0), macCatalyst(16.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
+
@end
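The new hardwareCost property documented above reports the fraction of the session's hardware budget in use; a value above 1.0 means the configuration cannot run and startRunning will post AVCaptureSessionRuntimeErrorNotification. A minimal sketch of checking it after configuration:

```objc
// Minimal sketch (iOS 16 / Mac Catalyst 16): reading the new hardwareCost
// property after the session has been configured.
#import <AVFoundation/AVFoundation.h>

static BOOL ConfigurationFitsHardwareBudget(AVCaptureSession *session)
{
    // 0.0-1.0 is within budget; anything above 1.0 means the current
    // combination of inputs, outputs and formats cannot run.
    float cost = session.hardwareCost;
    NSLog(@"Current hardware cost: %.2f", cost);
    return cost <= 1.0f;
}
```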
@@ -507,6 +562,7 @@
/*!
@property multiCamSupported
+ @abstract
Indicates whether multicam session is supported on this platform.
@discussion
@@ -585,7 +641,7 @@
Connections involving video expose video specific properties, such as videoMirrored and videoOrientation.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureConnection : NSObject
{
@private
@@ -804,7 +860,7 @@
@discussion
This property is only applicable to AVCaptureConnection instances involving video. In such connections, the videoMinFrameDuration property may only be set if -isVideoMinFrameDurationSupported returns YES.
- This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On Mac OS X, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
+ This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On macOS, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
*/
@property(nonatomic, readonly, getter=isVideoMinFrameDurationSupported) BOOL supportsVideoMinFrameDuration API_DEPRECATED("Use AVCaptureDevice's activeFormat.videoSupportedFrameRateRanges instead.", ios(5.0, 7.0), macCatalyst(14.0, 14.0)) API_UNAVAILABLE(tvos);
@@ -816,7 +872,7 @@
@discussion
The value of this property is a CMTime specifying the minimum duration of each video frame output by the receiver, placing a lower bound on the amount of time that should separate consecutive frames. This is equivalent to the reciprocal of the maximum frame rate. A value of kCMTimeZero or kCMTimeInvalid indicates an unlimited maximum frame rate. The default value is kCMTimeInvalid.
- This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On Mac OS X, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
+ This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On macOS, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
*/
@property(nonatomic) CMTime videoMinFrameDuration API_DEPRECATED("Use AVCaptureDevice's activeVideoMinFrameDuration instead.", ios(5.0, 7.0), macCatalyst(14.0, 14.0)) API_UNAVAILABLE(tvos);
@@ -828,7 +884,7 @@
@discussion
This property is only applicable to AVCaptureConnection instances involving video. In such connections, the videoMaxFrameDuration property may only be set if -isVideoMaxFrameDurationSupported returns YES.
- This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On Mac OS X, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
+ This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On macOS, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
*/
@property(nonatomic, readonly, getter=isVideoMaxFrameDurationSupported) BOOL supportsVideoMaxFrameDuration API_AVAILABLE(macos(10.9)) API_DEPRECATED("Use AVCaptureDevice's activeFormat.videoSupportedFrameRateRanges instead.", ios(5.0, 7.0), macCatalyst(14.0, 14.0)) API_UNAVAILABLE(tvos);
@@ -840,7 +896,7 @@
@discussion
The value of this property is a CMTime specifying the maximum duration of each video frame output by the receiver, placing an upper bound on the amount of time that should separate consecutive frames. This is equivalent to the reciprocal of the minimum frame rate. A value of kCMTimeZero or kCMTimeInvalid indicates an unlimited minimum frame rate. The default value is kCMTimeInvalid.
- This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On Mac OS X, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
+ This property is deprecated on iOS, where min and max frame rate adjustments are applied exclusively at the AVCaptureDevice using the activeVideoMinFrameDuration and activeVideoMaxFrameDuration properties. On macOS, frame rate adjustments are supported both at the AVCaptureDevice and at AVCaptureConnection, enabling connections to output different frame rates.
*/
@property(nonatomic) CMTime videoMaxFrameDuration API_AVAILABLE(macos(10.9)) API_DEPRECATED("Use AVCaptureDevice's activeVideoMaxFrameDuration instead.", ios(5.0, 7.0), macCatalyst(14.0, 14.0)) API_UNAVAILABLE(tvos);
@@ -951,7 +1007,7 @@
@discussion
An AVCaptureConnection from an input producing audio to an output receiving audio exposes an array of AVCaptureAudioChannel objects, one for each channel of audio available. Iterating through these audio channel objects, a client may poll for audio levels. Instances of AVCaptureAudioChannel cannot be created directly.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureAudioChannel : NSObject
{
@private
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h 2022-02-23 07:59:45.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h 2022-05-31 15:03:41.000000000 -0400
@@ -18,7 +18,7 @@
@discussion
Clients may use an AVCaptureSessionPreset to set the format for output on an AVCaptureSession.
*/
-typedef NSString * AVCaptureSessionPreset NS_TYPED_ENUM API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+typedef NSString * AVCaptureSessionPreset NS_TYPED_ENUM API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetPhoto
@@ -28,7 +28,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetPhoto for full resolution photo quality output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetPhoto API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetPhoto API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetHigh
@@ -38,7 +38,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetHigh to achieve high quality video and audio output. AVCaptureSessionPresetHigh is the default sessionPreset value.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetHigh API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetHigh API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetMedium
@@ -48,7 +48,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetMedium to achieve output video and audio bitrates suitable for sharing over WiFi.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetMedium API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetMedium API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetLow
@@ -58,7 +58,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetLow to achieve output video and audio bitrates suitable for sharing over 3G.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetLow API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetLow API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPreset320x240
@@ -78,7 +78,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset352x288 to achieve CIF quality (352x288) output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset352x288 API_AVAILABLE(macos(10.7), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset352x288 API_AVAILABLE(macos(10.7), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPreset640x480
@@ -88,7 +88,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset640x480 to achieve VGA quality (640x480) output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset640x480 API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset640x480 API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPreset960x540
@@ -108,7 +108,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1280x720 to achieve 1280x720 output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1280x720 API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1280x720 API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPreset1920x1080
@@ -118,7 +118,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1920x1080 to achieve 1920x1080 output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1920x1080 API_AVAILABLE(macos(10.15), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1920x1080 API_AVAILABLE(macos(10.15), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPreset3840x2160
@@ -128,7 +128,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset3840x2160 to achieve 3840x2160 output.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset3840x2160 API_AVAILABLE(macos(10.15), ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset3840x2160 API_AVAILABLE(macos(10.15), ios(9.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetiFrame960x540
@@ -138,7 +138,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame960x540 to achieve 960x540 quality iFrame H.264 video at ~30 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame960x540 API_AVAILABLE(macos(10.9), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame960x540 API_AVAILABLE(macos(10.9), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetiFrame1280x720
@@ -148,7 +148,7 @@
@discussion
Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame1280x720 to achieve 1280x720 quality iFrame H.264 video at ~40 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame1280x720 API_AVAILABLE(macos(10.9), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame1280x720 API_AVAILABLE(macos(10.9), ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos);
/*!
@constant AVCaptureSessionPresetInputPriority
@@ -158,7 +158,7 @@
@discussion
By calling -setSessionPreset:, clients can easily configure an AVCaptureSession to produce a desired quality of service level. The session configures its inputs and outputs optimally to produce the QoS level indicated. Clients who need to ensure a particular input format is chosen can use AVCaptureDevice's -setActiveFormat: method. When a client sets the active format on a device, the associated session's -sessionPreset property automatically changes to AVCaptureSessionPresetInputPriority. This change indicates that the input format selected by the client now dictates the quality of service level provided at the outputs. When a client sets the session preset to anything other than AVCaptureSessionPresetInputPriority, the session resumes responsibility for configuring inputs and outputs, and is free to change its inputs' activeFormat as needed.
*/
-AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetInputPriority API_AVAILABLE(ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED;
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetInputPriority API_AVAILABLE(ios(7.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
NS_ASSUME_NONNULL_END
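For context on the sessionPreset / activeFormat interaction described in the AVCaptureSessionPresetInputPriority discussion above, a minimal Swift sketch (capture APIs remain unavailable on watchOS; the device, preset, and format choices here are placeholders):

```swift
import AVFoundation

func configure(_ session: AVCaptureSession, with device: AVCaptureDevice) throws {
    session.beginConfiguration()
    session.sessionPreset = .high                      // session picks input/output formats
    let input = try AVCaptureDeviceInput(device: device)
    if session.canAddInput(input) { session.addInput(input) }

    // Setting an explicit activeFormat switches the session's preset to
    // .inputPriority, as described in the discussion above.
    if let format = device.formats.last {
        try device.lockForConfiguration()
        device.activeFormat = format
        device.unlockForConfiguration()
    }
    session.commitConfiguration()
}
```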
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h 2022-02-23 07:10:14.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureStillImageOutput.h 2022-05-31 15:03:41.000000000 -0400
@@ -25,7 +25,7 @@
@discussion
Instances of AVCaptureStillImageOutput can be used to capture, on demand, high quality snapshots from a realtime capture source. Clients can request a still image for the current time using the captureStillImageAsynchronouslyFromConnection:completionHandler: method. Clients can also configure still image outputs to produce still images in specific image formats.
*/
-API_DEPRECATED("Use AVCapturePhotoOutput instead.", macos(10.7, 10.15), ios(4.0, 10.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_DEPRECATED("Use AVCapturePhotoOutput instead.", macos(10.7, 10.15), ios(4.0, 10.0)) API_UNAVAILABLE(tvos, watchos)
@interface AVCaptureStillImageOutput : AVCaptureOutput
{
@private
@@ -165,7 +165,7 @@
@discussion
AVCaptureBracketedStillImageSettings may not be instantiated directly.
*/
-API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureBracketedStillImageSettings : NSObject
AV_INIT_UNAVAILABLE
@@ -183,7 +183,7 @@
@discussion
An AVCaptureManualExposureBracketedStillImageSettings instance defines the exposure duration and ISO settings that should be applied to one image in a bracket. An array of settings objects is passed to -[AVCaptureStillImageOutput captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:]. Min and max duration and ISO values are queryable properties of the AVCaptureDevice supplying data to an AVCaptureStillImageOutput instance. If you wish to leave exposureDuration unchanged for this bracketed still image, you may pass the special value AVCaptureExposureDurationCurrent. To keep ISO unchanged, you may pass AVCaptureISOCurrent (see AVCaptureDevice.h).
*/
-API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureManualExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
/*!
@@ -227,7 +227,7 @@
@discussion
An AVCaptureAutoExposureBracketedStillImageSettings instance defines the exposure target bias setting that should be applied to one image in a bracket. An array of settings objects is passed to -[AVCaptureStillImageOutput captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:]. Min and max exposure target bias are queryable properties of the AVCaptureDevice supplying data to an AVCaptureStillImageOutput instance. If you wish to leave exposureTargetBias unchanged for this bracketed still image, you may pass the special value AVCaptureExposureTargetBiasCurrent (see AVCaptureDevice.h).
*/
-API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(ios(8.0), macCatalyst(14.0)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureAutoExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
/*!
@@ -262,7 +262,7 @@
In a bracketed capture, AVCaptureDevice flashMode property is ignored (flash is forced off), as is AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property (stabilization is forced off).
*/
-API_DEPRECATED("Use AVCapturePhotoOutput instead.", macos(10.7, 10.15), ios(4.0, 10.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_DEPRECATED("Use AVCapturePhotoOutput instead.", macos(10.7, 10.15), ios(4.0, 10.0)) API_UNAVAILABLE(tvos, watchos)
@interface AVCaptureStillImageOutput (AVCaptureStillImageOutputBracketedCapture)
/*!
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h 2022-02-23 07:13:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h 2022-06-03 18:07:15.000000000 -0400
@@ -26,7 +26,7 @@
@discussion
Instances of AVCaptureVideoDataOutput produce video frames suitable for processing using other media APIs. Applications can access the frames with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureVideoDataOutput : AVCaptureOutput
{
@private
@@ -84,7 +84,7 @@
@discussion
See AVVideoSettings.h for more information on how to construct a video settings dictionary. To receive samples in their device native format, set this property to an empty dictionary (i.e. [NSDictionary dictionary]). To receive samples in a default uncompressed format, set this property to nil. Note that after this property is set to nil, subsequent querying of this property will yield a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
- On iOS, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA.
+ On iOS versions prior to iOS 16.0, the only supported key is kCVPixelBufferPixelFormatTypeKey. Use -availableVideoCVPixelFormatTypes for the list of supported pixel formats. For apps linked on or after iOS 16.0, kCVPixelBufferPixelFormatTypeKey, kCVPixelBufferWidthKey, and kCVPixelBufferHeightKey are supported. The width and height must match the videoOrientation specified on the output's AVCaptureConnection or an NSInvalidArgumentException is thrown. The aspect ratio of width and height must match the aspect ratio of the source's activeFormat (corrected for the connection's videoOrientation) or an NSInvalidArgumentException is thrown. If width or height exceeds the source's activeFormat's width or height, an NSInvalidArgumentException is thrown. Changing width and height when deliversPreviewSizedOutputBuffers is set to YES is not supported and throws an NSInvalidArgumentException.
*/
@property(nonatomic, copy, null_resettable) NSDictionary<NSString *, id> *videoSettings;
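A short Swift sketch of the expanded videoSettings behavior described above (a hypothetical configuration; the width/height keys are honored only for apps linked on or after iOS 16.0 and must satisfy the orientation and aspect-ratio constraints listed in the discussion):

```swift
import AVFoundation
import CoreVideo

func configure(_ output: AVCaptureVideoDataOutput) {
    output.videoSettings = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
        // Supported only when linked on or after iOS 16.0; must match the
        // connection's videoOrientation and the source format's aspect ratio.
        kCVPixelBufferWidthKey as String: 1280,
        kCVPixelBufferHeightKey as String: 720,
    ]
}
```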
@@ -217,7 +217,7 @@
@abstract
Defines an interface for delegates of AVCaptureVideoDataOutput to receive captured video sample buffers and be notified of late sample buffers that were dropped.
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@protocol AVCaptureVideoDataOutputSampleBufferDelegate <NSObject>
@optional
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h 2022-02-23 07:56:30.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoPreviewLayer.h 2022-06-03 18:07:15.000000000 -0400
@@ -29,7 +29,7 @@
@discussion
An AVCaptureVideoPreviewLayer instance is a subclass of CALayer and is therefore suitable for insertion in a layer hierarchy as part of a graphical interface. One creates an AVCaptureVideoPreviewLayer instance with the capture session to be previewed, using +layerWithSession: or -initWithSession:. Using the @"videoGravity" property, one can influence how content is viewed relative to the layer bounds. On some hardware configurations, the orientation of the layer can be manipulated using @"orientation" and @"mirrored".
*/
-API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) __WATCHOS_PROHIBITED
+API_AVAILABLE(macos(10.7), ios(4.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos)
@interface AVCaptureVideoPreviewLayer : CALayer
{
@private
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h 2022-02-23 07:56:31.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVComposition.h 2022-05-31 15:02:35.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -212,14 +212,32 @@
@result A BOOL value indicating the success of the insertion.
@discussion
You provide a reference to an AVAsset and the timeRange within it that you want to insert. You specify the start time in the destination composition at which the timeRange should be inserted.
-
This method may add new tracks to ensure that all tracks of the asset are represented in the inserted timeRange.
-
Note that the media data for the inserted timeRange will be presented at its natural duration and rate. It can be scaled to a different duration and presented at a different rate via -scaleTimeRange:toDuration:.
-
Existing content at the specified startTime will be pushed out by the duration of timeRange.
+ Note that metadata will not be automatically copied.
+*/
+- (BOOL)insertTimeRange:(CMTimeRange)timeRange ofAsset:(AVAsset *)asset atTime:(CMTime)startTime error:(NSError * _Nullable * _Nullable)outError API_DEPRECATED_WITH_REPLACEMENT("insertTimeRange:ofAsset:atTime:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+
+/*!
+ @method insertTimeRange:ofAsset:atTime:completionHandler:
+ @abstract Inserts all the tracks of a timeRange of an asset into a composition.
+ @param timeRange
+ Specifies the timeRange of the asset to be inserted.
+ @param asset
+ Specifies the asset that contains the tracks that are to be inserted. Only instances of AVURLAsset and AVComposition are supported (AVComposition starting in macOS 10.10 and iOS 8.0).
+ @param startTime
+ Specifies the time at which the inserted tracks are to be presented by the composition.
+ @param completionHandler
+ A block that is invoked when the insertion is complete. If the error parameter is non-nil, it describes a failure that may be reported to the user, e.g. the asset that was selected for insertion in the composition is restricted by copy-protection.
+ @discussion
+ You provide a reference to an AVAsset and the timeRange within it that you want to insert. You specify the start time in the destination composition at which the timeRange should be inserted.
+ This method may add new tracks to ensure that all tracks of the asset are represented in the inserted timeRange.
+ Note that the media data for the inserted timeRange will be presented at its natural duration and rate. It can be scaled to a different duration and presented at a different rate via -scaleTimeRange:toDuration:.
+ Existing content at the specified startTime will be pushed out by the duration of timeRange.
+ Note that metadata will not be automatically copied.
*/
-- (BOOL)insertTimeRange:(CMTimeRange)timeRange ofAsset:(AVAsset *)asset atTime:(CMTime)startTime error:(NSError * _Nullable * _Nullable)outError;
+- (void)insertTimeRange:(CMTimeRange)timeRange ofAsset:(AVAsset *)asset atTime:(CMTime)startTime completionHandler:(void (^)(NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
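A minimal Swift sketch of adopting the new completion-handler variant declared above in place of the deprecated synchronous method (the composition, asset, and time range are assumed to come from elsewhere):

```swift
import AVFoundation

func insert(_ asset: AVAsset, range: CMTimeRange, into composition: AVMutableComposition) {
    composition.insertTimeRange(range, of: asset, at: .zero) { error in
        if let error = error {
            print("Insertion failed: \(error)")
        } else {
            // All tracks in `range` are now represented at the start of the composition.
        }
    }
}
```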
/*!
@method insertEmptyTimeRange:
@@ -368,6 +386,52 @@
@end
+/*!
+ @category AVComposition (SynchronousAssetInterface)
+ @abstract Redeclarations of async-only AVAsset interfaces to allow synchronous usage in the synchronous subclass.
+ @discussion
+ See AVAsset's interface for more information about these interfaces.
+ */
+@interface AVComposition (SynchronousAssetInterface)
+
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys;
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsBestMatchingPreferredLanguages:(NSArray<NSString *> *)preferredLanguages;
+- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (CMPersistentTrackID)unusedTrackID;
+
+#if __swift__
+@property (nonatomic, readonly) CMTime duration;
+@property (nonatomic, readonly) float preferredRate;
+@property (nonatomic, readonly) float preferredVolume;
+@property (nonatomic, readonly) CGAffineTransform preferredTransform;
+@property (nonatomic, readonly) AVDisplayCriteria *preferredDisplayCriteria API_AVAILABLE(tvos(11.2)) API_UNAVAILABLE(ios) API_UNAVAILABLE(macos, watchos);
+@property (nonatomic, readonly) CMTime minimumTimeOffsetFromLive API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+@property (nonatomic, readonly) BOOL providesPreciseDurationAndTiming;
+@property (nonatomic, readonly) NSArray<AVAssetTrackGroup *> *trackGroups API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, nullable) AVMetadataItem *creationDate API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, nullable) NSString *lyrics;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (readonly) NSArray<NSLocale *> *availableChapterLocales API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0), watchos(2.0));
+@property (nonatomic, readonly) NSArray<AVMediaSelection *> *allMediaSelections API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+@property (nonatomic, readonly) BOOL hasProtectedContent API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL canContainFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL containsFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) CMTime overallDurationHint API_AVAILABLE(macos(10.12.2), ios(10.2), tvos(10.2), watchos(3.2));
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isExportable) BOOL exportable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isReadable) BOOL readable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isComposable) BOOL composable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isCompatibleWithSavedPhotosAlbum) BOOL compatibleWithSavedPhotosAlbum API_AVAILABLE(ios(5.0), tvos(9.0)) API_UNAVAILABLE(macos, watchos);
+@property (nonatomic, readonly, getter=isCompatibleWithAirPlayVideo) BOOL compatibleWithAirPlayVideo API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif // __swift__
+
+@end
+
NS_ASSUME_NONNULL_END
#else
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h 2022-02-23 07:13:10.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCompositionTrack.h 2022-06-03 18:07:13.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2019 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -169,6 +169,7 @@
@result A BOOL value indicating the success of the insertion.
@discussion
This method is equivalent to (but more efficient than) calling -insertTimeRange:ofTrack:atTime:error: for each timeRange/track pair. If this method returns an error, none of the time ranges will be inserted into the composition track. To specify an empty time range, pass NSNull for the track and a time range of starting at kCMTimeInvalid with a duration of the desired empty edit.
+ This method throws an exception if the time ranges and tracks arrays do not have the same count.
*/
- (BOOL)insertTimeRanges:(NSArray<NSValue *> *)timeRanges ofTracks:(NSArray<AVAssetTrack *> *)tracks atTime:(CMTime)startTime error:(NSError * _Nullable * _Nullable)outError API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
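As a sketch of the array-count requirement noted above (hypothetical inputs; the tracks would typically come from a source AVAsset):

```swift
import AVFoundation

func insert(_ tracks: [AVAssetTrack], ranges: [CMTimeRange],
            into track: AVMutableCompositionTrack) throws {
    precondition(tracks.count == ranges.count,
                 "Mismatched counts raise an exception in insertTimeRanges(_:of:at:)")
    let rangeValues = ranges.map { NSValue(timeRange: $0) }
    try track.insertTimeRanges(rangeValues, of: tracks, at: .zero)
}
```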
@@ -267,11 +268,53 @@
@param replacementFormatDescription
A CMFormatDescription to replace the specified format description or NULL to indicate that a previous replacement of originalFormatDescription should be cancelled.
@discussion You can use this method to make surgical changes to a track's format descriptions, such as adding format description extensions to a format description or changing the audio channel layout of an audio track. You should note that a format description can have extensions of type kCMFormatDescriptionExtension_VerbatimSampleDescription and kCMFormatDescriptionExtension_VerbatimISOSampleEntry; if you modify a copy of a format description, you should delete those extensions from the copy or your changes might be ignored. Also note that format description replacements are not transferred when performing editing operations on AVMutableCompositionTrack objects; for instance, inserting a range of a composition track into another composition track does not transfer any replacement format descriptions.
+ This method throws an exception if the media type of the replacement does not match the original format description.
*/
- (void)replaceFormatDescription:(CMFormatDescriptionRef)originalFormatDescription withFormatDescription:(nullable CMFormatDescriptionRef)replacementFormatDescription API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
@end
+/*!
+ @category AVCompositionTrack (SynchronousTrackInterface)
+ @abstract Redeclarations of async-only AVAssetTrack interfaces to allow synchronous usage in the synchronous subclass.
+ @discussion
+ See AVAssetTrack's interface for more information about these interfaces.
+ */
+@interface AVCompositionTrack (SynchronousTrackInterface)
+
+- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (CMTime)samplePresentationTimeForTrackTime:(CMTime)trackTime;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+
+#if __swift__
+@property (nonatomic, readonly) NSArray *formatDescriptions;
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isDecodable) BOOL decodable API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+@property (nonatomic, readonly, getter=isEnabled) BOOL enabled;
+@property (nonatomic, readonly, getter=isSelfContained) BOOL selfContained;
+@property (nonatomic, readonly) long long totalSampleDataLength;
+@property (nonatomic, readonly) CMTimeRange timeRange;
+@property (nonatomic, readonly) CMTimeScale naturalTimeScale;
+@property (nonatomic, readonly) float estimatedDataRate;
+@property (nonatomic, readonly, nullable) NSString *languageCode;
+@property (nonatomic, readonly, nullable) NSString *extendedLanguageTag;
+@property (nonatomic, readonly) CGSize naturalSize;
+@property (nonatomic, readonly) CGAffineTransform preferredTransform;
+@property (nonatomic, readonly) float preferredVolume;
+@property (nonatomic, readonly) BOOL hasAudioSampleDependencies API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+@property (nonatomic, readonly) float nominalFrameRate;
+@property (nonatomic, readonly) CMTime minFrameDuration API_AVAILABLE(macos(10.10), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) BOOL requiresFrameReordering API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *metadata API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) BOOL canProvideSampleCursors API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0));
+#endif // __swift__
+
+@end
+
NS_ASSUME_NONNULL_END
#else
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h 2022-02-23 07:13:08.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVError.h 2022-06-03 18:09:29.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -114,7 +114,9 @@
AVErrorRosettaNotInstalled API_AVAILABLE(macos(11.0), ios(14.0), tvos(14.0), watchos(7.0)) = -11877,
AVErrorOperationCancelled API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0)) = -11878,
AVErrorContentKeyRequestCancelled API_AVAILABLE(macos(11.4), ios(14.6), tvos(14.6), watchos(7.5)) = -11879,
-
+ AVErrorInvalidSampleCursor API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0)) = -11880,
+ AVErrorFailedToLoadSampleData API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0)) = -11881,
+ AVErrorAirPlayReceiverTemporarilyUnavailable API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0)) = -11882,
};
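A small Swift sketch for recognizing the new error codes added above by their raw values (domain check only; the helper name is illustrative):

```swift
import AVFoundation

func isNewXcode14AVError(_ error: Error) -> Bool {
    let nsError = error as NSError
    guard nsError.domain == AVFoundationErrorDomain else { return false }
    // -11880 invalid sample cursor, -11881 failed to load sample data,
    // -11882 AirPlay receiver temporarily unavailable (per the hunk above).
    return ((-11882)...(-11880)).contains(nsError.code)
}
```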
#else
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFCore.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFCore.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFCore.h 2022-02-15 02:59:01.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFCore.h 2022-05-21 23:02:27.000000000 -0400
@@ -23,6 +23,7 @@
#import <AVFoundation/AVAssetTrackSegment.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
+#import <AVFoundation/AVAssetPlaybackAssistant.h>
#import <AVFoundation/AVAsynchronousKeyValueLoading.h>
#import <AVFoundation/AVAudioMix.h>
#import <AVFoundation/AVAudioProcessingSettings.h>
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 2022-02-14 23:57:08.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 2022-06-02 20:59:56.000000000 -0400
@@ -371,6 +371,9 @@
- Selector: 'insertTimeRange:ofAsset:atTime:error:'
SwiftName: insertTimeRange(_:of:at:)
MethodKind: Instance
+ - Selector: 'insertTimeRange:ofAsset:atTime:completionHandler:'
+ SwiftName: insertTimeRange(_:of:at:completionHandler:)
+ MethodKind: Instance
- Selector: 'insertEmptyTimeRange:'
SwiftName: insertEmptyTimeRange(_:)
MethodKind: Instance
@@ -578,6 +581,9 @@
- Selector: 'valueWithCMTimeMapping:'
SwiftName: init(timeMapping:)
MethodKind: Class
+ - Selector: 'valueWithCMVideoDimensions:'
+ SwiftName: init(videoDimensions:)
+ MethodKind: Class
Properties:
- Name: CMTimeValue
SwiftName: timeValue
@@ -585,6 +591,8 @@
SwiftName: timeRangeValue
- Name: CMTimeMappingValue
SwiftName: timeMappingValue
+ - Name: CMVideoDimensionsValue
+ SwiftName: videoDimensionsValue
- Name: NSCoder
Methods:
- Selector: 'decodeCMTimeForKey:'
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVGeometry.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVGeometry.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVGeometry.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVGeometry.h 2022-05-31 15:04:20.000000000 -0400
@@ -0,0 +1,66 @@
+#if !__has_include(<AVFCore/AVGeometry.h>)
+/*
+ File: AVGeometry.h
+
+ Framework: AVFoundation
+
+ Copyright 2022 Apple Inc. All rights reserved.
+
+ */
+
+#import <AVFoundation/AVBase.h>
+
+#import <Foundation/Foundation.h>
+
+#import <CoreGraphics/CGBase.h>
+#import <CoreGraphics/CGGeometry.h>
+#import <CoreMedia/CMFormatDescription.h>
+
+/*!
+ @function AVMakeRectWithAspectRatioInsideRect
+ @abstract Returns a scaled CGRect that maintains the aspect ratio specified by a CGSize within a bounding CGRect.
+ @discussion This is useful when attempting to fit the presentationSize property of an AVPlayerItem within the bounds of another CALayer.
+ You would typically use the return value of this function as an AVPlayerLayer frame property value. For example:
+ myPlayerLayer.frame = AVMakeRectWithAspectRatioInsideRect(myPlayerItem.presentationSize, mySuperLayer.bounds);
+ @param aspectRatio The width & height ratio, or aspect, you wish to maintain.
+ @param boundingRect The bounding CGRect you wish to fit into.
+ */
+
+AVF_EXPORT CGRect AVMakeRectWithAspectRatioInsideRect(CGSize aspectRatio, CGRect boundingRect) API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+
+
+/*
+ Utilities for packing and unpacking CMVideoDimension structs in NSValue objects
+ */
+@interface NSValue (NSValueCMVideoDimensionsExtensions)
+
+/*!
+ @method valueWithCMVideoDimensions
+ @abstract
+ Creates a NSValue object encoding a CMVideoDimensions struct value.
+
+ @param dimensions
+ The CMVideoDimensions struct to encode.
+ @result
+ An NSValue object encoding the provided dimensions.
+
+ @discussion
+ This extension simplifies converting CMVideoDimensions struct values into NSValue objects.
+ */
++ (NSValue *)valueWithCMVideoDimensions:(CMVideoDimensions)dimensions API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @property CMVideoDimensionsValue
+ @abstract
+ Returns the CMVideoDimensions struct encoded by this object.
+
+ @discussion
+ This property simplifies accessing the contents of AVCaptureDeviceFormat.supportedMaxPhotoDimensions which are CMVideoDimension struct values encoded in NSValue objects.
+ */
+@property (readonly) CMVideoDimensions CMVideoDimensionsValue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+@end
+
+#else
+#import <AVFCore/AVGeometry.h>
+#endif
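A brief Swift sketch of the new NSValue helpers declared in this header (Swift names init(videoDimensions:) and videoDimensionsValue per the apinotes hunk above):

```swift
import AVFoundation
import CoreMedia

// Round-trip a CMVideoDimensions value through NSValue.
let dimensions = CMVideoDimensions(width: 3840, height: 2160)
let boxed = NSValue(videoDimensions: dimensions)   // +valueWithCMVideoDimensions:
let unboxed = boxed.videoDimensionsValue           // CMVideoDimensionsValue
assert(unboxed.width == 3840 && unboxed.height == 2160)
```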
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataItem.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataItem.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataItem.h 2022-02-23 10:57:32.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMetadataItem.h 2022-05-31 15:04:20.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2017 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -20,23 +20,19 @@
NS_ASSUME_NONNULL_BEGIN
@class AVMetadataItemFilter;
+@class AVMutableMetadataItem;
/*!
- @class AVMetadataItem
-
- @abstract AVMetadataItem represents an item of metadata associated with an audiovisual asset or with
- one of its tracks.
-
- @discussion AVMetadataItems have keys that accord with the specification of the container format from
- which they're drawn. Full details of the metadata formats, metadata keys, and metadata keyspaces
- supported by AVFoundation are available among the defines in AVMetadataFormat.h.
+ @class AVMetadataItem
+ @abstract AVMetadataItem represents an item of metadata associated with an audiovisual asset or with one of its tracks.
+ @discussion
+ AVMetadataItems have keys that accord with the specification of the container format from which they're drawn. Full details of the metadata formats, metadata keys, and metadata keyspaces supported by AVFoundation are available among the defines in AVMetadataFormat.h.
- Note that arrays of AVMetadataItems vended by AVAsset and other classes are "lazy", similar
- to array-based keys that support key-value observing, meaning that you can obtain
- objects from those arrays without incurring overhead for items you don't ultimately inspect.
-
- You can filter arrays of AVMetadataItems by locale or by key and keySpace via the category
- AVMetadataItemArrayFiltering defined below.
+ Note that arrays of AVMetadataItems vended by AVAsset and other classes are "lazy", similar to array-based keys that support key-value observing, meaning that you can obtain objects from those arrays without incurring overhead for items you don't ultimately inspect.
+
+ AVMetadataItem conforms to NSMutableCopying, but for some "lazy" instances of AVMetadataItem, creating a mutable copy can cause properties to load synchronously. This can cause the calling thread to block while synchronous I/O is performed. To avoid the possibility of blocking, use the methods of the AVAsynchronousKeyValueLoading protocol to asynchronously load the `value` and `extraAttributes` properties prior to making a mutable copy.
+
+ You can filter arrays of AVMetadataItems by locale or by key and keySpace via the category AVMetadataItemArrayFiltering defined below.
*/
@class AVMetadataItemInternal;
@@ -66,10 +62,10 @@
@property (nonatomic, readonly, copy, nullable) NSString *dataType API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
/* provides the value of the metadata item */
-@property (nonatomic, readonly, copy, nullable) id<NSObject, NSCopying> value;
+@property (nonatomic, readonly, copy, nullable) id<NSObject, NSCopying> value AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.value) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* provides a dictionary of the additional attributes */
-@property (nonatomic, readonly, copy, nullable) NSDictionary<AVMetadataExtraAttributeKey, id> *extraAttributes;
+@property (nonatomic, readonly, copy, nullable) NSDictionary<AVMetadataExtraAttributeKey, id> *extraAttributes AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.extraAttributes) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
@@ -85,16 +81,16 @@
@interface AVMetadataItem (AVMetadataItemTypeCoercion)
/* provides the value of the metadata item as a string; will be nil if the value cannot be represented as a string */
-@property (nonatomic, readonly, nullable) NSString *stringValue;
+@property (nonatomic, readonly, nullable) NSString *stringValue AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.stringValue) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* provides the value of the metadata item as an NSNumber. If the metadata item's value can't be coerced to a number, @"numberValue" will be nil. */
-@property (nonatomic, readonly, nullable) NSNumber *numberValue;
+@property (nonatomic, readonly, nullable) NSNumber *numberValue AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.numberValue) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* provides the value of the metadata item as an NSDate. If the metadata item's value can't be coerced to a date, @"dateValue" will be nil. */
-@property (nonatomic, readonly, nullable) NSDate *dateValue;
+@property (nonatomic, readonly, nullable) NSDate *dateValue AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.dateValue) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
/* provides the raw bytes of the value of the metadata item */
-@property (nonatomic, readonly, nullable) NSData *dataValue;
+@property (nonatomic, readonly, nullable) NSData *dataValue AVF_DEPRECATED_FOR_SWIFT_ONLY("Use load(.dataValue) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
@end
@@ -103,9 +99,19 @@
/* The following two methods of the AVAsynchronousKeyValueLoading protocol are re-declared here so that they can be annotated with availability information. See AVAsynchronousKeyValueLoading.h for documentation. */
-- (AVKeyValueStatus)statusOfValueForKey:(NSString *)key error:(NSError * _Nullable * _Nullable)outError API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0), watchos(1.0));
+- (AVKeyValueStatus)statusOfValueForKey:(NSString *)key error:(NSError * _Nullable * _Nullable)outError
+#if __swift__
+API_DEPRECATED("Use status(of:) instead", macos(10.7, 13.0), ios(4.2, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0), watchos(1.0));
+#endif
-- (void)loadValuesAsynchronouslyForKeys:(NSArray<NSString *> *)keys completionHandler:(nullable void (^)(void))handler API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0), watchos(1.0));
+- (void)loadValuesAsynchronouslyForKeys:(NSArray<NSString *> *)keys completionHandler:(nullable void (^)(void))handler
+#if __swift__
+API_DEPRECATED("Use load(_:) instead. For non-deprecated properties that do not have an AVAsyncProperty equivalent, continue to query these properties synchronously", macos(10.7, 13.0), ios(4.2, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0), watchos(1.0));
+#endif
@end
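A minimal Swift sketch of the async replacements named in the deprecation messages above (the item would come from an asset's metadata):

```swift
import AVFoundation

func titleString(from item: AVMetadataItem) async throws -> String? {
    // load(.stringValue) replaces the deprecated synchronous stringValue accessor in Swift.
    try await item.load(.stringValue)
}
```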
@@ -200,7 +206,7 @@
AVMutableMetadataItemInternal *_mutablePriv;
}
-/* Indicates the identifier of the metadata item. Publicly defined identifiers are declared in AVMetadataIdentifiers.h. */
+/* Indicates the identifier of the metadata item. Publicly defined identifiers are declared in AVMetadataIdentifiers.h. This property throws an exception if identifier is not of the form \"<keySpace>/<key>\". */
@property (nonatomic, readwrite, copy, nullable) AVMetadataIdentifier identifier API_AVAILABLE(macos(10.10), ios(8.0), tvos(9.0), watchos(1.0));
/* indicates the IETF BCP 47 (RFC 4646) language identifier of the metadata item; may be nil if no language tag information is available */
@@ -335,6 +341,23 @@
@end
+/*!
+ @category AVMutableMetadataItem (SynchronousMetadataItemInterface)
+ @abstract Redeclarations of async-only AVMetadataItem interfaces to allow synchronous usage in the mutable subclass.
+ @discussion
+ See AVMetadataItem's interface for more information about these interfaces.
+ */
+@interface AVMutableMetadataItem (SynchronousMetadataItemInterface)
+
+#if __swift__
+@property (nonatomic, readonly, nullable) NSString *stringValue;
+@property (nonatomic, readonly, nullable) NSNumber *numberValue;
+@property (nonatomic, readonly, nullable) NSDate *dateValue;
+@property (nonatomic, readonly, nullable) NSData *dataValue;
+#endif // __swift__
+
+@end
+
NS_ASSUME_NONNULL_END
#else
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h 2022-02-23 07:13:11.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h 2022-06-03 18:07:13.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2009-2021 Apple Inc. All rights reserved.
+ Copyright 2009-2022 Apple Inc. All rights reserved.
*/
@@ -238,7 +238,12 @@
@result An instance of AVMovieTrack; may be nil if no track of the specified trackID is available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (nullable AVMovieTrack *)trackWithTrackID:(CMPersistentTrackID)trackID;
+- (nullable AVMovieTrack *)trackWithTrackID:(CMPersistentTrackID)trackID
+#if __swift__
+API_DEPRECATED("Use loadTrack(withTrackID:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTrackWithTrackID:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTrackWithTrackID:completionHandler:
@@ -258,7 +263,12 @@
@result An NSArray of AVMovieTracks; may be empty if no tracks of the specified media type are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVMovieTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
+- (NSArray<AVMovieTrack *> *)tracksWithMediaType:(AVMediaType)mediaType
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaType:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaType:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaType:completionHandler:
@@ -278,7 +288,12 @@
@result An NSArray of AVMovieTracks; may be empty if no tracks with the specified characteristic are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVMovieTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (NSArray<AVMovieTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaCharacteristic:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaCharacteristic:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
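A short Swift sketch of the async track-loading replacements referenced in the deprecation messages above (the movie instance is assumed to be loaded elsewhere):

```swift
import AVFoundation

func firstVideoTrack(of movie: AVMovie) async throws -> AVMovieTrack? {
    // loadTracks(withMediaType:) replaces the synchronous tracksWithMediaType:.
    try await movie.loadTracks(withMediaType: .video).first
}
```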
/*!
@method loadTracksWithMediaCharacteristic:completionHandler:
@@ -492,6 +507,7 @@
@result A BOOL value that indicates the success of the insertion.
@discussion This method may add new tracks to the target movie to ensure that all tracks of the asset are represented in the inserted timeRange.
Existing content at the specified startTime will be pushed out by the duration of timeRange.
+ Note that metadata will not be automatically copied.
*/
- (BOOL)insertTimeRange:(CMTimeRange)timeRange ofAsset:(AVAsset *)asset atTime:(CMTime)startTime copySampleData:(BOOL)copySampleData error:(NSError * _Nullable * _Nullable)outError;
@@ -553,7 +569,9 @@
@param options
An NSDictionary object that contains keys for specifying options for the initialization of the new AVMutableMovieTrack object. Pass nil for default initialization behavior.
@result An AVMutableMovieTrack object
- @discussion The trackID of the newly added track is a property of the returned instance of AVMutableMovieTrack.
+ @discussion The trackID of the newly added track is a property of the returned instance of AVMutableMovieTrack.
+ This method throws an exception if the media type is not equal to the track's media type.
+ Note that metadata will not be automatically copied.
*/
- (nullable AVMutableMovieTrack *)addMutableTrackWithMediaType:(AVMediaType)mediaType copySettingsFromTrack:(nullable AVAssetTrack *)track options:(nullable NSDictionary<NSString *, id> *)options;
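A small Swift sketch of the track-adding behavior noted above (mismatched media types raise an exception); the movie and source track are placeholders:

```swift
import AVFoundation

func addVideoTrack(to movie: AVMutableMovie,
                   copyingSettingsFrom source: AVAssetTrack?) -> AVMutableMovieTrack? {
    // The source track, if provided, must have the same media type (.video here).
    movie.addMutableTrack(withMediaType: .video, copySettingsFrom: source, options: nil)
}
```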
@@ -737,7 +755,12 @@
@result An instance of AVFragmentedMovieTrack; may be nil if no track of the specified trackID is available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (nullable AVFragmentedMovieTrack *)trackWithTrackID:(CMPersistentTrackID)trackID;
+- (nullable AVFragmentedMovieTrack *)trackWithTrackID:(CMPersistentTrackID)trackID
+#if __swift__
+API_DEPRECATED("Use loadTrack(withTrackID:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTrackWithTrackID:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTrackWithTrackID:completionHandler:
@@ -757,7 +780,12 @@
@result An NSArray of AVFragmentedMovieTracks; may be empty if no tracks of the specified media type are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVFragmentedMovieTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
+- (NSArray<AVFragmentedMovieTrack *> *)tracksWithMediaType:(AVMediaType)mediaType
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaType:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("loadTracksWithMediaType:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaType:completionHandler:
@@ -777,7 +805,12 @@
@result An NSArray of AVFragmentedMovieTracks; may be empty if no tracks with the specified characteristic are available.
@discussion Becomes callable without blocking when the key @"tracks" has been loaded
*/
-- (NSArray<AVFragmentedMovieTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (NSArray<AVFragmentedMovieTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if __swift__
+API_DEPRECATED("Use loadTracks(withMediaCharacteristic:) instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED("loadTracksWithMediaCharacteristic:completionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method loadTracksWithMediaCharacteristic:completionHandler:
@@ -853,6 +886,47 @@
@end
+/*!
+ @category AVMutableMovie (SynchronousAssetInterface)
+ @abstract Redeclarations of async-only AVAsset interfaces to allow synchronous usage in the synchronous subclass.
+ @discussion
+ See AVAsset's interface for more information about these interfaces.
+ */
+@interface AVMutableMovie (SynchronousAssetInterface)
+
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys;
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsBestMatchingPreferredLanguages:(NSArray<NSString *> *)preferredLanguages;
+- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (CMPersistentTrackID)unusedTrackID;
+
+#if __swift__
+@property (nonatomic, readonly) CMTime duration;
+@property (nonatomic, readonly) CMTime minimumTimeOffsetFromLive API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0), watchos(6.0));
+@property (nonatomic, readonly) BOOL providesPreciseDurationAndTiming;
+@property (nonatomic, readonly) NSArray<AVAssetTrackGroup *> *trackGroups API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, nullable) AVMetadataItem *creationDate API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, nullable) NSString *lyrics;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (readonly) NSArray<NSLocale *> *availableChapterLocales API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));;
+@property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0), watchos(2.0));
+@property (nonatomic, readonly) NSArray<AVMediaSelection *> *allMediaSelections API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0), watchos(4.0));
+@property (nonatomic, readonly) BOOL hasProtectedContent API_AVAILABLE(macos(10.7), ios(4.2), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL canContainFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) BOOL containsFragments API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) CMTime overallDurationHint API_AVAILABLE(macos(10.12.2), ios(10.2), tvos(10.2), watchos(3.2));
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isExportable) BOOL exportable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isReadable) BOOL readable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, getter=isComposable) BOOL composable API_AVAILABLE(macos(10.7), ios(4.3), tvos(9.0), watchos(1.0));
+@property (nonatomic, readonly, getter=isCompatibleWithSavedPhotosAlbum) BOOL compatibleWithSavedPhotosAlbum API_AVAILABLE(ios(5.0), tvos(9.0)) API_UNAVAILABLE(macos, watchos);
+@property (nonatomic, readonly, getter=isCompatibleWithAirPlayVideo) BOOL compatibleWithAirPlayVideo API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#endif // __swift__
+
+@end
+
NS_ASSUME_NONNULL_END
#else
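
The AVMovie deprecations above all point at the asynchronous loading APIs introduced alongside them. Below is a minimal, hedged Objective-C sketch of the migration for `tracksWithMediaCharacteristic:`; the movie URL, the chosen media characteristic, and the logging are placeholders, not part of the header change itself.

```objc
#import <AVFoundation/AVFoundation.h>

// Illustrative only: migrate from the blocking accessor (now deprecated for Swift
// and marked API_TO_BE_DEPRECATED for Objective-C) to the async replacement that
// the deprecation message names.
static void LoadLegibleTracks(NSURL *movieURL)
{
    AVMovie *movie = [AVMovie movieWithURL:movieURL options:nil];

    // Old pattern, shown above as deprecated:
    //   NSArray<AVMovieTrack *> *tracks =
    //       [movie tracksWithMediaCharacteristic:AVMediaCharacteristicLegible];

    // New pattern: the handler runs once the track information has been loaded.
    [movie loadTracksWithMediaCharacteristic:AVMediaCharacteristicLegible
                           completionHandler:^(NSArray<AVMovieTrack *> * _Nullable tracks,
                                               NSError * _Nullable error) {
        if (error != nil) {
            NSLog(@"Failed to load tracks: %@", error);
            return;
        }
        NSLog(@"Loaded %lu legible track(s)", (unsigned long)tracks.count);
    }];
}
```
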
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovieTrack.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovieTrack.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovieTrack.h 2022-02-23 07:16:15.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovieTrack.h 2022-05-31 15:03:40.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2009-2019 Apple Inc. All rights reserved.
+ Copyright 2009-2022 Apple Inc. All rights reserved.
*/
@@ -331,6 +331,7 @@
An AVMovieTrack object that is to be associated with the receiver.
@param trackAssociationType
The type of track association to add between the receiver and the specified movieTrack (for instance, AVTrackAssociationTypeChapterList).
+ @discussion This method throws an exception if the movie track belongs to a different movie.
*/
- (void)addTrackAssociationToTrack:(AVMovieTrack *)movieTrack type:(AVTrackAssociationType)trackAssociationType;
@@ -341,6 +342,7 @@
An AVMovieTrack object that is associated with the receiver.
@param trackAssociationType
The type of track association to remove between the receiver and the specified movieTrack (for instance, AVTrackAssociationTypeChapterList).
+ @discussion This method throws an exception if the movie track belongs to a different movie.
*/
- (void)removeTrackAssociationToTrack:(AVMovieTrack *)movieTrack type:(AVTrackAssociationType)trackAssociationType;
@@ -357,7 +359,7 @@
A CMFormatDescription to replace the specified format description.
@discussion You can use this method to make surgical changes to a track's format descriptions, such as adding format description extensions to a format description or changing the audio channel layout of an audio track. You should note that a format description can have extensions of type kCMFormatDescriptionExtension_VerbatimSampleDescription and kCMFormatDescriptionExtension_VerbatimISOSampleEntry; if you modify a copy of a format description, you should delete those extensions from the copy or your changes might be ignored.
- An NSInvalidArgumentException will be thrown if the media type of the new format description does not match the media type of the receiver.
+ This method throws an exception if the media type of the new format description does not match the media type of the receiver.
*/
- (void)replaceFormatDescription:(CMFormatDescriptionRef)formatDescription withFormatDescription:(CMFormatDescriptionRef)newFormatDescription API_AVAILABLE(macos(10.13), ios(13.0), watchos(6.0)) API_UNAVAILABLE(tvos);
@@ -396,6 +398,11 @@
using CMTimeRangeGetEnd on each to calculate the media TimeRange for -insertMediaTimeRange:intoTimeRange:.
It's safe for multiple threads to call this method on different tracks at once.
+
+ This method throws an exception for any of the following reasons:
+ - the sample buffer's media type does not match the track's media type
+ - the sample buffer contains image buffers (must contain encoded video)
+ - the sample buffer contains caption groups (must contain encoded media data)
*/
- (BOOL)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer decodeTime:(nullable CMTime *)outDecodeTime presentationTime:(nullable CMTime *)outPresentationTime error:(NSError * _Nullable * _Nullable)outError API_AVAILABLE(macos(10.12), ios(13.0), watchos(6.0)) API_UNAVAILABLE(tvos);
@@ -455,6 +462,42 @@
@end
+/*!
+ @category AVMutableMovieTrack (SynchronousTrackInterface)
+ @abstract Redeclarations of async-only AVAssetTrack interfaces to allow synchronous usage in the synchronous subclass.
+ @discussion
+ See AVAssetTrack's interface for more information about these interfaces.
+ */
+@interface AVMutableMovieTrack (SynchronousTrackInterface)
+
+- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
+- (nullable AVAssetTrackSegment *)segmentForTrackTime:(CMTime)trackTime;
+- (CMTime)samplePresentationTimeForTrackTime:(CMTime)trackTime;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
+- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+
+#if __swift__
+@property (nonatomic, readonly) NSArray *formatDescriptions;
+@property (nonatomic, readonly, getter=isPlayable) BOOL playable API_AVAILABLE(macos(10.8), ios(5.0), watchos(1.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, readonly, getter=isDecodable) BOOL decodable API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, readonly, getter=isSelfContained) BOOL selfContained;
+@property (nonatomic, readonly) long long totalSampleDataLength;
+@property (nonatomic, readonly) CMTimeRange timeRange;
+@property (nonatomic, readonly) CMTimeScale naturalTimeScale;
+@property (nonatomic, readonly) float estimatedDataRate;
+@property (nonatomic, readonly) BOOL hasAudioSampleDependencies API_AVAILABLE(macos(10.15), ios(13.0), watchos(6.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, readonly) float nominalFrameRate;
+@property (nonatomic, readonly) CMTime minFrameDuration API_AVAILABLE(macos(10.10), ios(7.0), watchos(1.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, readonly) BOOL requiresFrameReordering API_AVAILABLE(macos(10.10), ios(8.0), watchos(1.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, copy, readonly) NSArray<AVAssetTrackSegment *> *segments;
+@property (nonatomic, readonly) NSArray<AVMetadataItem *> *commonMetadata;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes API_AVAILABLE(macos(10.9), ios(7.0), watchos(1.0)) API_UNAVAILABLE(tvos);
+@property (nonatomic, readonly) BOOL canProvideSampleCursors API_AVAILABLE(macos(10.10), ios(16.0), watchos(9.0)) API_UNAVAILABLE(tvos);
+#endif // __swift__
+
+@end
+
NS_ASSUME_NONNULL_END
#else
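
The new `@discussion` notes on `addTrackAssociationToTrack:type:` and `removeTrackAssociationToTrack:type:` document an exception when the two tracks belong to different movies. A short illustrative sketch, assuming both tracks were obtained from the same `AVMutableMovie`:

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch only: associate a chapter-list track with a video track. Per the updated
// header comments, passing a track that belongs to a different movie raises an
// exception, so both tracks here are assumed to come from the same mutable movie.
static void AssociateChapterTrack(AVMutableMovieTrack *videoTrack,
                                  AVMutableMovieTrack *chapterTrack)
{
    [videoTrack addTrackAssociationToTrack:chapterTrack
                                      type:AVTrackAssociationTypeChapterList];
}
```
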
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVOutputSettingsAssistant.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVOutputSettingsAssistant.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVOutputSettingsAssistant.h 2022-02-23 07:16:14.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVOutputSettingsAssistant.h 2022-05-31 14:52:20.000000000 -0400
@@ -103,7 +103,7 @@
@abstract
A UTI indicating the type of file to be written, to be used when e.g. creating an instance of AVAssetWriter
@discussion
- Use UTTypeCopyPreferredTagWithClass / kUTTagClassFilenameExtension to get a suitable file extension for a given file type.
+ Use [[UTType typeWithIdentifier:outputFileType] preferredFilenameExtension] to get a suitable file extension for a given file type.
*/
@property (nonatomic, readonly) AVFileType outputFileType;
@@ -111,6 +111,7 @@
// Use these properties to give more information about the attributes of your source data, in order to get more informed recommendations
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVOutputSettingsAssistant (AVOutputSettingsAssistant_SourceInformation)
/*!
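
The revised comment on `outputFileType` recommends the `UTType` API over the older `UTTypeCopyPreferredTagWithClass` call. A small sketch of that lookup, assuming the UniformTypeIdentifiers framework is linked and a platform where `AVOutputSettingsAssistant` is available:

```objc
#import <AVFoundation/AVFoundation.h>
#import <UniformTypeIdentifiers/UniformTypeIdentifiers.h>

// Illustrative helper: map the assistant's output file type UTI to a filename
// extension using the UTType API that the updated discussion points to.
static NSString * _Nullable PreferredExtensionForAssistant(AVOutputSettingsAssistant *assistant)
{
    UTType *type = [UTType typeWithIdentifier:assistant.outputFileType];
    return type.preferredFilenameExtension; // e.g. "mp4" for AVFileTypeMPEG4
}
```
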
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlaybackCoordinator.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlaybackCoordinator.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlaybackCoordinator.h 2022-02-23 10:57:35.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlaybackCoordinator.h 2022-06-03 18:07:14.000000000 -0400
@@ -196,6 +196,7 @@
@category AVPlaybackCoordinator (AVCoordinatedPlaybackPolicies)
@abstract Policies used by AVPlaybackCoordinator to determine how to interact with the group.
*/
+API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos)
@interface AVPlaybackCoordinator (AVCoordinatedPlaybackPolicies)
/**
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayer.h 2022-02-23 07:10:15.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayer.h 2022-05-31 15:02:35.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -104,6 +104,8 @@
@param item
@result An instance of AVPlayer
@discussion Useful in order to play items for which an AVAsset has previously been created. See -[AVPlayerItem initWithAsset:].
+ This method throws an exception if the item is not an AVPlayerItem, or if the item is
+ associated with another AVPlayer.
*/
- (instancetype)initWithPlayerItem:(nullable AVPlayerItem *)item;
@@ -173,27 +175,50 @@
The effective rate of playback may differ from the desired rate even while timeControlStatus is AVPlayerTimeControlStatusPlaying, if the processing algorithm in use for managing audio pitch requires quantization of playback rate. For information about quantization of rates for audio processing, see AVAudioProcessingSettings.h. You can always obtain the effective rate of playback from the currentItem's timebase; see the timebase property of AVPlayerItem.
- This property must be accessed on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic) float rate NS_SWIFT_UI_ACTOR;
+@property (nonatomic) float rate
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
/*!
- @method play
- @abstract Signals the desire to begin playback at the current item's natural rate.
- @discussion Equivalent to setting the value of rate to 1.0.
+ @property defaultRate
+ @abstract Indicates the rate at which to start playback when play is called; defaults to 1.0.
+ @discussion
+ Setting this property does not imply playback starts automatically at this rate. Clients still have to kick off playback using `play`. Note that using setRate to start playback will skip using the value in this property nor would it update this property. Therefore, `setRate:1.0` is no longer recommended as a means to start playback. Use `play` instead. Use `setRate` for operations like scanning where the rate is to be updated instantaneously. Invoking `play` again would restore playback at the rate set in this property.
- This method must be invoked on the main thread/queue.
+ The effective rate of playback may still differ from the default rate subject to restrictions imposed by the system. See documentation for the rate property for a discussion on when the desired rate does not translate to effective rate.
+
+ */
+@property (nonatomic) float defaultRate API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @method play
+ @abstract Signals the desire to begin playback at the rate set in the defaultRate.
+ @discussion For releases up to iOS version 16.0, macOS versions 13.0, tvOS 16.0 and watchOS 9.0, this is equivalent to setting the value of rate to `1.0`. Starting from iOS version 16.0, macOS versions 13.0, tvOS 16.0 and watchOS 9.0, this will attempt to use the rate set in the `defaultRate` property. The effective rate of playback may differ from the `defaultRate` due to the reasons mentioned in the documentation of the `rate` property. Clients interested in knowing the effective rate can listen for `AVPlayerRateDidChangeNotification` notification.
+
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)play NS_SWIFT_UI_ACTOR;
+- (void)play
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
/*!
@method pause
@abstract Pauses playback.
@discussion Equivalent to setting the value of rate to 0.0.
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)pause NS_SWIFT_UI_ACTOR;
+- (void)pause
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
/*!
@enum AVPlayerTimeControlStatus
@@ -286,9 +311,13 @@
When the player's currentItem has a value of NO for playbackBufferEmpty, this method causes the value of rate to change to the specified rate, the value of timeControlStatus to change to AVPlayerTimeControlStatusPlaying, and the receiver to play the available media immediately, whether or not prior buffering of media data is sufficient to ensure smooth playback.
If insufficient media data is buffered for playback to start (e.g. if the current item has a value of YES for playbackBufferEmpty), the receiver will act as if the buffer became empty during playback, except that no AVPlayerItemPlaybackStalledNotification will be posted.
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)playImmediatelyAtRate:(float)rate NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.12), ios(10.0), tvos(10.0), watchos(3.0));
+- (void)playImmediatelyAtRate:(float)rate
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.12), ios(10.0), tvos(10.0), watchos(3.0));
@end
@@ -305,6 +334,7 @@
The AVPlayerItem that will become the player's current item.
@discussion
In all releases of iOS 4, invoking replaceCurrentItemWithPlayerItem: with an AVPlayerItem that's already the receiver's currentItem results in an exception being raised. Starting with iOS 5, it's a no-op.
+ This method throws an exception if the item already exists in the play queue.
*/
- (void)replaceCurrentItemWithPlayerItem:(nullable AVPlayerItem *)item;
@@ -332,7 +362,12 @@
AVPlayerActionAtItemEndNone = 2,
};
-/* indicates the action that the player should perform when playback of an item reaches its end time */
+/* */
+/*!
+ @property actionAtItemEnd
+ @abstract Indicates the action that the player should perform when playback of an item reaches its end time.
+ @discussion This property throws an exception if set to AVPlayerActionAtItemEndAdvance on an AVPlayer which is not an AVQueuePlayer.
+*/
@property AVPlayerActionAtItemEnd actionAtItemEnd;
@end
@@ -445,10 +480,14 @@
You can allow the value of automaticallyWaitsToMinimizeStalling to remain YES if you use an AVAssetResourceLoader delegate to manage content keys for FairPlay Streaming, to provide dynamically-generated master playlists for HTTP Live Streaming, or to respond to authentication challenges, but not to load media data for playback.
- This property must be accessed on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic) BOOL automaticallyWaitsToMinimizeStalling NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.12), ios(10.0), tvos(10.0), watchos(3.0));
+@property (nonatomic) BOOL automaticallyWaitsToMinimizeStalling
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.12), ios(10.0), tvos(10.0), watchos(3.0));
@@ -461,6 +500,9 @@
In other words: if hostClockTime is in the past, the timebase's time will be interpolated as though the timebase has been running at the requested rate since that time. If hostClockTime is in the future, the timebase will immediately start running at the requested rate from an earlier time so that it will reach the requested itemTime at the requested hostClockTime. (Note that the item's time will not jump backwards, but instead will sit at itemTime until the timebase reaches that time.)
Note that setRate:time:atHostTime: is not supported when automaticallyWaitsToMinimizeStalling is YES. For clients linked against iOS 10.0 and later or OS X 12.0 and later, invoking setRate:time:atHostTime: when automaticallyWaitsToMinimizeStalling is YES will raise an NSInvalidArgument exception. Support for HTTP Live Streaming content requires iOS 11, tvOS 11, macOS 10.13 or later.
+
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
+
@param itemTime The time to start playback from, specified precisely (i.e., with zero tolerance).
Pass kCMTimeInvalid to use the current item's current time.
@param hostClockTime
@@ -468,17 +510,20 @@
If hostClockTime is specified, the player will not ensure that media data is loaded before the timebase starts moving.
If hostClockTime is kCMTimeInvalid, the rate and time will be set together, but without external synchronization;
a host time in the near future will be used, allowing some time for media data loading.
-
- This method must be invoked on the main thread/queue.
*/
-- (void)setRate:(float)rate time:(CMTime)itemTime atHostTime:(CMTime)hostClockTime NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.8), ios(6.0), tvos(9.0), watchos(1.0));
+- (void)setRate:(float)rate time:(CMTime)itemTime atHostTime:(CMTime)hostClockTime
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.8), ios(6.0), tvos(9.0), watchos(1.0));
/*!
@method prerollAtRate:completionHandler:
@abstract Begins loading media data to prime the render pipelines for playback from the current time with the given rate.
@discussion Once the completion handler is called with YES, the player's rate can be set with minimal latency.
- The completion handler will be called with NO if the preroll is interrupted by a time change or incompatible rate change, or if preroll is not possible for some other reason.
- Call this method only when the rate is currently zero and only after the AVPlayer's status has become AVPlayerStatusReadyToPlay.
+ The completion handler will be called with NO if the preroll is interrupted by a time change or incompatible rate change, or if preroll is not possible for some other reason.
+ Call this method only when the rate is currently zero and only after the AVPlayer's status has become AVPlayerStatusReadyToPlay.
+ This method throws an exception if the status is not AVPlayerStatusReadyToPlay.
@param rate The intended rate for subsequent playback.
@param completionHandler
The block that will be called when the preroll is either completed or is interrupted.
@@ -556,7 +601,12 @@
-removeTimeObserver: to wait for any in-flight blocks to finish executing.
-removeTimeObserver: should be used to explicitly cancel each time observer added using -addPeriodicTimeObserverForInterval:queue:usingBlock:
and -addBoundaryTimeObserverForTimes:queue:usingBlock:.
-*/
+
+ This method throws an exception for any of the following reasons:
+ - observer was added by a different instance of AVPlayer
+ - observer was not returned by -addPeriodicTimeObserverForInterval:queue:usingBlock:
+ - observer was not returned by -addBoundaryTimeObserverForTimes:queue:usingBlock:
+ */
- (void)removeTimeObserver:(id)observer;
@end
@@ -602,9 +652,13 @@
Specific selections made by -[AVPlayerItem selectMediaOption:inMediaSelectionGroup:] within any group will override automatic selection in that group until -[AVPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:] is received.
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)setMediaSelectionCriteria:(nullable AVPlayerMediaSelectionCriteria *)criteria forMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+- (void)setMediaSelectionCriteria:(nullable AVPlayerMediaSelectionCriteria *)criteria forMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
/*!
@method mediaSelectionCriteriaForMediaCharacteristic:
@@ -612,9 +666,13 @@
@param mediaCharacteristic
The media characteristic for which the selection criteria is to be returned. Supported values include AVMediaCharacteristicAudible, AVMediaCharacteristicLegible, and AVMediaCharacteristicVisual.
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (nullable AVPlayerMediaSelectionCriteria *)mediaSelectionCriteriaForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+- (nullable AVPlayerMediaSelectionCriteria *)mediaSelectionCriteriaForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
@end
@@ -805,10 +863,14 @@
@discussion
Default is YES on iOS, tvOS and in Mac Catalyst apps. Default is NO on macOS.
Setting this property to NO does not force the display to sleep, it simply stops preventing display sleep. Other apps or frameworks within your app may still be preventing display sleep for various reasons.
-
- This property must be accessed on the main thread/queue.
+
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic) BOOL preventsDisplaySleepDuringVideoPlayback NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.14), ios(12.0), tvos(12.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic) BOOL preventsDisplaySleepDuringVideoPlayback
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.14), ios(12.0), tvos(12.0)) API_UNAVAILABLE(watchos);
@end
@@ -928,6 +990,8 @@
An NSArray of AVPlayerItems with which to populate the player's queue initially.
@result
An instance of AVQueuePlayer.
+ @discussion
+ This method throws an exception if items contains duplicated values or values associated with another AVPlayer.
*/
- (AVQueuePlayer *)initWithItems:(NSArray<AVPlayerItem *> *)items;
@@ -966,6 +1030,8 @@
The item to be inserted.
@param afterItem
The item that the newly inserted item should follow in the queue. Pass nil to append the item to the queue.
+ @discussion
+ This method throws an exception if item already exists in the queue.
*/
- (void)insertItem:(AVPlayerItem *)item afterItem:(nullable AVPlayerItem *)afterItem;
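
Taken together, the new `defaultRate` property and the revised `play` semantics above change how playback is normally started on the 2022 releases. A hedged sketch follows; the 1.5 rate is purely illustrative, and the notification observer is only there to show where the effective rate can be read back.

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch only: start playback via play, letting defaultRate (where available)
// determine the starting rate instead of calling setRate:1.0.
static void StartPlayback(AVPlayer *player)
{
    if (@available(macOS 13.0, iOS 16.0, tvOS 16.0, watchOS 9.0, *)) {
        player.defaultRate = 1.5f;  // subsequent play invocations use this rate
    }
    [player play];                  // no longer simply "setRate:1.0" on the 2022 releases

    // The effective rate may differ from defaultRate; observe the change notification.
    [[NSNotificationCenter defaultCenter] addObserverForName:AVPlayerRateDidChangeNotification
                                                      object:player
                                                       queue:nil
                                                  usingBlock:^(NSNotification *note) {
        NSLog(@"Effective rate is now %f", player.rate);
    }];
}
```
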
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerInterstitialEventController.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerInterstitialEventController.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerInterstitialEventController.h 2022-02-23 07:56:28.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerInterstitialEventController.h 2022-05-31 15:03:40.000000000 -0400
@@ -33,6 +33,22 @@
} NS_SWIFT_NAME(AVPlayerInterstitialEvent.Restrictions) API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
/*!
+ @enum AVPlayerInterstitialEventCue
+ @abstract A particular cue can be specified when creating AVPlayerInterstitialEvents to override the start time/date to a predefined position.
+
+ @constant AVPlayerInterstitialEventNoCue
+ No cue specified; event playback should start at event time (or date).
+ @constant AVPlayerInterstitialEventJoinCue
+ Event playback should occur before starting primary playback, regardless of initial primary playback position.
+ @constant AVPlayerInterstitialEventLeaveCue
+ Event playback should occur after primary playback ends without error, either at the end of the primary asset or at the client-specified forward playback end time.
+*/
+typedef NSString * AVPlayerInterstitialEventCue NS_TYPED_ENUM NS_SWIFT_NAME(AVPlayerInterstitialEvent.Cue) API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+AVF_EXPORT AVPlayerInterstitialEventCue const AVPlayerInterstitialEventNoCue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+AVF_EXPORT AVPlayerInterstitialEventCue const AVPlayerInterstitialEventJoinCue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+AVF_EXPORT AVPlayerInterstitialEventCue const AVPlayerInterstitialEventLeaveCue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
@class AVPlayerInterstitialEvent
@abstract
@@ -47,9 +63,10 @@
If you wish to observe the scheduling and progress of interstitial events, use an AVPlayerInterstitialEventMonitor. If you wish to specify your own schedule of interstitial events, use an AVPlayerInterstitialEventController.
+ Note that while previously AVPlayerInterstitialEvent was an immutable object, it is now mutable. This allows it to be created and customized before being set on an AVPlayerInterstitialEventController.
*/
API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0))
-@interface AVPlayerInterstitialEvent : NSObject
+@interface AVPlayerInterstitialEvent : NSObject <NSCopying>
AV_INIT_UNAVAILABLE
/*!
@@ -62,7 +79,7 @@
@param time
The time within the duration of the primary item at which playback of the primary content should be temporarily suspended and the interstitial items played.
@param templateItems
- An array of AVPlayerItems with configurations that will be reproduced for the playback of interstitial content.
+ An array of AVPlayerItems with configurations that will be reproduced for the playback of interstitial content. An NSInvalidArgumentException will be raised if any of the template items employs an AVAsset that lacks a URL, such as an AVComposition.
@param restrictions
Indicates restrictions on the use of end user playback controls that are imposed by the event.
@param resumptionOffset
@@ -73,7 +90,7 @@
Storage for attributes defined by the client or the content vendor. Attribute names should begin with X- for uniformity with server insertion.
@result An instance of AVPlayerInterstitialEvent.
*/
-+ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem identifier:(nullable NSString *)identifier time:(CMTime)time templateItems:(NSArray<AVPlayerItem *> *)templateItems restrictions:(AVPlayerInterstitialEventRestrictions)restrictions resumptionOffset:(CMTime)resumptionOffset playoutLimit:(CMTime)playoutLimit userDefinedAttributes:(nullable NSDictionary*)userDefinedAttributes NS_REFINED_FOR_SWIFT;
++ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem identifier:(nullable NSString *)identifier time:(CMTime)time templateItems:(NSArray<AVPlayerItem *> *)templateItems restrictions:(AVPlayerInterstitialEventRestrictions)restrictions resumptionOffset:(CMTime)resumptionOffset playoutLimit:(CMTime)playoutLimit userDefinedAttributes:(nullable NSDictionary*)userDefinedAttributes NS_REFINED_FOR_SWIFT API_DEPRECATED_WITH_REPLACEMENT("interstitialEventWithPrimaryItem:time:", macos(12.0, API_TO_BE_DEPRECATED), ios(15.0, API_TO_BE_DEPRECATED), tvos(15.0, API_TO_BE_DEPRECATED), watchos(8.0, API_TO_BE_DEPRECATED));
/*!
@method interstitialEventWithPrimaryItem:date:templateItems:restrictions:resumptionOffset:
@@ -85,7 +102,7 @@
@param date
The date within the date range of the primary item at which playback of the primary content should be temporarily suspended and the interstitial items played.
@param templateItems
- An array of AVPlayerItems with configurations that will be reproduced for the playback of interstitial content.
+ An array of AVPlayerItems with configurations that will be reproduced for the playback of interstitial content. An NSInvalidArgumentException will be raised if any of the template items employs an AVAsset that lacks a URL, such as an AVComposition.
@param restrictions
Indicates restrictions on the use of end user playback controls that are imposed by the event.
@param resumptionOffset
@@ -96,7 +113,29 @@
Storage for attributes defined by the client or the content vendor. Attribute names should begin with X- for uniformity with server insertion.
@result An instance of AVPlayerInterstitialEvent.
*/
-+ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem identifier:(nullable NSString *)identifier date:(NSDate *)date templateItems:(NSArray<AVPlayerItem *> *)templateItems restrictions:(AVPlayerInterstitialEventRestrictions)restrictions resumptionOffset:(CMTime)resumptionOffset playoutLimit:(CMTime)playoutLimit userDefinedAttributes:(nullable NSDictionary*)userDefinedAttributes NS_REFINED_FOR_SWIFT;
++ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem identifier:(nullable NSString *)identifier date:(NSDate *)date templateItems:(NSArray<AVPlayerItem *> *)templateItems restrictions:(AVPlayerInterstitialEventRestrictions)restrictions resumptionOffset:(CMTime)resumptionOffset playoutLimit:(CMTime)playoutLimit userDefinedAttributes:(nullable NSDictionary*)userDefinedAttributes NS_REFINED_FOR_SWIFT API_DEPRECATED_WITH_REPLACEMENT("interstitialEventWithPrimaryItem:date:", macos(12.0, API_TO_BE_DEPRECATED), ios(15.0, API_TO_BE_DEPRECATED), tvos(15.0, API_TO_BE_DEPRECATED), watchos(8.0, API_TO_BE_DEPRECATED));
+
+/*!
+ @method interstitialEventWithPrimaryItem:time:
+ @abstract Returns an instance of AVPlayerInterstitialEvent for use in scheduling interstitial playback.
+ @param primaryItem
+ An AVPlayerItem representing the primary content during the playback of which the interstitial event should occur. The primaryItem must have an AVAsset that provides an intrinsic mapping from its timeline to real-time dates.
+ @param time
+ The time within the duration of the primary item at which playback of the primary content should be temporarily suspended and the interstitial items played.
+ @result An instance of AVPlayerInterstitialEvent.
+*/
++ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem time:(CMTime)time API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @method interstitialEventWithPrimaryItem:date:
+ @abstract Returns an instance of AVPlayerInterstitialEvent for use in scheduling interstitial playback.
+ @param primaryItem
+ An AVPlayerItem representing the primary content during the playback of which the interstitial event should occur. The primaryItem must have an AVAsset that provides an intrinsic mapping from its timeline to real-time dates.
+ @param date
+ The date within the date range of the primary item at which playback of the primary content should be temporarily suspended and the interstitial items played.
+ @result An instance of AVPlayerInterstitialEvent.
+*/
++ (instancetype)interstitialEventWithPrimaryItem:(AVPlayerItem *)primaryItem date:(NSDate *)date API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@property primaryItem
@@ -128,6 +167,10 @@
/*!
@property templateItems
@abstract An array of AVPlayerItems with configurations that will be reproduced for the playback of interstitial content.
+ @discussion
+ If you want the instances of AVURLAsset used during interstitial playback to be identical to the ones you specify for templateItems in AVPlayerInterstitialEvents that you set on an AVPlayerInterstitialEventController, rather than equivalent AVURLAssets with the same URL, you must create them with a value for the key AVURLAssetPrimarySessionIdentifierKey that's equal to the httpSessionIdentifier of the primary AVPlayerItem's asset. See AVAsset.h. This is especially useful if you require the use of a custom AVAssetResourceLoader delegate for interstitial assets.
+
+ An NSInvalidArgumentException will be raised if any of the template items employs an AVAsset that lacks a URL, such as an AVComposition.
*/
@property (nonatomic, readonly) NSArray<AVPlayerItem *> *templateItems;
@@ -154,6 +197,36 @@
@property (nonatomic, readonly) CMTime playoutLimit;
/*!
+ @property alignsStartWithPrimarySegmentBoundary
+ @abstract Specifies that the start time of interstitial playback should be snapped to a segment boundary of the primary asset
+ @discussion
+ If true, the start time or date of the interstitial will be adjusted to the nearest segment boundary when the primary player is playing an HTTP Live Streaming asset.
+*/
+@property (nonatomic, readonly) BOOL alignsStartWithPrimarySegmentBoundary API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @property alignsResumptionWithPrimarySegmentBoundary
+ @abstract Specifies that the resumption time of primary playback should be snapped to a segment boundary of the primary asset
+ @discussion
+ If true, the resumption time of primary playback following an interstitial will be adjusted to the nearest segment boundary when the primary player is playing an HTTP Live Streaming asset.
+*/
+@property (nonatomic, readonly) BOOL alignsResumptionWithPrimarySegmentBoundary API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @property cue
+ @abstract The cue property is used to schedule event playback at a predefined position of primary playback.
+*/
+@property (nonatomic, readonly) AVPlayerInterstitialEventCue cue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
+ @property willPlayOnce
+ @abstract Specifies that the interstitial should be scheduled for playback once only, and suppressed for subsequent replay.
+ @discussion
+ The "once" provision takes effect at the start of interstitial playback. The interstitial will not be scheduled again even if the first playback is canceled before completion.
+*/
+@property (nonatomic, readonly) BOOL willPlayOnce API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+
+/*!
@property userDefinedAttributes
@abstract Attributes of the event defined by the content vendor or the client.
@discussion
@@ -163,6 +236,22 @@
@end
+@interface AVPlayerInterstitialEvent (MutableEvents)
+@property (nonatomic, readwrite, weak) AVPlayerItem *primaryItem API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite, copy) NSString *identifier API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) CMTime time API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite, copy, nullable) NSDate *date API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite, copy) NSArray<AVPlayerItem *> *templateItems API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) AVPlayerInterstitialEventRestrictions restrictions API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) CMTime resumptionOffset API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) CMTime playoutLimit API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) BOOL alignsStartWithPrimarySegmentBoundary API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) BOOL alignsResumptionWithPrimarySegmentBoundary API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite, retain) AVPlayerInterstitialEventCue cue API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite) BOOL willPlayOnce API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@property (nonatomic, readwrite, copy) NSDictionary *userDefinedAttributes API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
+@end
+
/*!
@class AVPlayerInterstitialEventMonitor
@@ -205,8 +294,9 @@
@discussion
When interstitial events follow a schedule specified intrinsically within the content of primary items, the value of this property will typically change whenever the currentItem of the primaryPlayer changes. For HLS content that specifies interstitials via the use of DATERANGE tags, the value of this property may also change whenever the set of DATERANGE tags in the currentItem's media playlist changes.
When interstitial events follow a schedule specified via use of an AVPlayerInterstitialEventController, the value of this property changes only when a new schedule is set on the AVPlayerInterstitialEventController.
+ The events returned in this array are immutable. Attempting to mutate them will trigger an exception. To alter an event, make a copy and mutate the copy.
*/
-@property (readonly) NSArray<AVPlayerInterstitialEvent *> *events;
+@property (readonly, copy) NSArray<AVPlayerInterstitialEvent *> *events;
/*!
@property currentEvent
@@ -249,9 +339,14 @@
@param primaryPlayer
The AVPlayer that will play the primaryItems of the receiver's interstitial events.
@result An instance of AVPlayerInterstitialEventController.
+ @discussion This method throws an exception if the primary player is an interstitial player.
*/
+ (instancetype)interstitialEventControllerWithPrimaryPlayer:(AVPlayer *)primaryPlayer;
+/*!
+ @method initWithPrimaryPlayer:
+ @discussion This method throws an exception if the primary player is an interstitial player.
+ */
- (instancetype)initWithPrimaryPlayer:(AVPlayer *)primaryPlayer;
/*!
@@ -265,6 +360,10 @@
If interstitial events are scheduled with dates that coincide either with the date of another scheduled interstitial event or with the date range of the primary content that's omitted according to the resumption offset of another scheduled interstitial event, the primary content will remain suspended until all coinciding interstitial events have been completed. The effective resumption offset will be the sum of the resumption offsets of the coinciding interstitial events. (Note that the sum of a numeric CMTime and kCMTimeIndefinite is kCMTimeIndefinite.)
If interstitial events are scheduled for the same date, they are ordered according to their position in the events array.
+
+ The receiver will make a copy of the events that are set on it. Subsequent mutations on the original events will have no effect on the copy.
+
+ An NSInvalidArgumentException will be raised if an under-specified AVPlayerInterstitialEvent is set, such as one with a nil primaryItem, or with neither a time nor a date.
*/
@property (copy, null_resettable) NSArray<AVPlayerInterstitialEvent *> *events;
@@ -299,9 +398,13 @@
/*!
@property automaticallyHandlesInterstitialEvents
@abstract Allows interstitials to be played according to a schedule that's specified by server-side directives. The default value is YES. A value of NO prevents automatic scheduling of future server-side interstitial events. Events specified by an AVPlayerInterstitialEventController override server-side events, regardless of the value of this property.
- @discussion This property must be accessed on the main thread/queue.
-*/
-@property (nonatomic) BOOL automaticallyHandlesInterstitialEvents NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
+ @discussion Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
+ */
+@property (nonatomic) BOOL automaticallyHandlesInterstitialEvents
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0), watchos(8.0));
/*!
@property templatePlayerItem
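
The new factory methods and the `MutableEvents` category above replace the long parameterized constructors for `AVPlayerInterstitialEvent`. An illustrative sketch of building and scheduling a single event, assuming the 2022 SDKs; the ad URL and the 30-second trigger time are placeholders.

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch only: create a mutable interstitial event, customize it, then hand a
// schedule to an event controller (which copies the events it is given).
static void ScheduleMidRollAd(AVPlayer *primaryPlayer, AVPlayerItem *primaryItem, NSURL *adURL)
{
    AVPlayerItem *adItem = [AVPlayerItem playerItemWithURL:adURL];

    AVPlayerInterstitialEvent *event =
        [AVPlayerInterstitialEvent interstitialEventWithPrimaryItem:primaryItem
                                                               time:CMTimeMake(30, 1)];
    event.templateItems    = @[ adItem ];
    event.resumptionOffset = kCMTimeZero; // resume primary playback where it left off
    event.willPlayOnce     = YES;         // suppress replays of this event

    AVPlayerInterstitialEventController *controller =
        [AVPlayerInterstitialEventController interstitialEventControllerWithPrimaryPlayer:primaryPlayer];
    controller.events = @[ event ];
}
```
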
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItem.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItem.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItem.h 2022-02-23 07:10:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItem.h 2022-05-31 14:52:20.000000000 -0400
@@ -160,8 +160,16 @@
*/
- (instancetype)initWithAsset:(AVAsset *)asset automaticallyLoadedAssetKeys:(nullable NSArray<NSString *> *)automaticallyLoadedAssetKeys NS_DESIGNATED_INITIALIZER API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
-- (id)copyWithZone:(nullable NSZone *)zone NS_SWIFT_UI_ACTOR;
-- (id)copy NS_SWIFT_UI_ACTOR;
+- (id)copyWithZone:(nullable NSZone *)zone
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
+- (id)copy
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
/*!
@property status
@@ -384,6 +392,8 @@
set to NO. If the new request completes without being interrupted by another seek request or by any other operation the specified
completion handler will be invoked with the finished parameter set to YES.
If the seek time is outside of seekable time ranges as indicated by seekableTimeRanges property, the seek request will be cancelled and the completion handler will be invoked with the finished parameter set to NO.
+
+ This method throws an exception if time is invalid or indefinite.
*/
- (void)seekToTime:(CMTime)time completionHandler:(void (^_Nullable)(BOOL finished))completionHandler API_AVAILABLE(macos(10.7), ios(5.0), tvos(9.0), watchos(1.0));
@@ -402,6 +412,8 @@
request completes without being interrupted by another seek request or by any other operation the specified completion handler will be invoked with the
finished parameter set to YES.
If the seek time is outside of seekable time ranges as indicated by seekableTimeRanges property, the seek request will be cancelled and the completion handler will be invoked with the finished parameter set to NO.
+
+ This method throws an exception if time is invalid or indefinite or if tolerance before or tolerance after is invalid or negative.
*/
- (void)seekToTime:(CMTime)time toleranceBefore:(CMTime)toleranceBefore toleranceAfter:(CMTime)toleranceAfter completionHandler:(void (^_Nullable)(BOOL finished))completionHandler API_AVAILABLE(macos(10.7), ios(5.0), tvos(9.0), watchos(1.0));
@@ -467,10 +479,18 @@
/*!
@property videoComposition
@abstract Indicates the video composition settings to be applied during playback.
- @discussion This property must be accessed on the main thread/queue.
-
- */
-@property (nonatomic, copy, nullable) AVVideoComposition *videoComposition NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+ @discussion Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
+
+ This property throws an exception if a video composition is set with any of the following values:
+ - renderSize, renderScale, or frameDuration is less than or equal to zero
+ - sourceTrackIDForFrameTiming is less than or equal to zero
+ - uses AVVideoCompositionCoreAnimationTool (works for offline rendering only)
+ */
+@property (nonatomic, copy, nullable) AVVideoComposition *videoComposition
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos);
/*!
@property customVideoCompositor
@@ -479,9 +499,13 @@
This property is nil if there is no video compositor, or if the internal video compositor is in use. This reference can be used to provide
extra context to the custom video compositor instance if required.
- This property must be accessed on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic, readonly, nullable) id<AVVideoCompositing> customVideoCompositor NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly, nullable) id<AVVideoCompositing> customVideoCompositor
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
/*!
@property seekingWaitsForVideoCompositionRendering
@@ -729,9 +753,13 @@
all media selection options in the group.
Note that if multiple options within a group meet your criteria for selection according to locale or other considerations, and if these options are otherwise indistinguishable to you according to media characteristics that are meaningful for your application, content is typically authored so that the first available option that meets your criteria is appropriate for selection.
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)selectMediaOption:(nullable AVMediaSelectionOption *)mediaSelectionOption inMediaSelectionGroup:(AVMediaSelectionGroup *)mediaSelectionGroup NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
+- (void)selectMediaOption:(nullable AVMediaSelectionOption *)mediaSelectionOption inMediaSelectionGroup:(AVMediaSelectionGroup *)mediaSelectionGroup
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0), watchos(1.0));
/*!
@method selectMediaOptionAutomaticallyInMediaSelectionGroup:
@@ -741,9 +769,13 @@
@discussion
Has no effect unless the appliesMediaSelectionCriteriaAutomatically property of the associated AVPlayer is YES and unless automatic media selection has previously been overridden via -[AVPlayerItem selectMediaOption:inMediaSelectionGroup:].
- This method must be invoked on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this method must be invoked on the main thread/queue.
*/
-- (void)selectMediaOptionAutomaticallyInMediaSelectionGroup:(AVMediaSelectionGroup *)mediaSelectionGroup NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
+- (void)selectMediaOptionAutomaticallyInMediaSelectionGroup:(AVMediaSelectionGroup *)mediaSelectionGroup
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0), watchos(1.0));
/*!
@property currentMediaSelection
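
The seek methods above now document exceptions for invalid or indefinite times and for negative tolerances. A small guarded-seek sketch; the 10-second target and half-second tolerance are illustrative values only.

```objc
#import <AVFoundation/AVFoundation.h>

// Sketch only: validate the target time before seeking, mirroring the conditions
// the updated @discussion says would otherwise raise an exception.
static void SeekNearTenSeconds(AVPlayerItem *item)
{
    CMTime target = CMTimeMake(10, 1);
    if (!CMTIME_IS_VALID(target) || CMTIME_IS_INDEFINITE(target)) {
        return;
    }
    [item seekToTime:target
     toleranceBefore:kCMTimeZero
      toleranceAfter:CMTimeMake(1, 2)   // accept up to half a second past the target
   completionHandler:^(BOOL finished) {
        NSLog(@"Seek %@", finished ? @"completed" : @"was interrupted or cancelled");
    }];
}
```
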
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemOutput.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemOutput.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemOutput.h 2022-02-23 07:14:09.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemOutput.h 2022-05-31 15:03:41.000000000 -0400
@@ -173,6 +173,7 @@
@param pixelBufferAttributes
The client requirements for output CVPixelBuffers, expressed using the constants in <CoreVideo/CVPixelBuffer.h>.
@result An instance of AVPlayerItemVideoOutput.
+ @discussion This method throws an exception if the pixel buffer attributes contain keys that are not pixel buffer attribute keys.
*/
- (instancetype)initWithPixelBufferAttributes:(nullable NSDictionary<NSString *, id> *)pixelBufferAttributes NS_DESIGNATED_INITIALIZER;
@@ -190,6 +191,10 @@
AVVideoAllowWideColorKey
@result An instance of AVPlayerItemVideoOutput.
+ @discussion This method throws an exception for any of the following reasons:
+ - the output settings dictionary is empty
+ - the settings will yield compressed output
+ - the settings do not honor the requirements listed above for outputSettings
*/
- (instancetype)initWithOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings API_AVAILABLE(macos(10.12), ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos) NS_DESIGNATED_INITIALIZER;
@@ -344,7 +349,7 @@
@end
-
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVPlayerItemLegibleOutput (AVPlayerItemLegibleOutput_NativeRepresentation)
/*!
@@ -357,12 +362,14 @@
Add media subtype FourCC number objects to the subtypes array to elect to receive that type as a CMSampleBuffer instead of an NSAttributedString. Initializing an AVPlayerItemLegibleOutput using the -init method is equivalent to calling -initWithMediaSubtypesForNativeRepresentation: with an empty array, which means that all legible data, regardless of media subtype, will be delivered using NSAttributedString in a common format.
If a media subtype for which there is no legible data in the current player item is included in the media subtypes array, no error will occur. AVPlayerItemLegibleOutput will not vend closed caption data as CMSampleBuffers, so it is an error to include 'c608' in the media subtypes array.
+
+ This method throws an exception if any media subtype is kCMClosedCaptionFormatType_CEA608 (native representation is not available for media subtype).
*/
- (instancetype)initWithMediaSubtypesForNativeRepresentation:(NSArray<NSNumber *> *)subtypes;
@end
-
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos)
@interface AVPlayerItemLegibleOutput (AVPlayerItemLegibleOutput_TextStylingResolution)
/*!
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemTrack.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemTrack.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemTrack.h 2022-02-23 07:16:14.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerItemTrack.h 2022-05-31 15:04:21.000000000 -0400
@@ -50,18 +50,26 @@
/*!
@property enabled
@abstract Indicates whether the track is enabled for presentation during playback.
- @discussion This property must be accessed on the main thread/queue.
+ @discussion Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic, assign, getter=isEnabled) BOOL enabled NS_SWIFT_UI_ACTOR;
+@property (nonatomic, assign, getter=isEnabled) BOOL enabled
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+;
/*!
@property currentVideoFrameRate
@abstract If the media type of the assetTrack is AVMediaTypeVideo, indicates the current frame rate of the track as it plays, in units of frames per second. If the item is not playing, or if the media type of the track is not video, the value of this property is 0.
@discussion This property is not observable.
- This property must be accessed on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic, readonly) float currentVideoFrameRate NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+@property (nonatomic, readonly) float currentVideoFrameRate
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
@@ -77,9 +85,13 @@
@discussion A value of nil indicates default processing of video frames. If you want video fields to be deinterlaced, set videoFieldMode to AVPlayerItemTrackVideoFieldModeDeinterlaceFields.
You can test whether video being played has multiple fields by examining the underlying AVAssetTrack's format descriptions. See -[AVAssetTrack formatDescriptions] and, for video format descriptions, kCMFormatDescriptionExtension_FieldCount.
- This property must be accessed on the main thread/queue.
+ Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic, copy, nullable) NSString *videoFieldMode NS_SWIFT_UI_ACTOR API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
+@property (nonatomic, copy, nullable) NSString *videoFieldMode
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR
+#endif
+API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos);
#endif
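The relaxed threading requirement above applies only when deploying to the 2022 OS releases and later. A minimal sketch, assuming an app that still deploys to earlier releases and therefore keeps the write on the main queue:

    #import <AVFoundation/AVFoundation.h>

    static void DisableTrack(AVPlayerItemTrack *track)
    {
        // Before macOS 13 / iOS 16 / tvOS 16 / watchOS 9 the enabled property is
        // main-thread only, so hop to the main queue for the write.
        dispatch_async(dispatch_get_main_queue(), ^{
            track.enabled = NO;
        });
    }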
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h 2022-02-23 07:14:09.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h 2022-05-31 14:52:21.000000000 -0400
@@ -80,7 +80,7 @@
@property readyForDisplay
@abstract Boolean indicating that the first video frame has been made ready for display for the current item of the associated AVPlayer.
@discussion Use this property as an indicator of when best to show or animate-in an AVPlayerLayer into view.
- An AVPlayerLayer may be displayed, or made visible, while this propoerty is NO, however the layer will not have any user-visible content until the value becomes YES. Note that if an animation is added to an AVPlayerLayer before it becomes readyForDisplay the video image displayed inside might not animate with the receiver.
+ An AVPlayerLayer may be displayed, or made visible, while this property is NO, however the layer will not have any user-visible content until the value becomes YES. Note that if an animation is added to an AVPlayerLayer before it becomes readyForDisplay the video image displayed inside might not animate with the receiver.
This property remains NO for an AVPlayer currentItem whose AVAsset contains no enabled video tracks.
*/
@property(nonatomic, readonly, getter=isReadyForDisplay) BOOL readyForDisplay;
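A hedged sketch of the usage the readyForDisplay discussion describes: observe the property and reveal the layer only once the first frame is ready. The observer class and the simple opacity flip are illustrative only, and apply to platforms where AVPlayerLayer is available:

    #import <AVFoundation/AVFoundation.h>

    @interface PlayerLayerRevealer : NSObject
    @end

    @implementation PlayerLayerRevealer
    - (void)observeLayer:(AVPlayerLayer *)layer
    {
        layer.opacity = 0.0f;
        // Remember to call -removeObserver:forKeyPath: when done observing.
        [layer addObserver:self forKeyPath:@"readyForDisplay"
                   options:NSKeyValueObservingOptionNew context:NULL];
    }

    - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
                            change:(NSDictionary *)change context:(void *)context
    {
        AVPlayerLayer *layer = (AVPlayerLayer *)object;
        if ([keyPath isEqualToString:@"readyForDisplay"] && layer.readyForDisplay) {
            layer.opacity = 1.0f; // an animate-in could replace this direct assignment
        }
    }
    @end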
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVRouteDetector.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVRouteDetector.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVRouteDetector.h 2022-02-23 07:16:15.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVRouteDetector.h 2022-05-31 15:04:22.000000000 -0400
@@ -46,6 +46,14 @@
@property (readonly) BOOL multipleRoutesDetected;
/*!
+ @property detectsCustomRoutes
+ @abstract Whether or not route detection will include custom routes. The default value is NO.
+ @discussion
+ Only set this to YES if also using AVCustomRoutingController.
+ */
+@property (nonatomic) BOOL detectsCustomRoutes API_AVAILABLE(ios(16.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+/*!
@constant AVRouteDetectorMultipleRoutesDetectedDidChangeNotification
@abstract Posted when the value of multipleRoutesDetected changes.
*/
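A minimal sketch of opting a route detector into custom route detection, per the new property above; it assumes an AVCustomRoutingController has been set up elsewhere (iOS 16 only):

    #import <AVFoundation/AVFoundation.h>

    static AVRouteDetector *MakeRouteDetector(void)
    {
        AVRouteDetector *detector = [[AVRouteDetector alloc] init];
        detector.routeDetectionEnabled = YES;
        // Per the header, only set this when an AVCustomRoutingController is also in use.
        detector.detectsCustomRoutes = YES;
        return detector;
    }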
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferGenerator.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferGenerator.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferGenerator.h 2022-02-23 07:56:31.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferGenerator.h 2022-05-31 14:52:22.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2014-2018 Apple Inc. All rights reserved.
+ Copyright 2014-2022 Apple Inc. All rights reserved.
*/
@@ -32,26 +32,91 @@
*/
@class AVSampleBufferRequest;
+@class AVSampleBufferGeneratorBatch;
+
@class AVSampleBufferGeneratorInternal;
-API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos)
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleBufferGenerator : NSObject {
@private
AVSampleBufferGeneratorInternal *_generator;
}
AV_INIT_UNAVAILABLE
-/* If timebase is NULL, requests will be handled synchronously. */
+/*!
+ @method initWithAsset: timebase:
+ @abstract Creates an instance of AVSampleBufferGenerator to generate sample buffers from the specified asset.
+ @param asset
+ The asset from which sample buffers will be created.
+ @param timebase
+ The generator timebase, which governs when sample data for sample buffers is loaded. If NULL, sample data is loaded synchronously.
+ @result An instance of AVSampleBufferGenerator.
+ @discussion If the specified asset is an HTTP Live Streaming asset, the generator cannot create sample buffers.
+*/
- (instancetype)initWithAsset:(AVAsset *)asset timebase:(nullable CMTimebaseRef)timebase NS_DESIGNATED_INITIALIZER;
+/*!
+ @method createSampleBufferForRequest: error:
+ @abstract Creates a sample buffer and if requested, attempts to load its data asynchronously. Attempt may fail based on generator configuration or file format.
+ See [AVSampleBufferGenerator notifyOfDataReadyForSampleBuffer: completionHandler:] to get notified when the sample buffer data is available.
+ @param request
+ An instance of AVSampleBufferRequest representing the CMSampleBuffer creation request.
+ @param outError
+ A pointer to an NSError object that will be populated with failure information, if sample buffer creation fails.
+ @result A CMSampleBuffer object referencing the output sample buffer.
+ @discussion If the AVSampleBufferGenerator was created with a NULL timebase, any associated AVSampleBufferRequest will default to using AVSampleBufferRequestModeImmediate.
+*/
+- (nullable CMSampleBufferRef)createSampleBufferForRequest:(AVSampleBufferRequest *)request error:(NSError * _Nullable * _Nullable)outError CF_RETURNS_RETAINED API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0)) NS_SWIFT_NAME(makeSampleBuffer(for:));
+
/* It is an error to use an AVSampleBufferRequest with mode set to AVSampleBufferRequestModeScheduled when the AVSampleBufferGenerator was created with a NULL timebase. */
-- (nullable CMSampleBufferRef)createSampleBufferForRequest:(AVSampleBufferRequest *)request CF_RETURNS_RETAINED;
+- (nullable CMSampleBufferRef)createSampleBufferForRequest:(AVSampleBufferRequest *)request CF_RETURNS_RETAINED API_DEPRECATED("Use -createSampleBufferForRequest: error:, passing NULL for the error if not required", macos(10.10, 13.0)) API_UNAVAILABLE(ios, tvos, watchos);
+
+/*!
+ @method makeBatch
+ @abstract Creates a batch to handle multiple sample buffers, allowing sample data to be loaded asynchronously and I/O to be optimized when possible.
+ @result An instance of an AVSampleBufferGeneratorBatch that can be used in calls to createSampleBufferForRequest:addingToBatch:error: of the same AVSampleBufferGenerator instance.
+*/
+- (AVSampleBufferGeneratorBatch *) makeBatch API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0));
-/* completionHandler is called when data is ready or as soon as an error has occurred. */
+/*!
+ @method createSampleBufferForRequest: addingToBatch: error:
+ @abstract Creates a sample buffer and attempts to defer I/O for its data. Attempt may fail based on generator configuration or file format.
+ The [AVSampleBufferGeneratorBatch makeDataReadyWithCompletionHandler:] should be called once to commence I/O and load sample data for all CMSampleBuffers within a batch.
+ Any subsequent calls to createSampleBufferForRequest:addingToBatch:error: will throw an exception.
+ @param request
+ An instance of AVSampleBufferRequest representing the CMSampleBuffer creation request
+ @param batch
+ An instance of AVSampleBufferGeneratorBatch to contain the output sample buffer. If nil, an exception is thrown.
+ Must be created by calling makeBatch on the same instance of AVSampleBufferGenerator. An exception will be thrown otherwise.
+ @param outError
+ A pointer to an NSError object that will be populated with failure information, if sample buffer creation fails.
+ @result A CMSampleBuffer object referencing the output sample buffer. The generator may defer I/O to fetch sample data depending on the source of the sample data and
+ the generator's timebase.
+*/
+- (nullable CMSampleBufferRef)createSampleBufferForRequest:(AVSampleBufferRequest *)request addingToBatch: (AVSampleBufferGeneratorBatch *)batch error:(NSError * _Nullable * _Nullable)outError CF_RETURNS_RETAINED API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0)) NS_SWIFT_NAME(makeSampleBuffer(for:addTo:));
+
+/*!
+ @method notifyOfDataReadyForSampleBuffer: completionHandler:
+ @abstract Allows the client to get notified when the sample buffer data is ready, or as soon as an error has occurred.
+ @param completionHandler
+ The completionHandler will be called when the sample buffer data is ready, or as soon as an error has occurred.
+*/
+ (void)notifyOfDataReadyForSampleBuffer:(CMSampleBufferRef)sbuf completionHandler:(void (^)(BOOL dataReady, NSError *error))completionHandler;
@end
+
+/*!
+ @enum AVSampleBufferRequestDirection
+ @abstract
+ Indicates the direction in which the samples should be generated for the AVSampleBufferRequest.
+
+ @constant AVSampleBufferRequestDirectionNone
+ Indicates only one sample will be loaded at [AVSampleBufferRequest startCursor], and the [AVSampleBufferRequest limitCursor], [AVSampleBufferRequest preferredMinSampleCount], and [AVSampleBufferRequest maxSampleCount] will be ignored.
+ @constant AVSampleBufferRequestDirectionForward
+ Indicates zero or more following samples may be loaded from [AVSampleBufferRequest startCursor], subject to [AVSampleBufferRequest limitCursor], [AVSampleBufferRequest preferredMinSampleCount], and [AVSampleBufferRequest maxSampleCount]
+ @constant AVSampleBufferRequestDirectionReverse
+ Indicates zero or more preceding samples may be loaded from [AVSampleBufferRequest startCursor], subject to [AVSampleBufferRequest limitCursor], [AVSampleBufferRequest preferredMinSampleCount], and [AVSampleBufferRequest maxSampleCount]*/
typedef NS_ENUM(NSInteger, AVSampleBufferRequestDirection) {
AVSampleBufferRequestDirectionForward = +1,
AVSampleBufferRequestDirectionNone = 0,
@@ -85,7 +150,7 @@
@abstract An AVSampleBufferRequest describes a CMSampleBuffer creation request.
*/
-API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos)
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleBufferRequest : NSObject {
@private
AVSampleBufferRequestInternal *_request;
@@ -97,10 +162,7 @@
/* mandatory: the created CMSampleBuffer must include the sample at this position */
@property (nonatomic, retain, readonly) AVSampleCursor *startCursor;
-/* If AVSampleBufferRequestDirectionNone, only one sample will be loaded and limitCursor, preferredMinSampleCount, and maxSampleCount will be ignored.
- If AVSampleBufferRequestDirectionForward, zero or more following samples may be included, subject to limitCursor, preferredMinSampleCount, and maxSampleCount.
- If AVSampleBufferRequestDirectionReverse, zero or more preceeding samples may be included, subject to limitCursor, preferredMinSampleCount, and maxSampleCount.
- Default is AVSampleBufferRequestDirectionNone. */
+/* Default is AVSampleBufferRequestDirectionNone. */
@property (nonatomic, assign) AVSampleBufferRequestDirection direction;
/* optional: if not nil, the sequence of samples to be loaded may include the sample at this position, but no further. */
@@ -120,6 +182,38 @@
@end
+/*!
+ @class AVSampleBufferGeneratorBatch
+
+ @abstract An AVSampleBufferGeneratorBatch provides an optimized way to load sample data asynchronously for multiple CMSampleBuffers in an asset.
+ @discussion
+ The AVSampleBufferGeneratorBatch loads sample data asynchronously, by aggregating adjacent I/O requests and overlapping them when possible for all CMSampleBuffers within a batch.
+ An AVSampleBufferGeneratorBatch is associated with an AVSampleBufferGenerator. See -[AVSampleBufferGenerator makeBatch] to create an AVSampleBufferGeneratorBatch.
+ See -[AVSampleBufferGeneratorBatch createSampleBufferForRequest: addingToBatch: error:] to create a CMSampleBuffer, defer I/O for its data, and build up a batch.
+*/
+
+API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0), watchos(9.0))
+@interface AVSampleBufferGeneratorBatch : NSObject
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method makeDataReadyWithCompletionHandler:
+ @abstract Loads sample data asynchronously for all CMSampleBuffers within a batch.
+ This can only be called once on a batch; an exception will be thrown otherwise.
+ @param completionHandler
+ The completionHandler is called once, when all CMSampleBuffers in the batch are data-ready, or as soon as an error has occurred.
+*/
+- (void) makeDataReadyWithCompletionHandler: (void (^)(NSError * _Nullable error))completionHandler;
+
+/*!
+ @method cancel
+ @abstract Attempt to cancel any I/O for this batch. The associated sample buffers will have their data ready handler invoked with an error.
+*/
+- (void) cancel;
+
+@end
+
#pragma pack(pop)
NS_ASSUME_NONNULL_END
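A hedged end-to-end sketch of the new batched generation flow: build a request from a sample cursor, add it to a batch, then commit I/O once with makeDataReadyWithCompletionHandler:. Seeding the request with -[AVAssetTrack makeSampleCursorAtFirstSampleInDecodeOrder] and using a NULL timebase are assumptions made for illustration:

    #import <AVFoundation/AVFoundation.h>

    static void GenerateFirstSample(AVAsset *asset, AVAssetTrack *track)
    {
        AVSampleBufferGenerator *generator =
            [[AVSampleBufferGenerator alloc] initWithAsset:asset timebase:NULL];

        AVSampleCursor *cursor = [track makeSampleCursorAtFirstSampleInDecodeOrder];
        if (cursor == nil) {
            return; // track has no samples or does not support cursors
        }
        AVSampleBufferRequest *request = [[AVSampleBufferRequest alloc] initWithStartCursor:cursor];

        AVSampleBufferGeneratorBatch *batch = [generator makeBatch];
        NSError *error = nil;
        CMSampleBufferRef sampleBuffer = [generator createSampleBufferForRequest:request
                                                                    addingToBatch:batch
                                                                            error:&error];
        if (sampleBuffer == NULL) {
            NSLog(@"Sample buffer creation failed: %@", error);
            return;
        }

        // Commits I/O for every sample buffer in the batch; callable only once per batch.
        [batch makeDataReadyWithCompletionHandler:^(NSError * _Nullable batchError) {
            if (batchError == nil) {
                // sampleBuffer is now data-ready and can be consumed.
            }
            CFRelease(sampleBuffer); // returned retained (CF_RETURNS_RETAINED)
        }];
    }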
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h 2022-02-23 10:57:35.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h 2022-06-03 18:07:14.000000000 -0400
@@ -214,6 +214,12 @@
An object returned by a previous call to -addPeriodicTimeObserverForInterval:queue:usingBlock: or -addBoundaryTimeObserverForTimes:queue:usingBlock:.
@discussion
Upon return, the caller is guaranteed that no new time observer blocks will begin executing. Depending on the calling thread and the queue used to add the time observer, an in-flight block may continue to execute after this method returns. You can guarantee synchronous time observer removal by enqueuing the call to -removeTimeObserver: on that queue. Alternatively, call dispatch_sync(queue, ^{}) after -removeTimeObserver: to wait for any in-flight blocks to finish executing. -removeTimeObserver: should be used to explicitly cancel each time observer added using -addPeriodicTimeObserverForInterval:queue:usingBlock: and -addBoundaryTimeObserverForTimes:queue:usingBlock:.
+
+ This method throws an exception for any of the following reasons:
+ - observer was added by another AVSampleBufferRenderSynchronizer
+ - observer was not returned by either
+ -addPeriodicTimeObserverForInterval:queue:usingBlock:
+ -addBoundaryTimeObserverForTimes:queue:usingBlock:
*/
- (void)removeTimeObserver:(id)observer;
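A minimal sketch of the observer pairing the discussion above requires: the token must come from the same synchronizer, and enqueuing dispatch_sync on the registration queue (from another queue) guarantees in-flight blocks have finished:

    #import <AVFoundation/AVFoundation.h>

    static id AddObserver(AVSampleBufferRenderSynchronizer *synchronizer, dispatch_queue_t queue)
    {
        return [synchronizer addPeriodicTimeObserverForInterval:CMTimeMake(1, 10)
                                                          queue:queue
                                                     usingBlock:^(CMTime time) {
            // periodic work driven by the synchronizer's timebase
        }];
    }

    static void RemoveObserver(AVSampleBufferRenderSynchronizer *synchronizer, id token, dispatch_queue_t queue)
    {
        // The token must have been returned by this synchronizer, or the call throws.
        [synchronizer removeTimeObserver:token];
        dispatch_sync(queue, ^{}); // wait for any in-flight observer block (call from another queue)
    }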
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleCursor.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleCursor.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleCursor.h 2022-02-23 07:59:44.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleCursor.h 2022-05-31 15:04:21.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2014-2021 Apple Inc. All rights reserved.
+ Copyright 2014-2022 Apple Inc. All rights reserved.
*/
@@ -30,7 +30,7 @@
@class AVSampleCursorInternal;
-API_AVAILABLE(macos(10.10)) API_UNAVAILABLE(ios, tvos, watchos)
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleCursor : NSObject <NSCopying> {
@private
AVSampleCursorInternal *_sampleCursor;
@@ -80,6 +80,7 @@
@end
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleCursor (AVSampleCursorTemporalPosition)
/*!
@@ -126,7 +127,7 @@
@end
-
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleCursor (AVSampleCursorCurrentSampleInfo)
/*!
@@ -199,7 +200,7 @@
@property currentSampleDependencyAttachments
@abstract Provides a dictionary containing dependency related sample buffer attachments, if known. See kCMSampleAttachmentKey_... in CoreMedia/CMSampleBuffer.h.
*/
-@property (nonatomic, readonly, nullable) NSDictionary *currentSampleDependencyAttachments API_AVAILABLE(macos(12.0)) API_UNAVAILABLE(ios, tvos, watchos);
+@property (nonatomic, readonly, nullable) NSDictionary *currentSampleDependencyAttachments API_AVAILABLE(macos(12.0), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@struct AVSampleCursorAudioDependencyInfo
@@ -222,7 +223,7 @@
the number of steps back you have taken. This implies that if the current sample (before this walk) is independently decodable, with an
audioSampleRefreshCount of zero, no walk is required.
*/
-@property (nonatomic, readonly) AVSampleCursorAudioDependencyInfo currentSampleAudioDependencyInfo API_AVAILABLE(macos(10.15)) API_UNAVAILABLE(ios, tvos, watchos);
+@property (nonatomic, readonly) AVSampleCursorAudioDependencyInfo currentSampleAudioDependencyInfo API_AVAILABLE(macos(10.15), ios(16.0), tvos(16.0), watchos(9.0));
/*!
@property samplesRequiredForDecoderRefresh
@@ -240,11 +241,11 @@
// in order to decode the sample at the position of mySampleCursor in full
*/
-@property (nonatomic, readonly) NSInteger samplesRequiredForDecoderRefresh API_AVAILABLE(macos(10.11)) API_UNAVAILABLE(ios, tvos, watchos);
+@property (nonatomic, readonly) NSInteger samplesRequiredForDecoderRefresh API_AVAILABLE(macos(10.11), ios(16.0), tvos(16.0), watchos(9.0));
@end
-
+API_AVAILABLE(macos(10.10), ios(16.0), tvos(16.0), watchos(9.0))
@interface AVSampleCursor (AVSampleCursorSampleStorageInfo)
/*!
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSynchronizedLayer.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSynchronizedLayer.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSynchronizedLayer.h 2022-02-23 07:16:13.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSynchronizedLayer.h 2022-05-31 15:04:20.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -69,9 +69,13 @@
/*!
@property playerItem
@abstract Indicates the instance of AVPlayerItem to which the timing of the AVSynchronizedLayer is synchronized.
- @discussion This property must be accessed on the main thread/queue.
+ @discussion Before macOS 13, iOS 16, tvOS 16, and watchOS 9, this property must be accessed on the main thread/queue.
*/
-@property (nonatomic, retain, nullable) AVPlayerItem *playerItem NS_SWIFT_UI_ACTOR;
+@property (nonatomic, retain, nullable) AVPlayerItem *playerItem
+#if ! AVF_DEPLOYING_TO_2022_RELEASES_AND_LATER
+NS_SWIFT_UI_ACTOR;
+#endif
+;
@end
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVUtilities.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVUtilities.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVUtilities.h 2022-02-23 07:16:18.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVUtilities.h 2022-05-21 23:02:28.000000000 -0400
@@ -4,26 +4,13 @@
Framework: AVFoundation
- Copyright 2010-2015 Apple Inc. All rights reserved.
+ Copyright 2010-2015, 2018-2020, 2022 Apple Inc. All rights reserved.
*/
#import <AVFoundation/AVBase.h>
-#import <CoreGraphics/CGBase.h>
-#import <CoreGraphics/CGGeometry.h>
-
-/*!
- @function AVMakeRectWithAspectRatioInsideRect
- @abstract Returns a scaled CGRect that maintains the aspect ratio specified by a CGSize within a bounding CGRect.
- @discussion This is useful when attempting to fit the presentationSize property of an AVPlayerItem within the bounds of another CALayer.
- You would typically use the return value of this function as an AVPlayerLayer frame property value. For example:
- myPlayerLayer.frame = AVMakeRectWithAspectRatioInsideRect(myPlayerItem.presentationSize, mySuperLayer.bounds);
- @param aspectRatio The width & height ratio, or aspect, you wish to maintain.
- @param boundingRect The bounding CGRect you wish to fit into.
- */
-
-AVF_EXPORT CGRect AVMakeRectWithAspectRatioInsideRect(CGSize aspectRatio, CGRect boundingRect) API_AVAILABLE(macos(10.7), ios(4.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+#import <AVFoundation/AVGeometry.h>
#else
#import <AVFCore/AVUtilities.h>
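AVMakeRectWithAspectRatioInsideRect itself is unchanged by this move; only its declaration now comes from AVGeometry.h. A hedged reminder of the usage the removed comment described (the names are placeholders, and AVPlayerLayer is not available on watchOS):

    #import <AVFoundation/AVFoundation.h> // AVGeometry.h is pulled in by the umbrella header
    #import <QuartzCore/QuartzCore.h>

    static void FitPlayerLayer(AVPlayerLayer *playerLayer, AVPlayerItem *item, CALayer *superlayer)
    {
        // Aspect-fit the item's presentation size inside the superlayer's bounds.
        playerLayer.frame = AVMakeRectWithAspectRatioInsideRect(item.presentationSize,
                                                                superlayer.bounds);
    }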
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoCompositing.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoCompositing.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoCompositing.h 2022-02-23 07:56:28.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoCompositing.h 2022-05-31 14:49:49.000000000 -0400
@@ -317,7 +317,12 @@
*/
- (nullable AVTimedMetadataGroup *)sourceTimedMetadataByTrackID:(CMPersistentTrackID)trackID API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
-/* callback the custom compositor should call when composition succeeded */
+/*!
+ @method finishWithComposedVideoFrame:
+ @abstract The method that the custom compositor calls when composition succeeds.
+ @param composedVideoFrame
+ The video frame to finish with.
+*/
- (void)finishWithComposedVideoFrame:(CVPixelBufferRef)composedVideoFrame;
/* callback the custom compositor should call when composition failed. The error parameter should describe the actual error. */
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoComposition.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoComposition.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoComposition.h 2022-02-23 10:57:33.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoComposition.h 2022-06-03 18:07:13.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2021 Apple Inc. All rights reserved.
+ Copyright 2010-2022 Apple Inc. All rights reserved.
*/
@@ -43,7 +43,7 @@
AVVideoCompositionInternal *_videoComposition;
}
-/*
+/*!
@method videoCompositionWithPropertiesOfAsset:
@abstract
Returns a new instance of AVVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
@@ -61,7 +61,28 @@
If the specified asset has no video tracks, this method will return an AVVideoComposition instance with an empty collection of instructions.
*/
-+ (AVVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset API_AVAILABLE(macos(10.9), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos);
++ (AVVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithPropertiesOfAsset:completionHandler:", macos(10.9, API_TO_BE_DEPRECATED), ios(6.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
+
+/*!
+ @method videoCompositionWithPropertiesOfAsset:completionHandler:
+ @abstract
+ Vends a new instance of AVVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
+ @param asset
+ An instance of AVAsset.
+ @param completionHandler
+ A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
+ @discussion
+ The new AVVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks.
+ It will also have the following values for its properties:
+
+ - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default framerate of 30fps is used.
+ - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
+ - A renderScale of 1.0.
+ - A nil animationTool.
+
+ If the specified asset has no video tracks, this method will return an AVVideoComposition instance with an empty collection of instructions.
+*/
++ (void)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset completionHandler:(void (^)(AVVideoComposition * _Nullable videoComposition, NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
/* indicates a custom compositor class to use. The class must implement the AVVideoCompositing protocol.
If nil, the default, internal video compositor is used */
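A minimal sketch of adopting the new asynchronous factory in place of the deprecated synchronous one; the player-item hand-off is only an example of what to do in the completion handler:

    #import <AVFoundation/AVFoundation.h>

    static void ApplyComposition(AVAsset *asset, AVPlayerItem *playerItem)
    {
        [AVVideoComposition videoCompositionWithPropertiesOfAsset:asset
                                                 completionHandler:^(AVVideoComposition * _Nullable composition,
                                                                     NSError * _Nullable error) {
            if (composition != nil) {
                playerItem.videoComposition = composition;
            } else {
                NSLog(@"Could not build video composition: %@", error);
            }
        }];
    }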
@@ -150,8 +171,8 @@
@interface AVVideoComposition (AVVideoCompositionFiltering)
-/*
- @method videoCompositionWithAsset:options:applyingCIFiltersWithHandler:
+/*!
+ @method videoCompositionWithAsset:applyingCIFiltersWithHandler:
@abstract
Returns a new instance of AVVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
@param asset An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
@@ -186,7 +207,53 @@
}];
*/
+ (AVVideoComposition *)videoCompositionWithAsset:(AVAsset *)asset
- applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+ applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:", macos(10.11, API_TO_BE_DEPRECATED), ios(9.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
+
+/*!
+ @method videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:
+ @abstract
+ Vends a new instance of AVVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
+ @param asset
+ An instance of AVAsset.
+ @param completionHandler
+ A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
+ @discussion
+ The new AVVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no effect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification, see AVPlayerItemFailedToPlayToEndTimeErrorKey in the notification payload.
+
+ NOTE: The returned AVVideoComposition's properties are private and support only CIFilter-based operations. Mutations are not supported, either in the values of properties of the AVVideoComposition itself or in its private instructions. If rotations or other transformations are desired, they must be accomplished via the application of CIFilters during the execution of your specified handler.
+
+ The video composition will also have the following values for its properties:
+
+ - The original timing of the asset's first enabled video track will be used.
+ - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
+ - A renderScale of 1.0.
+
+ The default CIContext has the following properties:
+
+ - iOS: Device RGB color space
+ - OS X: sRGB color space
+
+ Example usage:
+
+ [AVVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
+ ^(AVAsynchronousCIImageFilteringRequest *request)
+ {
+ NSError *err = nil;
+ CIImage *filtered = myRenderer(request, &err);
+ if (filtered)
+ [request finishWithImage:filtered context:nil];
+ else
+ [request finishWithError:err];
+ } completionHandler:
+ ^(AVVideoComposition * _Nullable videoComposition, NSError * _Nullable error)
+ {
+ if (videoComposition != nil) {
+ playerItem.videoComposition = videoComposition;
+ } else {
+ // handle error
+ }
+ }];
+ */
++ (void)videoCompositionWithAsset:(AVAsset *)asset applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier completionHandler:(void (^)(AVVideoComposition * _Nullable videoComposition, NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
@end
@@ -213,7 +280,7 @@
*/
+ (AVMutableVideoComposition *)videoComposition;
-/*
+/*!
@method videoCompositionWithPropertiesOfAsset:
@abstract
Returns a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
@@ -231,9 +298,31 @@
If the specified asset has no video tracks, this method will return an AVMutableVideoComposition instance with an empty collection of instructions.
*/
-+ (AVMutableVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset API_AVAILABLE(macos(10.9), ios(6.0), tvos(9.0)) API_UNAVAILABLE(watchos);
++ (AVMutableVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithPropertiesOfAsset:completionHandler:", macos(10.9, API_TO_BE_DEPRECATED), ios(6.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
-/*
+/*!
+ @method videoCompositionWithPropertiesOfAsset:completionHandler:
+ @abstract
+ Vends a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks.
+ @param asset
+ An instance of AVAsset.
+ @param completionHandler
+ A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
+ @discussion
+ The new AVMutableVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
+ It will also have the following values for its properties:
+
+ - If the asset has exactly one video track, the original timing of the source video track will be used. If the asset has more than one video track, and the nominal frame rate of any of video tracks is known, the reciprocal of the greatest known nominalFrameRate will be used as the value of frameDuration. Otherwise, a default framerate of 30fps is used.
+ - If the specified asset is an instance of AVComposition, the renderSize will be set to the naturalSize of the AVComposition; otherwise the renderSize will be set to a value that encompasses all of the asset's video tracks.
+ - A renderScale of 1.0.
+ - A nil animationTool.
+
+ If the specified asset has no video tracks, this method will return an AVMutableVideoComposition instance with an empty collection of instructions.
+
+*/
++ (void)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset completionHandler:(void (^)(AVMutableVideoComposition * _Nullable videoComposition, NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
+/*!
@method videoCompositionWithPropertiesOfAsset:prototypeInstruction:
@abstract
Returns a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks, and also overrides default properties with those from a prototypeInstruction.
@@ -249,7 +338,35 @@
myVideoComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:myAsset prototypeInstruction:myPrototypeInstruction];
*/
-+ (AVMutableVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset prototypeInstruction:(AVVideoCompositionInstruction *)prototypeInstruction API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0)) API_UNAVAILABLE(watchos);
++ (AVMutableVideoComposition *)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset prototypeInstruction:(AVVideoCompositionInstruction *)prototypeInstruction API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithPropertiesOfAsset:prototypeInstruction:completionHandler:", macos(10.15, API_TO_BE_DEPRECATED), ios(13.0, API_TO_BE_DEPRECATED), tvos(13.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
+
+/*!
+ @method videoCompositionWithPropertiesOfAsset:prototypeInstruction:completionHandler:
+ @abstract
+ Vends a new instance of AVMutableVideoComposition with values and instructions suitable for presenting the video tracks of the specified asset according to its temporal and geometric properties and those of its tracks, and also overrides default properties with those from a prototypeInstruction.
+ @param asset
+ An instance of AVAsset.
+ @param prototypeInstruction
+ Custom instructions that the client can choose to override.
+ @param completionHandler
+ A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
+ @discussion
+ Also see videoCompositionWithPropertiesOfAsset:completionHandler:.
+ The new AVMutableVideoComposition will have instructions that respect the spatial properties and timeRanges of the specified asset's video tracks. Anything not pertaining to spatial layout and timing, such as background color for their composition or post-processing behaviors, is eligible to be specified via a prototype instruction.
+ Example: To add a background color,
+ myPrototypeInstruction = [[AVMutableVideoCompositionInstruction alloc] init];
+ myPrototypeInstruction.backgroundColor = myCGColorRef; // Do not use constant CGColorRef colors here.
+ myVideoComposition = [AVVideoComposition videoCompositionWithPropertiesOfAsset:myAsset prototypeInstruction:myPrototypeInstruction completionHandler:^(AVMutableVideoComposition * _Nullable myVideoComposition, NSError * _Nullable error) {
+ if (myVideoComposition != nil) {
+ // use myVideoComposition
+ }
+ else {
+ // handle error
+ }
+ }];
+
+ */
++ (void)videoCompositionWithPropertiesOfAsset:(AVAsset *)asset prototypeInstruction:(AVVideoCompositionInstruction *)prototypeInstruction completionHandler:(void (^)(AVMutableVideoComposition * _Nullable videoComposition, NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
/* indicates the custom compositor class to use. If nil, the default, internal video compositor is used */
@property (nonatomic, retain, nullable) Class<AVVideoCompositing> customVideoCompositorClass API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -337,8 +454,8 @@
@interface AVMutableVideoComposition (AVMutableVideoCompositionFiltering)
-/*
- @method videoCompositionWithAsset:options:applyingCIFiltersWithHandler:
+/*!
+ @method videoCompositionWithAsset:applyingCIFiltersWithHandler:
@abstract
Returns a new instance of AVMutableVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
@param asset An instance of AVAsset. For best performance, ensure that the duration and tracks properties of the asset are already loaded before invoking this method.
@@ -371,7 +488,51 @@
}];
*/
+ (AVMutableVideoComposition *)videoCompositionWithAsset:(AVAsset *)asset
- applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+ applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:", macos(10.11, API_TO_BE_DEPRECATED), ios(9.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_DEPRECATED_WITH_REPLACEMENT("videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:", watchos(2.0, API_TO_BE_DEPRECATED));
+
+/*!
+ @method videoCompositionWithAsset:applyingCIFiltersWithHandler:completionHandler:
+ @abstract
+ Vends a new instance of AVMutableVideoComposition with values and instructions that will apply the specified handler block to video frames represented as instances of CIImage.
+ @param asset
+ An instance of AVAsset.
+ @param completionHandler
+ A block that is invoked when the new video composition has finished being created. If the `videoComposition` parameter is nil, the `error` parameter describes the failure that occurred.
+ @discussion
+ The new AVMutableVideoComposition will cause the specified handler block to be called to filter each frame of the asset's first enabled video track. The handler block should use the properties of the provided AVAsynchronousCIImageFilteringRequest and respond using finishWithImage:context: with a "filtered" new CIImage (or the provided source image for no effect). In the event of an error, respond to the request using finishWithError:. The error can be observed via AVPlayerItemFailedToPlayToEndTimeNotification, see AVPlayerItemFailedToPlayToEndTimeErrorKey in the notification payload. The client can set sourceTrackIDForFrameTiming to kCMPersistentTrackID_Invalid and frameDuration to an appropriate value in order to specify the maximum output frame rate independent of the source track timing.
+
+ The video composition will also have the following values for its properties:
+
+ - The original timing of the asset's first enabled video track will be used.
+ - A renderSize that encompasses the asset's first enabled video track respecting the track's preferredTransform.
+ - A renderScale of 1.0.
+
+ The default CIContext has the following properties:
+
+ - iOS: Device RGB color space
+ - OS X: sRGB color space
+
+ Example usage:
+
+ [AVMutableVideoComposition videoCompositionWithAsset:srcAsset applyingCIFiltersWithHandler:
+ ^(AVAsynchronousCIImageFilteringRequest *request)
+ {
+ NSError *err = nil;
+ CIImage *filtered = myRenderer(request, &err);
+ if (filtered)
+ [request finishWithImage:filtered context:nil];
+ else
+ [request finishWithError:err];
+ } completionHandler:
+ ^(AVMutableVideoComposition * _Nullable videoComposition, NSError * _Nullable error)
+ {
+ if (videoComposition != nil) {
+ playerItem.videoComposition = videoComposition;
+ } else {
+ // handle error
+ }
+ }];
+*/
++ (void)videoCompositionWithAsset:(AVAsset *)asset applyingCIFiltersWithHandler:(void (^)(AVAsynchronousCIImageFilteringRequest *request))applier completionHandler:(void (^)(AVMutableVideoComposition * _Nullable videoComposition, NSError * _Nullable error))completionHandler API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
@end
@@ -592,6 +753,8 @@
During a transform ramp, the affine transform is interpolated between the values set at the ramp's start time and end time.
Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity;
after the last time for which a transform is set, the affine transform is held constant at that last value;
+
+ This method throws an exception if the time range overlaps the time range of an existing transform ramp or if the time range does not have a numeric start time and duration.
*/
- (void)setTransformRampFromStartTransform:(CGAffineTransform)startTransform toEndTransform:(CGAffineTransform)endTransform timeRange:(CMTimeRange)timeRange;
@@ -612,6 +775,8 @@
Sets a fixed transform to apply from the specified time until the next time at which a transform is set; this is the same as setting a flat ramp for that time range.
Before the first specified time for which a transform is set, the affine transform is held constant at the value of CGAffineTransformIdentity;
after the last time for which a transform is set, the affine transform is held constant at that last value;
+
+ This method throws an exception if time is not numeric.
*/
- (void)setTransform:(CGAffineTransform)transform atTime:(CMTime)time;
@@ -626,8 +791,9 @@
The timeRange over which the value of the opacity will be interpolated between startOpacity and endOpacity.
@discussion
During an opacity ramp, opacity is computed using a linear interpolation.
- Before the first time for which an opacity is set, the opacity is held constant at 1.0; after the last specified time, the opacity is held constant at the last value.
-*/
+ Before the first time for which an opacity is set, the opacity is held constant at 1.0; after the last specified time, the opacity is held constant at the last value.
+ This method throws an exception if the time range does not have a numeric start time and duration.
+ */
- (void)setOpacityRampFromStartOpacity:(float)startOpacity toEndOpacity:(float)endOpacity timeRange:(CMTimeRange)timeRange;
/*
@@ -640,6 +806,7 @@
@discussion
Sets a fixed opacity to apply from the specified time until the next time at which an opacity is set; this is the same as setting a flat ramp for that time range.
Before the first time for which an opacity is set, the opacity is held constant at 1.0; after the last specified time, the opacity is held constant at the last value.
+ This method throws an exception if time is not numeric.
*/
- (void)setOpacity:(float)opacity atTime:(CMTime)time;
@@ -661,6 +828,8 @@
When the starting or ending rectangle is empty, interpolations take into account the origin and size of the empty rectangle.
Before the first specified time for which a crop rectangle is set, the crop rectangle is held constant to CGRectInfinite
after the last time for which a crop rectangle is set, the crop rectangle is held constant at that last value.
+
+ This method throws an exception if the time range overlaps the time range of an existing crop rectangle ramp, or if the time range does not have a numeric start time and duration.
*/
- (void)setCropRectangleRampFromStartCropRectangle:(CGRect)startCropRectangle toEndCropRectangle:(CGRect)endCropRectangle timeRange:(CMTimeRange)timeRange API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
@@ -679,6 +848,8 @@
Sets a fixed crop rectangle to apply from the specified time until the next time at which a crop rectangle is set; this is the same as setting a flat ramp for that time range.
Before the first specified time for which a crop rectangle is set, the crop rectangle is held constant to CGRectInfinite
after the last time for which a crop rectangle is set, the crop rectangle is held constant at that last value.
+
+ This method throws an exception if time is not numeric.
*/
- (void)setCropRectangle:(CGRect)cropRectangle atTime:(CMTime)time API_AVAILABLE(macos(10.9), ios(7.0), tvos(9.0)) API_UNAVAILABLE(watchos);
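A minimal sketch of a layer-instruction ramp that satisfies the new exception notes: the time range is numeric and does not overlap another opacity ramp. The fade-out itself is just an example:

    #import <AVFoundation/AVFoundation.h>

    static AVMutableVideoCompositionLayerInstruction *MakeFadeOut(AVAssetTrack *videoTrack, CMTime duration)
    {
        AVMutableVideoCompositionLayerInstruction *layerInstruction =
            [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        // Both the start time and the duration must be numeric, and the range must not
        // overlap an existing opacity ramp; otherwise the call throws.
        CMTimeRange fadeRange = CMTimeRangeMake(kCMTimeZero, duration);
        [layerInstruction setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:fadeRange];
        return layerInstruction;
    }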
@@ -758,7 +929,12 @@
@interface AVAsset (AVAssetVideoCompositionUtility)
-- (CMPersistentTrackID)unusedTrackID;
+- (CMPersistentTrackID)unusedTrackID
+#if __swift__
+API_DEPRECATED("Use findUnusedTrackID() instead", macos(10.7, 13.0), ios(4.0, 16.0), tvos(9.0, 16.0), watchos(1.0, 9.0));
+#else
+API_DEPRECATED_WITH_REPLACEMENT("findUnusedTrackIDWithCompletionHandler:", macos(10.7, API_TO_BE_DEPRECATED), ios(4.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED), watchos(1.0, API_TO_BE_DEPRECATED));
+#endif
/*!
@method findUnusedTrackIDWithCompletionHandler:
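The deprecation above points at the asynchronous replacement. A hedged sketch of calling it; the completion-handler shape (an unused CMPersistentTrackID plus an optional NSError) is assumed from the replacement name rather than shown in this hunk:

    #import <AVFoundation/AVFoundation.h>

    static void PickUnusedTrackID(AVAsset *asset)
    {
        [asset findUnusedTrackIDWithCompletionHandler:^(CMPersistentTrackID trackID, NSError * _Nullable error) {
            if (error == nil) {
                // Use trackID for a new composition track, animation tool, etc.
            }
        }];
    }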
@@ -789,7 +965,25 @@
In the course of validation, the receiver will invoke its validationDelegate with reference to any trouble spots in the video composition.
An exception will be raised if the delegate modifies the receiver's array of instructions or the array of layerInstructions of any AVVideoCompositionInstruction contained therein during validation.
*/
-- (BOOL)isValidForAsset:(nullable AVAsset *)asset timeRange:(CMTimeRange)timeRange validationDelegate:(nullable id<AVVideoCompositionValidationHandling>)validationDelegate API_AVAILABLE(macos(10.8), ios(5.0), tvos(9.0)) API_UNAVAILABLE(watchos);
+- (BOOL)isValidForAsset:(nullable AVAsset *)asset timeRange:(CMTimeRange)timeRange validationDelegate:(nullable id<AVVideoCompositionValidationHandling>)validationDelegate API_DEPRECATED_WITH_REPLACEMENT("determineValidityForAsset:timeRange:validationDelegate:completionHandler:", macos(10.8, API_TO_BE_DEPRECATED), ios(5.0, API_TO_BE_DEPRECATED), tvos(9.0, API_TO_BE_DEPRECATED)) API_UNAVAILABLE(watchos);
+
+/*!
+ @method determineValidityForAsset:timeRange:validationDelegate:completionHandler:
+ @abstract
+ Determines whether the timeRanges of the receiver's instructions conform to the requirements described for them immediately above (in connection with the instructions property) and also whether all of the layer instructions have a value for trackID that corresponds either to a track of the specified asset or to the receiver's animationTool.
+ @param asset
+ Pass a reference to an AVAsset if you wish to validate the timeRanges of the instructions against the duration of the asset and the trackIDs of the layer instructions against the asset's tracks. Pass nil to skip that validation.
+ @param timeRange
+ A CMTimeRange. Only those instructions with timeRanges that overlap with the specified timeRange will be validated. To validate all instructions that may be used for playback or other processing, regardless of timeRange, pass CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity).
+ @param validationDelegate
+ Indicates an object implementing the AVVideoCompositionValidationHandling protocol to receive information about troublesome portions of a video composition during processing of -determineValidityForAsset:. May be nil.
+ @param completionHandler
+ A block that is invoked when a determination is made about whether the video composition is valid. If the `isValid` parameter is NO, either the video composition is not valid, in which case the `error` parameter will be nil, or the answer could not be determined, in which case the `error` parameter will be non-nil and describe the failure that occurred.
+@discussion
+ In the course of validation, the receiver will invoke its validationDelegate with reference to any trouble spots in the video composition.
+ An exception will be raised if the delegate modifies the receiver's array of instructions or the array of layerInstructions of any AVVideoCompositionInstruction contained therein during validation.
+*/
+- (void)determineValidityForAsset:(nullable AVAsset *)asset timeRange:(CMTimeRange)timeRange validationDelegate:(nullable id<AVVideoCompositionValidationHandling>)validationDelegate completionHandler:(void (^)(BOOL isValid, NSError * _Nullable error))completionHandler NS_SWIFT_ASYNC_NAME(isValid(for:timeRange:validationDelegate:)) API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
@end
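A minimal sketch of the asynchronous validity check declared above, validating over the full time range as the discussion recommends; passing nil skips the validation delegate:

    #import <AVFoundation/AVFoundation.h>

    static void ValidateComposition(AVVideoComposition *composition, AVAsset *asset)
    {
        CMTimeRange fullRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
        [composition determineValidityForAsset:asset
                                     timeRange:fullRange
                            validationDelegate:nil
                             completionHandler:^(BOOL isValid, NSError * _Nullable error) {
            if (!isValid) {
                NSLog(@"Video composition is not valid: %@", error);
            }
        }];
    }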
diff -ruN /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoSettings.h /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoSettings.h
--- /Applications/Xcode_13.3.0.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoSettings.h 2022-02-23 07:59:43.000000000 -0500
+++ /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVVideoSettings.h 2022-06-03 18:07:12.000000000 -0400
@@ -4,7 +4,7 @@
Framework: AVFoundation
- Copyright 2010-2020 Apple Inc. All rights reserved.
+ Copyright 2010-2020,2022 Apple Inc. All rights reserved.
*/
@@ -127,6 +127,12 @@
AVVideoTransferFunction_ITU_R_2100_HLG
AVVideoYCbCrMatrix_ITU_R_709_2
+ If you require HDR Linear colorimetry, you can use:
+
+ AVVideoColorPrimaries_ITU_R_2020
+ AVVideoTransferFunction_Linear
+ AVVideoYCbCrMatrix_ITU_R_2020
+
AVFoundation will color match if the source and destination color properties differ according to the following rules:
If you want to override the tagging of color properties in the video that you will be processing, set a value for AVVideoColorPropertiesKey:
@@ -153,6 +159,7 @@
AVF_EXPORT NSString *const AVVideoTransferFunction_SMPTE_240M_1995 API_AVAILABLE(macos(10.7)) API_UNAVAILABLE(ios, tvos, watchos);
AVF_EXPORT NSString *const AVVideoTransferFunction_SMPTE_ST_2084_PQ API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
AVF_EXPORT NSString *const AVVideoTransferFunction_ITU_R_2100_HLG API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+ AVF_EXPORT NSString *const AVVideoTransferFunction_Linear API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
AVF_EXPORT NSString *const AVVideoYCbCrMatrixKey /* NSString */ API_AVAILABLE(macos(10.7), ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos);
AVF_EXPORT NSString *const AVVideoYCbCrMatrix_ITU_R_709_2 API_AVAILABLE(macos(10.7), ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos);
AVF_EXPORT NSString *const AVVideoYCbCrMatrix_ITU_R_601_4 API_AVAILABLE(macos(10.7), ios(10.0), tvos(10.0)) API_UNAVAILABLE(watchos);
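A hedged sketch of an output-settings dictionary that opts into the HDR Linear colorimetry combination listed above; the codec and dimensions are placeholders:

    #import <AVFoundation/AVFoundation.h>

    static NSDictionary<NSString *, id> *MakeHDRLinearOutputSettings(void)
    {
        NSDictionary *colorProperties = @{
            AVVideoColorPrimariesKey   : AVVideoColorPrimaries_ITU_R_2020,
            AVVideoTransferFunctionKey : AVVideoTransferFunction_Linear,
            AVVideoYCbCrMatrixKey      : AVVideoYCbCrMatrix_ITU_R_2020,
        };
        return @{
            AVVideoCodecKey           : AVVideoCodecTypeHEVC, // placeholder codec choice
            AVVideoWidthKey           : @3840,
            AVVideoHeightKey          : @2160,
            AVVideoColorPropertiesKey : colorProperties,
        };
    }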