forked from dotnet/macios
-
Notifications
You must be signed in to change notification settings - Fork 1
AVFoundation watchOS xcode9 beta4
Sebastien Pouliot edited this page Jul 24, 2017
·
1 revision
# AVFoundation.framework
diff -ruN /Applications/Xcode9-beta3.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes /Applications/Xcode9-beta4.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes
--- /Applications/Xcode9-beta3.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 2017-07-01 00:02:21.000000000 -0400
+++ /Applications/Xcode9-beta4.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 2017-07-14 16:11:45.000000000 -0400
@@ -68,8 +68,8 @@
SwiftName: 'chromaticityValues(for:)'
MethodKind: Instance
- Selector: 'defaultDeviceWithDeviceType:mediaType:position:'
- SwiftName: 'default(_:for:position:)'
MethodKind: Class
+ SwiftPrivate: true
- Selector: 'defaultDeviceWithMediaType:'
SwiftName: 'default(for:)'
MethodKind: Class
@@ -99,8 +99,16 @@
MethodKind: Instance
- Name: AVCaptureDeviceDiscoverySession
SwiftName: AVCaptureDevice.DiscoverySession
+ Methods:
+ - Selector: 'discoverySessionWithDeviceTypes:mediaType:position:'
+ MethodKind: Class
+ SwiftPrivate: true
- Name: AVCaptureDeviceFormat
SwiftName: AVCaptureDevice.Format
+ Properties:
+ - Name: supportedColorSpaces
+ PropertyKind: Instance
+ SwiftPrivate: true
- Name: AVCaptureDeviceInputSource
SwiftName: AVCaptureDevice.InputSource
- Name: AVCaptureFileOutput
@@ -151,11 +159,26 @@
SwiftName: 'supportedPhotoCodecTypes(for:)'
MethodKind: Instance
- Selector: 'supportedPhotoPixelFormatTypesForFileType:'
- SwiftName: 'supportedPhotoPixelFormatTypes(for:)'
MethodKind: Instance
+ SwiftPrivate: true
- Selector: 'supportedRawPhotoPixelFormatTypesForFileType:'
- SwiftName: 'supportedRawPhotoPixelFormatTypes(for:)'
MethodKind: Instance
+ SwiftPrivate: true
+ Properties:
+ - Name: availablePhotoPixelFormatTypes
+ PropertyKind: Instance
+ SwiftPrivate: true
+ - Name: availableRawPhotoPixelFormatTypes
+ PropertyKind: Instance
+ SwiftPrivate: true
+ - Name: supportedFlashModes
+ PropertyKind: Instance
+ SwiftPrivate: true
+- Name: AVCapturePhotoSettings
+ Properties:
+ - Name: availablePreviewPhotoPixelFormatTypes
+ PropertyKind: Instance
+ SwiftPrivate: true
- Name: AVCaptureStillImageOutput
Methods:
- Selector: 'new'
@@ -176,7 +199,8 @@
MethodKind: Instance
Properties:
- Name: availableVideoCVPixelFormatTypes
- SwiftName: availableVideoPixelFormatTypes
+ PropertyKind: Instance
+ SwiftPrivate: true
- Name: AVCaptureVideoPreviewLayer
Methods:
- Selector: 'captureDevicePointOfInterestForPoint:'
@@ -215,6 +239,11 @@
- Selector: 'compositionTrackSegmentWithURL:trackID:sourceTimeRange:targetTimeRange:'
SwiftName: init(url:trackID:sourceTimeRange:targetTimeRange:)
MethodKind: Class
+- Name: AVMetadataMachineReadableCodeObject
+ Properties:
+ - Name: corners
+ PropertyKind: Instance
+ SwiftPrivate: true
- Name: AVMutableMovie
Methods:
- Selector: 'insertTimeRange:ofAsset:atTime:copySampleData:error:'
@@ -1081,10 +1110,6 @@
- Selector: 'chromaticityValuesForDeviceWhiteBalanceGains:'
SwiftName: 'chromaticityValues(forDeviceWhiteBalanceGains:)'
MethodKind: Instance
- - Selector: 'defaultDeviceWithDeviceType:mediaType:position:'
- MethodKind: Class
- NullabilityOfRet: U
- Nullability: [ U, U, N ]
- Selector: 'defaultDeviceWithMediaType:'
MethodKind: Class
NullabilityOfRet: U
@@ -1181,8 +1206,6 @@
Nullability: U
- Name: mediaType
Nullability: U
- - Name: supportedColorSpaces
- Nullability: U
- Name: videoSupportedFrameRateRanges
Nullability: U
Type: 'NSArray *'
@@ -1204,11 +1227,6 @@
Nullability: U
- Name: AVCaptureDeviceDiscoverySession
SwiftName: AVCaptureDeviceDiscoverySession
- Methods:
- - Selector: 'discoverySessionWithDeviceTypes:mediaType:position:'
- MethodKind: Class
- NullabilityOfRet: U
- Nullability: [ U, U, N ]
Properties:
- Name: devices
PropertyKind: Instance
@@ -1453,11 +1471,6 @@
MethodKind: Instance
Nullability: [ U, U ]
Properties:
- - Name: availableVideoCVPixelFormatTypes
- SwiftName: availableVideoCVPixelFormatTypes
- PropertyKind: Instance
- Nullability: U
- Type: 'NSArray *'
- Name: availableVideoCodecTypes
PropertyKind: Instance
Nullability: U
@@ -1540,16 +1553,13 @@
SwiftName: AVFrameRateRange
- Name: AVMetadataMachineReadableCodeObject
Properties:
- - Name: corners
- PropertyKind: Instance
- Nullability: U
- Type: 'NSArray *'
- Name: stringValue
PropertyKind: Instance
Nullability: U
- Name: AVMetadataObject
Properties:
- Name: type
+ PropertyKind: Instance
Nullability: U
- Name: AVMutableComposition
Methods:
@@ -1566,16 +1576,6 @@
- Name: defaultMediaDataStorage
PropertyKind: Instance
Nullability: N
- - Name: AVMutableVideoComposition
- Methods:
- - Selector: 'videoCompositionWithPropertiesOfAsset:'
- MethodKind: Class
- NullabilityOfRet: N
- - Name: AVVideoComposition
- Methods:
- - Selector: 'videoCompositionWithPropertiesOfAsset:'
- MethodKind: Class
- NullabilityOfRet: N
- Name: AVPlayerItem
Methods:
- Selector: 'seekToTime:completionHandler:'
diff -ruN /Applications/Xcode9-beta3.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h /Applications/Xcode9-beta4.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h
--- /Applications/Xcode9-beta3.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h 2017-06-30 23:52:19.000000000 -0400
+++ /Applications/Xcode9-beta4.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h 2017-07-14 04:50:10.000000000 -0400
@@ -119,6 +119,26 @@
*/
- (void)flushFromSourceTime:(CMTime)time completionHandler:(void (^)(BOOL flushSucceeded))completionHandler;
+/*!
+ @constant AVSampleBufferAudioRendererWasFlushedAutomaticallyNotification
+ @abstract A notification that fires whenever the receiver's enqueued media data has been flushed for a reason other than a call to the -flush method.
+ @discussion
+ The renderer may flush enqueued media data when the user routes playback to a new destination. The renderer may also flush enqueued media data when the playback rate of the attached AVSampleBufferRenderSynchronizer is changed (e.g. 1.0 -> 2.0 or 1.0 -> 0.0 -> 2.0), however no flush will occur for normal pauses (non-zero -> 0.0) and resumes (0.0 -> same non-zero rate as before).
+
+ When an automatic flush occurs, the attached render synchronizer's timebase will remain running at its current rate. It is typically best to respond to this notification by enqueueing media data with timestamps starting at the timebase's current time. To the listener, this will sound similar to muting the audio for a short period of time. If it is more desirable to ensure that all audio is played than to keep the timeline moving, you may also stop the synchronizer, set the synchronizer's current time to the value of AVSampleBufferAudioRendererFlushTimeKey, start reenqueueing sample buffers with timestamps starting at that time, and restart the synchronizer. To the listener, this will sound similar to pausing the audio for a short period of time.
+
+ This notification is delivered on an arbitrary thread. If sample buffers are being enqueued with the renderer concurrently with the receipt of this notification, it is possible that one or more sample buffers will remain enqueued in the renderer. This is generally undesirable, because the sample buffers that remain will likely have timestamps far ahead of the timebase's current time and so won't be rendered for some time. The best practice is to invoke the -flush method, in a manner that is serialized with enqueueing sample buffers, after receiving this notification and before resuming the enqueueing of sample buffers.
+ */
+AVF_EXPORT NSNotificationName const AVSampleBufferAudioRendererWasFlushedAutomaticallyNotification API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED;
+
+ /*!
+ @constant AVSampleBufferAudioRendererFlushTimeKey
+ @abstract The presentation timestamp of the first enqueued sample that was flushed.
+ @discussion
+ The value of this key is an NSValue wrapping a CMTime.
+ */
+ AVF_EXPORT NSString * const AVSampleBufferAudioRendererFlushTimeKey API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED;
+
@end
NS_ASSUME_NONNULL_END