-
Notifications
You must be signed in to change notification settings - Fork 516
AudioToolbox macOS xcode13.0 beta1
Rachel Kang edited this page Aug 24, 2021
·
3 revisions
# AudioToolbox.framework https://github.com/xamarin/xamarin-macios/pull/12491
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h 2021-03-16 09:52:43.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h 2021-06-02 12:45:47.000000000 -0400
@@ -13,6 +13,7 @@
#import <AudioToolbox/AUParameters.h>
#import <Foundation/NSExtensionRequestHandling.h>
+#import <CoreMIDI/MIDIServices.h>
#if !TARGET_OS_IPHONE
typedef UInt32 AUAudioObjectID; // AudioObjectID
@@ -49,7 +50,7 @@
/*! @var AUEventSampleTimeImmediate
@brief A special value of AUEventSampleTime indicating "immediately."
@discussion
- Callers of AUScheduleParameterBlock and AUScheduleMIDIEventBlock can pass
+ Callers of AUScheduleParameterBlock, AUMIDIEventListBlock, and AUScheduleMIDIEventBlock can pass
AUEventSampleTimeImmediate to indicate that the event should be rendered as soon as
possible, in the next cycle. A caller may also add a small (less than 4096) sample frame
offset to this constant. The base AUAudioUnit implementation translates this constant to a
@@ -481,8 +482,8 @@
/*! @property scheduleParameterBlock
@brief Block which hosts use to schedule parameters.
@discussion
- As with renderBlock, a host should fetch and cache this block before beginning to render,
- if it intends to schedule parameters.
+ As with renderBlock, a host should fetch and cache this block before calling
+ allocateRenderResources, if it intends to schedule parameters.
The block is safe to call from any thread context, including realtime audio render
threads.
@@ -592,30 +593,73 @@
/*! @property scheduleMIDIEventBlock
@brief Block used to schedule MIDI events.
@discussion
- As with renderBlock, a host should fetch and cache this block before beginning to render,
- if it intends to schedule MIDI events.
+ As with renderBlock, a host should fetch and cache this block before calling
+ allocateRenderResources if it intends to schedule MIDI events.
This is implemented in the base class. It is nil when musicDeviceOrEffect is NO.
- Subclassers should not override. When hosts schedule events via this block, they are
- delivered to the audio unit via the list of AURenderEvents delivered to
+ Subclasses should not override. When hosts schedule events via this block, they are
+ sent to the Audio Unit via the list of AURenderEvents delivered to
internalRenderBlock.
+
+ All events sent via this block will be delivered to the internalRenderBlock in the MIDI
+ protocol returned by the AudioUnitMIDIProtocol property. For example, if AudioUnitMIDIProtocol
+ returns kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0 if necessary.
+ If AudioUnitMIDIProtocol is not set, events will be delivered as legacy MIDI.
This bridged to the v2 API MusicDeviceMIDIEvent.
*/
@property (NS_NONATOMIC_IOSONLY, readonly, nullable) AUScheduleMIDIEventBlock scheduleMIDIEventBlock;
+/*! @property scheduleMIDIEventListBlock
+ @brief Block used to schedule MIDIEventLists.
+ @discussion
+ As with renderBlock, a host should fetch and cache this block before calling
+ allocateRenderResources, if it intends to schedule MIDI events.
+
+ When scheduling events during the render cycle (e.g. via a render observer) eventSampleTime can be
+ AUEventSampleTimeImmediate plus an optional buffer offset, in which case the event is
+ scheduled at the provided offset position within the current render cycle.
+
+ This is implemented in the base class. It is nil when musicDeviceOrEffect is NO.
+
+ Subclassers should not override. When hosts schedule events via this block, they are
+ delivered to the Audio Unit via the list of AURenderEvents delivered to
+ internalRenderBlock.
+
+ All events sent via this block will be delivered to the internalRenderBlock in the MIDI protocol returned by
+ the AudioUnitMIDIProtocol property. For example, if this block is called with MIDI-1.0 events but
+ AudioUnitMIDIProtocol returns kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0.
+ If AudioUnitMIDIProtocol is not set, events will be delivered as legacy MIDI.
+
+ Note: This block should be preferred over scheduleMIDIEventBlock going forward.
+
+ This bridged to the v2 API MusicDeviceMIDIEventList.
+*/
+@property (NS_NONATOMIC_IOSONLY, readonly, nullable) AUMIDIEventListBlock scheduleMIDIEventListBlock API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+
/*! @property MIDIOutputNames
@brief Count, and names of, a plug-in's MIDI outputs.
@discussion
A plug-in may override this method to inform hosts about its MIDI outputs. The size of the
- array is the number of outputs the audio unit supports. Each item in the array is the name
+ array is the number of outputs the Audio Unit supports. Each item in the array is the name
of the MIDI output at that index.
This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallbackInfo.
*/
@property (NS_NONATOMIC_IOSONLY, readonly, copy) NSArray<NSString *> *MIDIOutputNames API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
+/*! @property MIDIEventListOutputNames
+ @brief The names of a plug-in's MIDI event list outputs.
+ @discussion
+ A plug-in may override this method to inform hosts about its MIDIEventList outputs. The size of the
+ array is the number of outputs the Audio Unit supports. Each item in the array is the name
+ of the MIDIEventList output at that index.
+
+ This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputEventListCallbackInfo.
+*/
+@property (NS_NONATOMIC_IOSONLY, readonly, copy) NSArray<NSString *> *MIDIEventListOutputNames API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+
/*! @property providesUserInterface
@brief Specifies whether an audio unit provides UI (normally in the form of a view controller).
@discussion
@@ -631,21 +675,71 @@
// These properties and methods are generally optional.
/*! @property MIDIOutputEventBlock
- @brief Block used by the host to access the MIDI output generated by an audio unit.
+ @brief Block used by the host to access the MIDI output generated by an Audio Unit.
@discussion
The host can set this block and the plug-in can call it in its renderBlock to provide to the
host the MIDI data associated with the current render cycle.
+
+ All events sent via this block will be delivered to the host in the MIDI protocol returned by
+ the hostMIDIProtocol property. For example, if hostMIDIProtocol is set to
+ kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0. If hostMIDIProtocol
+ is not set, events will be delivered as legacy MIDI.
- This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
-*/
+ Note: AUMIDIEventListBlock should be preferred over this block going forward.
+
+ This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
+ */
@property (NS_NONATOMIC_IOSONLY, copy, nullable) AUMIDIOutputEventBlock MIDIOutputEventBlock API_AVAILABLE(macos(10.13), ios(11.0), watchos(4.0), tvos(11.0));
+/*! @property MIDIOutputEventListBlock
+ @brief Block used by the host to access the MIDIEventList output generated by an Audio Unit.
+ @discussion
+ The host can set this block and the plug-in can call it in its renderBlock to provide to the
+ host the MIDIEventList data associated with the current render cycle.
+
+ All events sent via this block will be delivered to the host in the MIDI protocol returned by
+ the hostMIDIProtocol property. For example, if hostMIDIProtocol is set to
+ kMIDIProtocol_2_0, incoming events will be translated to MIDI 2.0. If hostMIDIProtocol
+ is not set, events will be delivered as legacy MIDI.
+
+ Note: This block should be preferred over MIDIOutputEventBlock going forward.
+
+ Host should setup in the following order:
+ - Set hostMIDIProtocol
+ - Set MIDIOutputEventBlock
+ - Call allocateRenderResourcesAndReturnError
+
+ This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputEventListCallback.
+*/
+@property (NS_NONATOMIC_IOSONLY, copy, nullable) AUMIDIEventListBlock MIDIOutputEventListBlock API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+
+/*! @property AudioUnitMIDIProtocol
+ @brief The MIDI protocol used by the Audio Unit for receiving MIDIEventList data.
+ @discussion
+ All translatable messages will be converted (if necessary) to this protocol prior to delivery
+ to the Audio Unit.
+
+ This is bridged to the v2 API property kAudioUnitProperty_AudioUnitMIDIProtocol.
+*/
+@property (NS_NONATOMIC_IOSONLY, readonly) MIDIProtocolID AudioUnitMIDIProtocol API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+
+/*! @property hostMIDIProtocol
+ @brief The MIDI protocol to be used by the host for receiving MIDIEventList data.
+ @discussion
+ Hosts should set this property to the protocol they wish to receive MIDIEventList data
+ from the Audio Unit. This should be set prior to initialization, all translatable messages
+ will be converted (if necessary) to this property's protocol prior to delivery to the host.
+
+ This is bridged to the v2 API property kAudioUnitProperty_HostMIDIProtocol.
+*/
+@property (NS_NONATOMIC_IOSONLY) MIDIProtocolID hostMIDIProtocol API_AVAILABLE(macos(12.0), ios(15.0), tvos(15.0)) API_UNAVAILABLE(watchos);
+
/*! @property fullState
- @brief A persistable snapshot of the audio unit's properties and parameters, suitable for
+ @brief A persistable snapshot of the Audio Unit's properties and parameters, suitable for
saving as a user preset.
@discussion
- Hosts may use this property to save and restore the state of an audio unit being used in a
- user preset or document. The audio unit should not persist transitory properties such as
+ Hosts may use this property to save and restore the state of an Audio Unit being used in a
+ user preset or document. The Audio Unit should not persist transitory properties such as
stream formats, but should save and restore all parameters and custom properties.
The base class implementation of this property saves the values of all parameters
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h 2021-03-16 13:57:39.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h 2021-06-02 05:34:10.000000000 -0400
@@ -116,7 +116,8 @@
AURenderEventParameter = 1,
AURenderEventParameterRamp = 2,
AURenderEventMIDI = 8,
- AURenderEventMIDISysEx = 9
+ AURenderEventMIDISysEx = 9,
+ AURenderEventMIDIEventList = 10
};
#pragma pack(4)
@@ -156,6 +157,16 @@
uint8_t data[3]; //!< The bytes of the MIDI event. Running status will not be used.
} AUMIDIEvent;
+/// Describes a single scheduled MIDIEventList.
+typedef struct AUMIDIEventList {
+ union AURenderEvent *__nullable next; //!< The next event in a linked list of events.
+ AUEventSampleTime eventSampleTime; //!< The sample time at which the event is scheduled to occur.
+ AURenderEventType eventType; //!< AURenderEventMIDI or AURenderEventMIDISysEx.
+ uint8_t reserved; //!< Must be 0.
+ MIDIEventList eventList; //!< A structure containing UMP packets.
+} AUMIDIEventList;
+
+
/*! @brief A union of the various specific render event types.
@discussion
Determine which variant to use via head.eventType. AURenderEventParameter and
@@ -166,6 +177,7 @@
AURenderEventHeader head;
AUParameterEvent parameter;
AUMIDIEvent MIDI;
+ AUMIDIEventList MIDIEventsList;
} AURenderEvent;
#pragma pack()
@@ -229,7 +241,9 @@
If the plug-in produces more MIDI output data than the default size of the allocated buffer,
then the plug-in can provide this property to increase the size of this buffer.
- The value represents the number of 3-byte MIDI 1.0 messages that fit into the buffer.
+ The value represents the number of 3-byte Legacy MIDI messages that fit into the buffer or
+ a single MIDIEventList containing 1 MIDIEventPacket of 2 words when using MIDI 2.0 (MIDIEventList based API's).
+
This property is set to the default value by the framework.
In case of kAudioUnitErr_MIDIOutputBufferFull errors caused by producing too much MIDI
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h 2021-03-16 13:55:24.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h 2021-06-02 12:42:53.000000000 -0400
@@ -365,8 +365,8 @@
merges the two inputs to the single output
@constant kAudioUnitSubType_NewTimePitch
- An audio unit that provides good quality time stretching and pitch shifting
- while still being very fast.
+ An audio unit that provides good quality time stretching and pitch shifting.
+ It is computationally less expensive than kAudioUnitSubType_TimePitch.
@constant kAudioUnitSubType_AUiPodTimeOther
An audio unit that provides time domain time stretching.
@@ -384,20 +384,19 @@
kAudioUnitSubType_RoundTripAAC = 'raac',
};
-#if !TARGET_OS_IPHONE
+#if !TARGET_OS_WATCH
/*!
- @enum Apple converter audio unit sub types (macOS only)
+ @enum Apple converter audio unit sub types (macOS and iOS only)
@constant kAudioUnitSubType_TimePitch
- An audio unit that can be used to have independent control of both playback
- rate and pitch. It provides a generic view, so can be used in both a UI and
- programmatic context. It also comes in an Offline version so can be used to
- process audio files.
+ An audio unit that provides high quality time stretching and pitch shifting.
*/
CF_ENUM(UInt32) {
kAudioUnitSubType_TimePitch = 'tmpt'
};
-#elif !TARGET_OS_MACCATALYST
+#endif //!TARGET_OS_WATCH
+
+#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST
/*!
@enum Apple converter audio unit sub types (iOS only)
@constant kAudioUnitSubType_AUiPodTime
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h 2021-03-16 13:57:40.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h 2021-06-02 05:34:11.000000000 -0400
@@ -712,7 +712,9 @@
converter's output format. On exit, the number of packets of converted
data that were written to outOutputData.
@param outOutputData
- The converted output data is written to this buffer.
+ The converted output data is written to this buffer. On entry, the buffers'
+ mDataByteSize fields (which must all be the same) reflect buffer capacity.
+ On exit, mDataByteSize is set to the number of bytes written.
@param outPacketDescription
If non-null, and the converter's output uses packet descriptions, then
packet descriptions are written to this array. It must point to a memory
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h 2021-03-16 09:52:43.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioHardwareService.h 2021-06-02 11:05:14.000000000 -0400
@@ -46,16 +46,16 @@
clients can be informed when the service has been reset for some reason.
When a reset happens, any state the client has with AHS, such as cached data
or added listeners, must be re-established by the client.
- @constant kAudioHardwareServiceDeviceProperty_VirtualMasterVolume
+ @constant kAudioHardwareServiceDeviceProperty_VirtualMainVolume
A Float32 that represents the value of the volume control. The range is
between 0.0 and 1.0 (inclusive). This actual volume controls this property
manipulates depends on what the device provides. If the device has a true
- master volume control, this property directly controls that. If the device
+ main volume control, this property directly controls that. If the device
has individual channel volume controls, this property will apply to those
identified by the device's preferred multi-channel layout (or preferred
stereo pair if the device is stereo only). Note that this control maintains
the relative balance between all the channels it affects.
- @constant kAudioHardwareServiceDeviceProperty_VirtualMasterBalance
+ @constant kAudioHardwareServiceDeviceProperty_VirtualMainBalance
A Float32 that represents the value of the stereo balance control. The range
is from 0.0 (all power to the left) to 1.0 (all power to the right) with
the value of 0.5 signifying that the channels have equal power. This control
@@ -66,8 +66,12 @@
CF_ENUM(AudioObjectPropertySelector)
{
kAudioHardwareServiceProperty_ServiceRestarted = 'srst',
- kAudioHardwareServiceDeviceProperty_VirtualMasterVolume = 'vmvc',
- kAudioHardwareServiceDeviceProperty_VirtualMasterBalance = 'vmbc'
+
+ kAudioHardwareServiceDeviceProperty_VirtualMainVolume = 'vmvc',
+ kAudioHardwareServiceDeviceProperty_VirtualMasterVolume API_DEPRECATED_WITH_REPLACEMENT("kAudioHardwareServiceDeviceProperty_VirtualMainVolume", macos(10.5, 10.5)) API_UNAVAILABLE(ios, watchos, tvos) = kAudioHardwareServiceDeviceProperty_VirtualMainVolume,
+
+ kAudioHardwareServiceDeviceProperty_VirtualMainBalance = 'vmbc',
+ kAudioHardwareServiceDeviceProperty_VirtualMasterBalance API_DEPRECATED_WITH_REPLACEMENT("kAudioHardwareServiceDeviceProperty_VirtualMainBalance", macos(10.5, 10.5)) API_UNAVAILABLE(ios, watchos, tvos) = kAudioHardwareServiceDeviceProperty_VirtualMainBalance,
};
//==================================================================================================
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h 2021-03-16 08:46:12.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h 2021-06-02 05:36:45.000000000 -0400
@@ -330,10 +330,8 @@
// Parameters for the AUTimePitch, AUTimePitch (offline), AUPitch units
CF_ENUM(AudioUnitParameterID) {
kTimePitchParam_Rate = 0,
-#if !TARGET_OS_IPHONE
kTimePitchParam_Pitch = 1,
kTimePitchParam_EffectBlend = 2 // only for the AUPitch unit
-#endif
};
// Parameters for AUNewTimePitch
@@ -475,7 +473,9 @@
kDynamicsProcessorParam_ReleaseTime = 5,
// Global, dB, -40->40, 0
- kDynamicsProcessorParam_MasterGain = 6,
+ kDynamicsProcessorParam_OverallGain = 6,
+
+ kDynamicsProcessorParam_MasterGain API_DEPRECATED_WITH_REPLACEMENT("kDynamicsProcessorParam_OverallGain", ios(2.0, 2.0), watchos(2.0, 2.0), tvos(9.0, 9.0) ,macos(10.5,10.5)) = kDynamicsProcessorParam_OverallGain,
// Global, dB, read-only parameter
kDynamicsProcessorParam_CompressionAmount = 1000,
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h 2021-03-16 08:46:12.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h 2021-06-02 11:05:13.000000000 -0400
@@ -607,12 +607,12 @@
MIDIEndpointRef in CoreMIDI.
The host can retrieve an array of CFStringRefs published by the audio unit, where :
- - the size of the array is the number of MIDI Outputs the audio unit supports
+ - the size of the array is the number of MIDI Outputs the Audio Unit supports
- each item in the array is the name for that output at that index
- The host should release the array when it is finished with it.
+ The host owns this array and its elements and should release them when it is finished.
- Once the host has determined that the audio unit supports this feature, it can then provide a
+ Once the host has determined that the Audio Unit supports this feature, it can then provide a
callback, through which the audio unit can send the MIDI data.
See the documentation for the kAudioUnitProperty_MIDIOutputCallback property.
@@ -776,13 +776,102 @@
interact with the AudioUnit through this block; it is for the exclusive use
of the OS.
+ @constant kAudioUnitProperty_LastRenderSampleTime
+ Scope: Global
+ Value Type: Float64
+ Access: read-only
+
+ The absolute sample frame time of the most recent render timestamp.
+
@constant kAudioUnitProperty_LoadedOutOfProcess
Scope: Global
Value Type: UInt32
Access: read-only
- Indicates whether an audio unit is loaded out-of-process, which might happen
+ Indicates whether an Audio Unit is loaded out-of-process, which might happen
at the request of the host or when loading in-process is not possible.
+
+ @constant kAudioUnitProperty_MIDIOutputEventListCallbackInfo
+ Scope: Global
+ Value Type: CFArrayRef
+ Access: read
+
+ Used to determine how many MIDIEventList output streams the audio unit can generate (and the name for
+ each of these outputs). Each MIDIEventList output is a complete MIDI data stream, such as embodied by a
+ MIDIEndpointRef in CoreMIDI.
+
+ The host can retrieve an array of CFStringRefs published by the Audio Unit, where :
+ - the size of the array is the number of MIDIEventList Outputs the audio unit supports
+ - each item in the array is the name for that output at that index
+
+ The host owns this array and its elements and should release them when it is finished.
+
+ Once the host has determined that the Audio Unit supports this feature, it can then provide a
+ callback, through which the audio unit can send the MIDIEventList data.
+ See the documentation for the kAudioUnitProperty_MIDIOutputEventListCallback property.
+
+ Note: The CFArrayRef's returned from this property must return a +1 reference.
+
+ Compare to property kAudioUnitProperty_MIDIOutputCallbackInfo.
+
+ @constant kAudioUnitProperty_MIDIOutputEventListCallback
+ Scope: Global
+ Value Type: block: void (^)(AUEventSampleTime, const struct MIDIEventList *)
+ Access: write
+
+ The host sets this property on the Audio Unit with the callback set appropriately.
+
+ Operational Parameters:
+ In the render call, just as is the expected usage of the AUHostCallbacks, the audio unit can
+ call the provided callback to provide MIDIEventList data to the host that it will associate with the
+ current AudioUnitRender.
+
+ The Audio Unit in the callback provides:
+ - the AUEventSampleTime that was provided to the audio unit for this particular call of
+ AudioUnitRender
+ - a MIDIEventList containing MIDI data. The time stamp values contained within the
+ MIDIEventPacket in this list are **sample offsets*** from the AudioTimeStamp provided.
+ This allows MIDI data to be time-stamped with a sample offset that is directly associated
+ with the audio data it is generating in the current call to the AudioUnitRender function
+
+ Host should setup in the following order:
+ - Set host desired MIDIProtocolID
+ - Set kAudioUnitProperty_MIDIOutputEventListCallback
+ - Initialize the Audio Unit
+
+ Note: kAudioUnitProperty_HostMIDIProtocol can not be changed while the Audio Unit is initialized.
+
+ There is no implied or expected association between the number (or position) of an audio unit's
+ audio or MIDI outputs.
+
+ Compare to property kAudioUnitProperty_MIDIOutputEventCallback.
+
+ @constant kAudioUnitProperty_AudioUnitMIDIProtocol
+ Scope: Global
+ Value Type: SInt32
+ Access: read
+
+ A signed 32-bit integer representing the audio unit's MIDI protocol. This should be one of the
+ values in the MIDIProtocolID enum (see <CoreMIDI/MIDIServices.h>)..
+
+ The framework will convert all incoming MIDI data to the protocol set in this property, the host can query
+ this property to detect the audio unit's current MIDI protocol.
+
+ Note: This property should not be changed after the audio has been initialized.
+
+ @constant kAudioUnitProperty_HostMIDIProtocol
+ Scope: Global
+ Value Type: SInt32
+ Access: write
+
+ A signed 32-bit integer representing the hosts MIDI protocol. This should be set to one of the values
+ in the MIDIProtocolID enum (see <CoreMIDI/MIDIServices.h>).
+
+ Hosts should set this property to the protocol that MIDI data is desired to be delivered in. The framework will
+ convert all MIDI data sent to the host to the protocol value set in this property, an audio unit can query
+ this property to detect the hosts MIDI protocol.
+
+ Note: This property should not be changed after the audio unit has been initialized.
*/
CF_ENUM(AudioUnitPropertyID)
{
@@ -799,7 +888,7 @@
kAudioUnitProperty_SupportedNumChannels = 13,
kAudioUnitProperty_MaximumFramesPerSlice = 14,
kAudioUnitProperty_ParameterValueStrings = 16,
- kAudioUnitProperty_AudioChannelLayout = 19,
+ kAudioUnitProperty_AudioChannelLayout = 19,
kAudioUnitProperty_TailTime = 20,
kAudioUnitProperty_BypassEffect = 21,
kAudioUnitProperty_LastRenderError = 22,
@@ -817,7 +906,7 @@
kAudioUnitProperty_FrequencyResponse = 52,
kAudioUnitProperty_ParameterHistoryInfo = 53,
kAudioUnitProperty_NickName = 54,
- kAudioUnitProperty_OfflineRender = 37,
+ kAudioUnitProperty_OfflineRender = 37,
kAudioUnitProperty_ParameterIDName = 34,
kAudioUnitProperty_ParameterStringFromValue = 33,
kAudioUnitProperty_ParameterClumpName = 35,
@@ -831,6 +920,7 @@
kAudioUnitProperty_RenderContextObserver
__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads")
= 60,
+ kAudioUnitProperty_LastRenderSampleTime = 61,
kAudioUnitProperty_LoadedOutOfProcess = 62,
#if !TARGET_OS_IPHONE
kAudioUnitProperty_FastDispatch = 5,
@@ -843,6 +933,12 @@
kAudioUnitProperty_MIDIOutputCallbackInfo = 47,
kAudioUnitProperty_MIDIOutputCallback = 48,
+
+ kAudioUnitProperty_MIDIOutputEventListCallbackInfo = 63,
+ kAudioUnitProperty_MIDIOutputEventListCallback = 64,
+
+ kAudioUnitProperty_AudioUnitMIDIProtocol = 65,
+ kAudioUnitProperty_HostMIDIProtocol = 66
};
#if AU_SUPPORT_INTERAPP_AUDIO
@@ -1288,6 +1384,31 @@
typedef void (^AURenderContextObserver)(const AudioUnitRenderContext *context)
__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads");
+/*!
+ @struct MIDIEventList
+ @abstract Forward declaration of MIDIEventList found in <CoreMIDI/MIDIServices.h>
+*/
+typedef struct MIDIEventList MIDIEventList;
+
+/*! @typedef AUEventSampleTime
+ @brief Expresses time as a sample count.
+ @discussion
+ Sample times are normally positive, but hosts can propagate HAL sample times through audio
+ units, and HAL sample times can be small negative numbers.
+*/
+typedef int64_t AUEventSampleTime;
+
+/*!
+ @typedef AUMIDIEventListBlock
+ @abstract A block used by an audio unit to send or receive MIDIEventList data.
+ @param eventSampleTime
+ The time in samples at which the MIDI events are to occur.
+ @param eventList
+ One full MIDI, partial MIDI SysEx, or a full SysEx UMP message.
+*/
+typedef OSStatus (^ AUMIDIEventListBlock)(AUEventSampleTime eventSampleTime,
+ const struct MIDIEventList * eventList);
+
//=====================================================================================================================
#pragma mark - Parameter Definitions
@@ -1341,7 +1462,7 @@
octaves in relative pitch where a value of 1 is equal to 1200 cents
@constant kAudioUnitParameterUnit_BPM
beats per minute, ie tempo
- @constant kAudioUnitParameterUnit_Beats
+ @constant kAudioUnitParameterUnit_Beats
time relative to tempo, i.e., 1.0 at 120 BPM would equal 1/2 a second
@constant kAudioUnitParameterUnit_Milliseconds
parameter is expressed in milliseconds
@@ -1349,6 +1470,8 @@
for compression, expansion ratio, etc.
@constant kAudioUnitParameterUnit_CustomUnit
this is the parameter unit type for parameters that present a custom unit name
+ @constant kAudioUnitParameterUnit_MIDI2Controller
+ a generic MIDI 2.0 controller value with 32-bit range
*/
typedef CF_ENUM(UInt32, AudioUnitParameterUnit)
{
@@ -1378,7 +1501,8 @@
kAudioUnitParameterUnit_Beats = 23,
kAudioUnitParameterUnit_Milliseconds = 24,
kAudioUnitParameterUnit_Ratio = 25,
- kAudioUnitParameterUnit_CustomUnit = 26
+ kAudioUnitParameterUnit_CustomUnit = 26,
+ kAudioUnitParameterUnit_MIDI2Controller = 27
};
/*!
@@ -2461,10 +2585,37 @@
CF_ENUM(AudioUnitPropertyID) {
kAUVoiceIOProperty_BypassVoiceProcessing = 2100,
kAUVoiceIOProperty_VoiceProcessingEnableAGC = 2101,
- kAUVoiceIOProperty_MuteOutput = 2104
-
+ kAUVoiceIOProperty_MuteOutput = 2104
};
+/*!
+ @enum Speech activity event described by AUVoiceIO
+*/
+typedef CF_ENUM(UInt32, AUVoiceIOSpeechActivityEvent)
+{
+ kAUVoiceIOSpeechActivityHasStarted = 0,
+ kAUVoiceIOSpeechActivityHasEnded = 1
+};
+
+/*!
+ @typedef AUVoiceIOMutedSpeechActivityEventListener
+ @abstract Block called to receive speech activity event while the client is muted.
+*/
+typedef void (^AUVoiceIOMutedSpeechActivityEventListener)(AUVoiceIOSpeechActivityEvent event);
+
+/*!
+
+ @constant kAUVoiceIOProperty_MutedSpeechActivityEventListener
+ @discussion Scope: Global
+ Value Type: AUVoiceIOMutedSpeechActivityEventListener
+ Access: write only
+ Register a listener to be notified when speech activity event occurs while the client is muted.
+ Continuous presence of or lack of speech activity during mute will not cause redundant notification.
+ In order to use this API, it's expected to implement the mute via the kAUVoiceIOProperty_MuteOutput.
+ */
+CF_ENUM(AudioUnitPropertyID) {
+ kAUVoiceIOProperty_MutedSpeechActivityEventListener = 2106
+} API_AVAILABLE(ios(15.0)) API_UNAVAILABLE(macos, watchos, tvos);
#pragma mark - AUVoiceProcessing unit deprecated properties
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h 2021-03-16 08:40:30.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CAFFile.h 2021-06-01 20:55:09.000000000 -0400
@@ -11,9 +11,12 @@
#include <CoreAudioTypes/CoreAudioTypes.h>
-
+#if TARGET_OS_WIN32
+#define ATTRIBUTE_PACKED
+#pragma pack(push, 1)
+#else
#define ATTRIBUTE_PACKED __attribute__((__packed__))
-
+#endif
// In a CAF File all of these types' byte order is big endian.
// When reading or writing these values the program will need to flip byte order to native endian
@@ -354,7 +357,9 @@
} ATTRIBUTE_PACKED;
typedef struct CAFUMIDChunk CAFUMIDChunk;
-
+#if TARGET_OS_WIN32
+#pragma pack(pop)
+#endif
////////////////////////////////////////////////////////////////////////////////////////////////
#endif // AudioToolbox_CAFFile_h
diff -ruN /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h
--- /Applications/Xcode_12.5.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h 2021-03-16 13:55:24.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h 2021-06-02 11:05:14.000000000 -0400
@@ -171,6 +171,12 @@
*/
typedef AudioComponentInstance MusicDeviceComponent;
+/*!
+ @struct MIDIEventList
+ @abstract Forward declaration of MIDIEventList found in <CoreMIDI/MIDIServices.h>
+*/
+typedef struct MIDIEventList MIDIEventList;
+
//=====================================================================================================================
#pragma mark -
#pragma mark Functions
@@ -181,7 +187,8 @@
@discussion This is the API used to send MIDI channel messages to an audio unit. The status and data parameters
are used exactly as described by the MIDI specification, including the combination of channel and
- command in the status byte.
+ command in the status byte. All events sent via MusicDeviceMIDIEventList will be delivered to the
+ audio unit in the MIDI protocol returned by kAudioUnitProperty_AudioUnitMIDIProtocol.
@param inUnit
The audio unit
@@ -229,6 +236,38 @@
const UInt8 * inData,
UInt32 inLength) API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+/*!
+ @function MusicDeviceMIDIEventList
+ @abstract Used to send MIDI messages to an audio unit
+
+ @discussion This API is suitable for sending Universal MIDI Packet (UMP) MIDI messages to an audio unit. The message must be
+ a full non-SysEx event, a partial SysEx event, or a complete SysEx event. Running status is not allowed. MIDI 1.0 in
+ universal packets (MIDI-1UP) and MIDI 2.0 messages are allowed. All events sent via MusicDeviceMIDIEventList will
+ be delivered to the audio unit in the MIDI protocol returned by kAudioUnitProperty_AudioUnitMIDIProtocol.
+
+ This is bridged to the v2 API property kAudioUnitProperty_MIDIOutputCallback.
+
+ @param inUnit
+ The audio unit
+ @param inOffsetSampleFrame
+ If you are scheduling the MIDIEventList from the audio unit's render thread, then you can supply a
+ sample offset that the audio unit may apply within its next audio unit render.
+ This allows you to schedule to the sample, the time when a MIDI command is applied and is particularly
+ important when starting new notes. If you are not scheduling in the audio unit's render thread,
+ then you should set this value to 0
+
+ inOffsetSampleFrame should serve as the base offset for each packet's timestamp i.e.
+ sampleOffset = inOffsetSampleFrame + evtList.packet[0].timeStamp
+
+ @param evtList
+ The MIDIEventList to be sent
+
+ @result noErr, or an audio unit error code
+*/
+extern OSStatus
+MusicDeviceMIDIEventList( MusicDeviceComponent inUnit,
+ UInt32 inOffsetSampleFrame,
+ const struct MIDIEventList * evtList) API_AVAILABLE(macos(12), ios(15.0), tvos(15.0));
/*!
@function MusicDeviceStartNote
@@ -313,6 +352,7 @@
@constant kMusicDeviceReleaseInstrumentSelect
@constant kMusicDeviceStartNoteSelect
@constant kMusicDeviceStopNoteSelect
+ @constant kMusicDeviceMIDIEventListSelect
*/
enum {
kMusicDeviceRange = 0x0100,
@@ -321,7 +361,8 @@
kMusicDevicePrepareInstrumentSelect = 0x0103,
kMusicDeviceReleaseInstrumentSelect = 0x0104,
kMusicDeviceStartNoteSelect = 0x0105,
- kMusicDeviceStopNoteSelect = 0x0106
+ kMusicDeviceStopNoteSelect = 0x0106,
+ kMusicDeviceMIDIEventListSelect = 0x0107
};
//=====================================================================================================================
- README
- xcode13.0 Binding Status
- xcode13.1 Binding Status
- xcode13.2 Binding Status
- xcode13.3 Binding Status
- xcode13.4 Binding Status
- xcode14.0 Binding Status
- xcode14.1 Binding Status
- xcode14.2 Binding Status
- xcode14.3 Binding Status
- xcode15.0 Binding Status
- xcode15.1 Binding Status
- xcode15.3 Binding Status
- xcode15.4 Binding Status
- xcode16.0 Binding Status
- xcode16.1 Binding Status
- xcode16.2 Binding Status