
AudioToolbox iOS xcode26.0 b1


# AudioToolbox.framework

Rolf

diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h	2025-04-19 01:38:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnit.h	2025-05-30 01:21:12
@@ -105,7 +105,9 @@
 		An AUAudioUnitStatus result code. If an error is returned, the input data should be assumed 
 		to be invalid.
 */
-typedef AUAudioUnitStatus (^AURenderPullInputBlock)(AudioUnitRenderActionFlags *actionFlags, const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber, AudioBufferList *inputData);
+typedef AUAudioUnitStatus (^AURenderPullInputBlock)(AudioUnitRenderActionFlags *actionFlags,
+	const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber,
+	AudioBufferList *inputData) CA_REALTIME_API;
 
 /*!	@typedef	AURenderBlock
 	@brief		Block to render the audio unit.
@@ -138,7 +140,9 @@
 		An `AUAudioUnitStatus` result code. If an error is returned, the output data should be assumed
 		to be invalid.
 */
-typedef AUAudioUnitStatus (^AURenderBlock)(AudioUnitRenderActionFlags *actionFlags, const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger outputBusNumber, AudioBufferList *outputData, AURenderPullInputBlock __nullable pullInputBlock);
+typedef AUAudioUnitStatus (^AURenderBlock)(AudioUnitRenderActionFlags *actionFlags,
+	const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger outputBusNumber,
+	AudioBufferList *outputData, AURenderPullInputBlock __nullable pullInputBlock) CA_REALTIME_API;
 
 /*!	@typedef	AURenderObserver
 	@brief		Block called when an audio unit renders.
@@ -149,7 +153,9 @@
 		
 		The parameters are identical to those of AURenderBlock.
 */
-typedef void (^AURenderObserver)(AudioUnitRenderActionFlags actionFlags, const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger outputBusNumber);
+typedef void (^AURenderObserver)(AudioUnitRenderActionFlags actionFlags,
+	const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger outputBusNumber)
+	CA_REALTIME_API;
 
 /*!	@typedef	AUScheduleParameterBlock
 	@brief		Block to schedule parameter changes.
@@ -172,7 +178,9 @@
 		The parameter's new value if the ramp duration is 0; otherwise, the value at the end
 		of the scheduled ramp.
 */
-typedef void (^AUScheduleParameterBlock)(AUEventSampleTime eventSampleTime, AUAudioFrameCount rampDurationSampleFrames, AUParameterAddress parameterAddress, AUValue value);
+typedef void (^AUScheduleParameterBlock)(AUEventSampleTime eventSampleTime,
+	AUAudioFrameCount rampDurationSampleFrames, AUParameterAddress parameterAddress, AUValue value)
+	CA_REALTIME_API;
 
 /*!	@typedef	AUScheduleMIDIEventBlock
 	@brief		Block to schedule MIDI events.
@@ -189,7 +197,8 @@
 		One or more valid MIDI 1.0 events, except sysex which must always be sent as the only event
 		in the chunk. Also, running status is not allowed.
 */
-typedef void (^AUScheduleMIDIEventBlock)(AUEventSampleTime eventSampleTime, uint8_t cable, NSInteger length, const uint8_t *midiBytes);
+typedef void (^AUScheduleMIDIEventBlock)(AUEventSampleTime eventSampleTime, uint8_t cable,
+	NSInteger length, const uint8_t *midiBytes) CA_REALTIME_API;
 
 /*!	@typedef	AUMIDIOutputEventBlock
 	@brief		Block to provide MIDI output events to the host.
@@ -203,7 +212,8 @@
 		One or more valid MIDI 1.0 events, except sysex which must always be sent as the only event
 		in the chunk.
 */
-typedef OSStatus (^AUMIDIOutputEventBlock)(AUEventSampleTime eventSampleTime, uint8_t cable, NSInteger length, const uint8_t *midiBytes);
+typedef OSStatus (^AUMIDIOutputEventBlock)(AUEventSampleTime eventSampleTime, uint8_t cable,
+	NSInteger length, const uint8_t *midiBytes) CA_REALTIME_API;
 
 /*!	@typedef	AUHostMusicalContextBlock
 	@brief		Block by which hosts provide musical tempo, time signature, and beat position.
@@ -230,7 +240,10 @@
 		Any of the provided parameters may be null to indicate that the audio unit is not interested
 		in that particular piece of information.
 */
-typedef BOOL (^AUHostMusicalContextBlock)(double * __nullable currentTempo, double * __nullable timeSignatureNumerator, NSInteger * __nullable timeSignatureDenominator, double * __nullable currentBeatPosition, NSInteger * __nullable sampleOffsetToNextBeat, double * __nullable currentMeasureDownbeatPosition);
+typedef BOOL (^AUHostMusicalContextBlock)(double * __nullable currentTempo,
+	double * __nullable timeSignatureNumerator, NSInteger * __nullable timeSignatureDenominator,
+	double * __nullable currentBeatPosition, NSInteger * __nullable sampleOffsetToNextBeat,
+	double * __nullable currentMeasureDownbeatPosition) CA_REALTIME_API;
 
 /*!	@typedef	AUMIDICIProfileChangedBlock
 	@brief		Block by which hosts are informed of an audio unit having enabled or disabled a
@@ -286,7 +299,9 @@
 		Any of the provided parameters may be null to indicate that the audio unit is not interested
 		in that particular piece of information.
 */
-typedef BOOL (^AUHostTransportStateBlock)(AUHostTransportStateFlags * __nullable transportStateFlags, double * __nullable currentSamplePosition, double * __nullable cycleStartBeatPosition, double * __nullable cycleEndBeatPosition);
+typedef BOOL (^AUHostTransportStateBlock)(AUHostTransportStateFlags * __nullable transportStateFlags,
+	double * __nullable currentSamplePosition, double * __nullable cycleStartBeatPosition,
+	double * __nullable cycleEndBeatPosition) CA_REALTIME_API;
 
 // =================================================================================================
 
@@ -1207,7 +1222,9 @@
 				The AUAudioUnit is not provided since it is not safe to message an Objective C 
 				object in a real time context.
 */
-typedef void (^AUInputHandler)(AudioUnitRenderActionFlags *actionFlags, const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber);
+typedef void (^AUInputHandler)(AudioUnitRenderActionFlags *actionFlags,
+	const AudioTimeStamp *timestamp, AUAudioFrameCount frameCount, NSInteger inputBusNumber)
+	CA_REALTIME_API;
 
 /*!	@brief		Additional methods for audio units which can do input/output.
 	@discussion	These methods will fail if the audio unit is not an input/output audio unit.
@@ -1580,6 +1597,20 @@
 
 @end
 
+@class CASpatialAudioExperience;
+
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface AUAudioUnit (IntendedSpatialExperience)
+
+/// The AUAudioUnit's intended spatial experience.
+///
+/// Only useful for output AUAudioUnits - setting on a non-output AU is a
+/// no-op. The default value of CAAutomaticSpatialAudio means the output AUAudioUnit
+/// uses its AVAudioSession's spatial experience. See CASpatialAudioExperience for
+/// more details.
+@property (nonnull, copy) CASpatialAudioExperience *intendedSpatialExperience NS_REFINED_FOR_SWIFT;
+
+@end
 
 NS_ASSUME_NONNULL_END
 
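A minimal usage sketch for the new `intendedSpatialExperience` category, assuming a visionOS 26 deployment target; the choice of `kAudioUnitSubType_RemoteIO` as the output unit is an assumption, and constructing a concrete `CASpatialAudioExperience` subclass to assign is outside this diff, so the property is only read back here:

```objc
#import <AudioToolbox/AudioToolbox.h>
#import <Foundation/Foundation.h>

// Sketch only: assumes visionOS 26 and an output audio unit.
static void LogIntendedSpatialExperience(void)
{
    AudioComponentDescription desc = {
        .componentType         = kAudioUnitType_Output,
        .componentSubType      = kAudioUnitSubType_RemoteIO,   // assumed output subtype
        .componentManufacturer = kAudioUnitManufacturer_Apple,
    };

    NSError *error = nil;
    AUAudioUnit *outputUnit = [[AUAudioUnit alloc] initWithComponentDescription:desc
                                                                           error:&error];
    if (outputUnit == nil) {
        NSLog(@"Could not create output unit: %@", error);
        return;
    }

    // Defaults to CAAutomaticSpatialAudio, i.e. the unit follows its AVAudioSession.
    CASpatialAudioExperience *experience = outputUnit.intendedSpatialExperience;
    NSLog(@"Intended spatial experience: %@", experience);

    // Assigning a different experience only takes effect on output units;
    // per the header comment above, setting it on any other AU is a no-op.
}
```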
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h	2025-04-19 05:14:29
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUAudioUnitImplementation.h	2025-05-24 01:42:37
@@ -200,7 +200,7 @@
 	NSInteger												outputBusNumber,
 	AudioBufferList *										outputData,
 	const AURenderEvent *__nullable 						realtimeEventListHead,
-	AURenderPullInputBlock __nullable __unsafe_unretained	pullInputBlock);
+	AURenderPullInputBlock __nullable __unsafe_unretained	pullInputBlock) CA_REALTIME_API;
 
 // =================================================================================================
 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h	2025-04-19 04:03:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AUComponent.h	2025-05-24 01:40:36
@@ -463,6 +463,9 @@
  
 	@constant       kAudioUnitSubType_AUSoundIsolation
 					An audio unit that can be used to isolate a specified sound type
+ 
+	@constant       kAudioUnitSubType_AUAudioMix
+					An audio unit that supports AudioMix separate-and-remix functionality
 */
 CF_ENUM(UInt32) {
 	kAudioUnitSubType_PeakLimiter			= 'lmtr',
@@ -479,6 +482,7 @@
 	kAudioUnitSubType_NBandEQ				= 'nbeq',
     kAudioUnitSubType_Reverb2               = 'rvb2',
 	kAudioUnitSubType_AUSoundIsolation API_AVAILABLE(macos(13.0), ios(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos) = 'vois',
+	kAudioUnitSubType_AUAudioMix API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 'amix',
 };
 
 #if !TARGET_OS_IPHONE
@@ -1060,7 +1064,7 @@
 						const AudioTimeStamp *			inTimeStamp,
 						UInt32							inBusNumber,
 						UInt32							inNumberFrames,
-						AudioBufferList * __nullable	ioData);
+						AudioBufferList * __nullable	ioData) CA_REALTIME_API;
 
 /*!
 	@typedef		AudioUnitPropertyListenerProc
@@ -1108,7 +1112,7 @@
 (*AUInputSamplesInOutputCallback)(	void *						inRefCon,
 									const AudioTimeStamp *		inOutputTimeStamp,
 									Float64						inInputSample,
-									Float64						inNumberInputSamples);
+									Float64						inNumberInputSamples) CA_REALTIME_API;
 
 /*!
 	@constant kAudioComponentRegistrationsChangedNotification
@@ -1437,7 +1441,8 @@
 									AudioUnitParameterID		inID,
 									AudioUnitScope				inScope,
 									AudioUnitElement			inElement,
-									AudioUnitParameterValue *	outValue)			
+									AudioUnitParameterValue *	outValue)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.0), ios(2.0), watchos(2.0), tvos(9.0));
 
 /*!
@@ -1469,7 +1474,8 @@
 									AudioUnitScope				inScope,
 									AudioUnitElement			inElement,
 									AudioUnitParameterValue		inValue,
-									UInt32						inBufferOffsetInFrames) 
+									UInt32						inBufferOffsetInFrames)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.0), ios(2.0), watchos(2.0), tvos(9.0));
 
 /*!
@@ -1506,7 +1512,8 @@
 extern OSStatus
 AudioUnitScheduleParameters(		AudioUnit						inUnit,
 									const AudioUnitParameterEvent *	inParameterEvent,
-									UInt32							inNumParamEvents) 
+									UInt32							inNumParamEvents)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.2), ios(2.0), watchos(2.0), tvos(9.0));
 
 /*!
@@ -1555,19 +1562,21 @@
 									const AudioTimeStamp *			inTimeStamp,
 									UInt32							inOutputBusNumber,
 									UInt32							inNumberFrames,
-									AudioBufferList *				ioData)			
+									AudioBufferList *				ioData)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.2), ios(2.0), watchos(2.0), tvos(9.0));
 
 extern OSStatus
-AudioUnitProcess (					AudioUnit						inUnit, 
+AudioUnitProcess (					AudioUnit						inUnit,
 									AudioUnitRenderActionFlags * __nullable	ioActionFlags, 
 									const AudioTimeStamp *			inTimeStamp, 
 									UInt32							inNumberFrames, 
 									AudioBufferList *				ioData)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.7), ios(6.0), watchos(2.0), tvos(9.0));
 
 extern OSStatus
-AudioUnitProcessMultiple(			AudioUnit						inUnit, 
+AudioUnitProcessMultiple(			AudioUnit						inUnit,
 									AudioUnitRenderActionFlags * __nullable ioActionFlags, 
 									const AudioTimeStamp *			inTimeStamp, 
 									UInt32							inNumberFrames,
@@ -1575,6 +1584,7 @@
 									const AudioBufferList * __nonnull * __nonnull inInputBufferLists,
 									UInt32							inNumberOutputBufferLists,
 									AudioBufferList * __nonnull * __nonnull ioOutputBufferLists)
+												CA_REALTIME_API
 												API_AVAILABLE(macos(10.7), ios(6.0), watchos(2.0), tvos(9.0));
 	
 /*!
@@ -1835,25 +1845,25 @@
 (*AudioUnitRemoveRenderNotifyProc) (void *self, AURenderCallback proc, void * __nullable userData);
 
 typedef OSStatus	
-(*AudioUnitScheduleParametersProc) (void *self, const AudioUnitParameterEvent *events, UInt32 numEvents);
+(*AudioUnitScheduleParametersProc) (void *self, const AudioUnitParameterEvent *events, UInt32 numEvents) CA_REALTIME_API;
 
 typedef OSStatus	
 (*AudioUnitResetProc) (void *self, AudioUnitScope		inScope, AudioUnitElement	inElement);
 
-typedef OSStatus	
-(*AudioUnitComplexRenderProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp, 
+typedef OSStatus
+(*AudioUnitComplexRenderProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp,
 									UInt32 inOutputBusNumber, UInt32 inNumberOfPackets, UInt32 *outNumberOfPackets, 
 									AudioStreamPacketDescription *outPacketDescriptions, AudioBufferList *ioData, 
-									void *outMetadata, UInt32 *outMetadataByteSize);
+									void *outMetadata, UInt32 *outMetadataByteSize) CA_REALTIME_API;
 
-typedef OSStatus	
-(*AudioUnitProcessProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp, 
-									UInt32 inNumberFrames, AudioBufferList *ioData);
+typedef OSStatus
+(*AudioUnitProcessProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp,
+									UInt32 inNumberFrames, AudioBufferList *ioData) CA_REALTIME_API;
 
-typedef OSStatus	
-(*AudioUnitProcessMultipleProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp, 
+typedef OSStatus
+(*AudioUnitProcessMultipleProc) (void *self, AudioUnitRenderActionFlags * __nullable ioActionFlags, const AudioTimeStamp *inTimeStamp,
 									UInt32 inNumberFrames, UInt32 inNumberInputBufferLists, const AudioBufferList * __nonnull * __nonnull inInputBufferLists,
-									UInt32 inNumberOutputBufferLists, AudioBufferList * __nonnull * __nonnull ioOutputBufferLists);
+									UInt32 inNumberOutputBufferLists, AudioBufferList * __nonnull * __nonnull ioOutputBufferLists) CA_REALTIME_API;
 
 
 /*!
@@ -1872,7 +1882,7 @@
 								AudioUnitParameterID		inID,
 								AudioUnitScope				inScope,
 								AudioUnitElement			inElement,
-								AudioUnitParameterValue *	outValue);
+								AudioUnitParameterValue *	outValue) CA_REALTIME_API;
 								
 /*!
 	@typedef		AudioUnitSetParameterProc
@@ -1891,7 +1901,7 @@
 								AudioUnitScope				inScope,
 								AudioUnitElement			inElement,
 								AudioUnitParameterValue		inValue,
-								UInt32						inBufferOffsetInFrames);
+								UInt32						inBufferOffsetInFrames) CA_REALTIME_API;
 								
 /*!
 	@typedef		AudioUnitRenderProc
@@ -1910,7 +1920,7 @@
 								const AudioTimeStamp *			inTimeStamp,
 								UInt32							inOutputBusNumber,
 								UInt32							inNumberFrames,
-								AudioBufferList *				ioData);
+								AudioBufferList *				ioData) CA_REALTIME_API;
 
 
 //================================================================================================
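A sketch of locating the new `kAudioUnitSubType_AUAudioMix` component ('amix', macOS/iOS 26). Treating it as an effect unit (`kAudioUnitType_Effect`) is an assumption; the diff does not state the component type:

```objc
#import <AudioToolbox/AudioToolbox.h>

// Sketch only: the component type is assumed to be kAudioUnitType_Effect.
static AudioComponentInstance CreateAudioMixUnit(void)
{
    AudioComponentDescription desc = {
        .componentType         = kAudioUnitType_Effect,          // assumption
        .componentSubType      = kAudioUnitSubType_AUAudioMix,   // 'amix', new in the 26.0 SDKs
        .componentManufacturer = kAudioUnitManufacturer_Apple,
    };

    AudioComponent component = AudioComponentFindNext(NULL, &desc);
    if (component == NULL) {
        return NULL;   // component not present on this OS version / platform
    }

    AudioComponentInstance unit = NULL;
    OSStatus status = AudioComponentInstanceNew(component, &unit);
    return (status == noErr) ? unit : NULL;
}
```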
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioCodec.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioCodec.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioCodec.h	2025-04-19 05:14:29
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioCodec.h	2025-05-24 01:40:36
@@ -444,7 +444,7 @@
 						unreliable IP networks where the encoder needs to adapt immediately to network condition changes.
 						Escape property ID's start with a '^' symbol as the first char code. This bypasses the initilization check.
     @constant		kAudioCodecPropertyDynamicRangeControlMode
-						A UInt32 specifying the DRC mode. Supported modes are defined as enum with the
+						A UInt32 specifying the decoder DRC mode. Supported modes are defined as enum with the
 						prefix kDynamicRangeControlMode (see below). For certain legacy metadata this property controls which
 						dynamic range compression scheme is applied if the information is present in
 						the bitstream. The default is kDynamicRangeControlMode_None.
@@ -472,6 +472,18 @@
                         A Float32 specifying the program target loudness in LKFS for decoders. It has the same effect
                         as kAudioCodecPropertyProgramTargetLevel, but this property can also be set on an initialized decoder
                         object. It will be applied immediately, if supported.
+    @constant        kAudioCodecPropertyDynamicRangeControlConfiguration
+                        A UInt32 specifying the encoder DRC configuration. Configurations are defined as enum with the prefix
+                        kAudioCodecDynamicRangeControlConfiguration_. When supported by the encoder, this property controls which
+                        configuration is applied when a bitstream is generated. The default configuration for an APAC
+                        encoder is kAudioCodecDynamicRangeControlConfiguration_Capture, otherwise it is kAudioCodecDynamicRangeControlConfiguration_None.
+    @constant        kAudioCodecPropertyContentSource
+                        An SInt32 index to select a pre-defined content source type that describes the content type and how it was generated.
+                        This is an encoder property with read/write access, if supported.  Supported values are defined with a prefix kAudioCodecContentSource_.
+    @constant        kAudioCodecPropertyASPFrequency
+                        A UInt32 to set the frequency of Audio Sync Packets (ASP). The value must be larger than 2.
+                        A recommended value is 75 so that each 75th packet is an ASP.
+                        This is an encoder property with read/write access, if supported.
 */
 CF_ENUM(AudioCodecPropertyID)
 {
@@ -512,6 +524,9 @@
     kAudioCodecPropertyAdjustTargetLevelConstant                                = '^tlc',
     kAudioCodecPropertyProgramTargetLevel                                       = 'pptl',
     kAudioCodecPropertyAdjustTargetLevel                                        = '^ptl',
+    kAudioCodecPropertyDynamicRangeControlConfiguration                         = 'cdrc',
+    kAudioCodecPropertyContentSource                                            = 'csrc',
+    kAudioCodecPropertyASPFrequency                                             = 'aspf',
 };
 
 
@@ -685,6 +700,109 @@
     kDynamicRangeCompressionProfile_NoisyEnvironment        = 2,
     kDynamicRangeCompressionProfile_LimitedPlaybackRange    = 3,
     kDynamicRangeCompressionProfile_GeneralCompression      = 6
+};
+
+/*!
+    @enum            AudioCodecDynamicRangeControlConfiguration
+
+    @discussion     Constants to be used with kAudioCodecPropertyDynamicRangeControlConfiguration for encoding
+
+    @constant       kAudioCodecDynamicRangeControlConfiguration_None
+                        Dynamic range compression disabled
+    @constant       kAudioCodecDynamicRangeControlConfiguration_Music
+                        Dynamic range compression for music
+    @constant       kAudioCodecDynamicRangeControlConfiguration_Speech
+                        Dynamic range compression for speech
+    @constant       kAudioCodecDynamicRangeControlConfiguration_Movie
+                        Dynamic range compression for movie sound tracks
+    @constant       kAudioCodecDynamicRangeControlConfiguration_Capture
+                        Dynamic range compression for capture (live encoding)
+*/
+CF_ENUM(UInt32)
+{
+    kAudioCodecDynamicRangeControlConfiguration_None     = 0,
+    kAudioCodecDynamicRangeControlConfiguration_Music    = 1,
+    kAudioCodecDynamicRangeControlConfiguration_Speech   = 2,
+    kAudioCodecDynamicRangeControlConfiguration_Movie    = 3,
+    kAudioCodecDynamicRangeControlConfiguration_Capture  = 4
+};
+
+/*!
+    @enum           AudioCodecContentSource
+
+    @discussion     Constants to be used with kAudioCodecPropertyContentSource to indicate the content type.
+
+    @constant       kAudioCodecContentSource_Unspecified
+                        Unspecified content source
+    @constant       kAudioCodecContentSource_Reserved
+                        Reserved index
+    @constant       kAudioCodecContentSource_AppleCapture_Traditional
+                        Traditional Apple device capture
+    @constant       kAudioCodecContentSource_AppleCapture_Spatial
+                        Spatial Apple device capture
+    @constant       kAudioCodecContentSource_AppleCapture_Spatial_Enhanced
+                        Reserved for Apple use
+    @constant       kAudioCodecContentSource_AppleMusic_Traditional
+                        Traditional Apple music and music video content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_AppleMusic_Spatial
+                        Spatial Apple music and music video content
+    @constant       kAudioCodecContentSource_AppleAV_Traditional_Offline
+                        Traditional Apple professional AV offline encoded content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_AppleAV_Spatial_Offline
+                        Spatial Apple professional AV offline encoded content
+    @constant       kAudioCodecContentSource_AppleAV_Traditional_Live
+                        Traditional Apple professional AV live content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_AppleAV_Spatial_Live
+                        Spatial Apple professional AV live content
+    @constant       kAudioCodecContentSource_ApplePassthrough
+                        Apple passthrough content (use only if source information is not available)
+
+    @constant       kAudioCodecContentSource_Capture_Traditional
+                        Traditional device capture
+    @constant       kAudioCodecContentSource_Capture_Spatial
+                        Spatial device capture
+    @constant       kAudioCodecContentSource_Capture_Spatial_Enhanced
+                        Reserved for future use
+    @constant       kAudioCodecContentSource_Music_Traditional
+                        Traditional music and music video content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_Music_Spatial
+                        Spatial music and music video content
+    @constant       kAudioCodecContentSource_AV_Traditional_Offline
+                        Traditional professional AV offline encoded content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_AV_Spatial_Offline
+                        Spatial professional AV offline encoded content
+    @constant       kAudioCodecContentSource_AV_Traditional_Live
+                        Traditional professional AV live content such as stereo and multichannel
+    @constant       kAudioCodecContentSource_AV_Spatial_Live
+                        Spatial professional AV live content
+    @constant       kAudioCodecContentSource_Passthrough
+                        Passthrough content (use only if source information is not available)
+*/
+CF_ENUM(SInt32)
+{
+    kAudioCodecContentSource_Unspecified                    = -1,
+    kAudioCodecContentSource_Reserved                       = 0,
+    kAudioCodecContentSource_AppleCapture_Traditional       = 1,
+    kAudioCodecContentSource_AppleCapture_Spatial           = 2,
+    kAudioCodecContentSource_AppleCapture_Spatial_Enhanced  = 3,
+    kAudioCodecContentSource_AppleMusic_Traditional         = 4,
+    kAudioCodecContentSource_AppleMusic_Spatial             = 5,
+    kAudioCodecContentSource_AppleAV_Traditional_Offline    = 6,
+    kAudioCodecContentSource_AppleAV_Spatial_Offline        = 7,
+    kAudioCodecContentSource_AppleAV_Traditional_Live       = 8,
+    kAudioCodecContentSource_AppleAV_Spatial_Live           = 9,
+    kAudioCodecContentSource_ApplePassthrough               = 10,
+    
+    kAudioCodecContentSource_Capture_Traditional            = 33,
+    kAudioCodecContentSource_Capture_Spatial                = 34,
+    kAudioCodecContentSource_Capture_Spatial_Enhanced       = 35,
+    kAudioCodecContentSource_Music_Traditional              = 36,
+    kAudioCodecContentSource_Music_Spatial                  = 37,
+    kAudioCodecContentSource_AV_Traditional_Offline         = 38,
+    kAudioCodecContentSource_AV_Spatial_Offline             = 39,
+    kAudioCodecContentSource_AV_Traditional_Live            = 40,
+    kAudioCodecContentSource_AV_Spatial_Live                = 41,
+    kAudioCodecContentSource_Passthrough                    = 42
 };
 
 /*!
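A sketch of setting the new encoder-side codec properties on an already-open `AudioCodec` encoder instance (obtaining and initializing the codec is omitted); which encoders honour these properties is not stated in the diff beyond the APAC example above, so the calls are expected to fail gracefully elsewhere:

```objc
#import <AudioToolbox/AudioToolbox.h>

// Sketch only: `encoder` is assumed to be an open AudioCodec encoder instance.
static OSStatus ConfigureEncoderDRCAndContentSource(AudioCodec encoder)
{
    UInt32 drcConfig = kAudioCodecDynamicRangeControlConfiguration_Movie;
    OSStatus status = AudioCodecSetProperty(encoder,
                                            kAudioCodecPropertyDynamicRangeControlConfiguration,
                                            sizeof(drcConfig), &drcConfig);
    if (status != noErr) {
        return status;   // the encoder may not support DRC configurations
    }

    SInt32 contentSource = kAudioCodecContentSource_AV_Spatial_Offline;
    return AudioCodecSetProperty(encoder,
                                 kAudioCodecPropertyContentSource,
                                 sizeof(contentSource), &contentSource);
}
```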
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h	2025-04-19 03:40:58
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioConverter.h	2025-05-28 07:59:14
@@ -714,7 +714,9 @@
                                 UInt32                          inInputDataSize,
                                 const void *                    inInputData,
                                 UInt32 *                        ioOutputDataSize,
-                                void *                          outOutputData)  API_AVAILABLE(macos(10.1), ios(2.0), watchos(2.0), tvos(9.0));
+                                void *                          outOutputData)
+                        CA_REALTIME_API
+                        API_AVAILABLE(macos(10.1), ios(2.0), watchos(2.0), tvos(9.0));
 
 //-----------------------------------------------------------------------------
 /*!
@@ -782,6 +784,20 @@
                                         AudioStreamPacketDescription * __nullable * __nullable outDataPacketDescription,
                                         void * __nullable               inUserData);
 
+/*!
+	@typedef	AudioConverterComplexInputDataProcRealtimeSafe
+	@abstract	Realtime-safe variant of AudioConverterComplexInputDataProc.
+	
+	See the discussions of AudioConverterComplexInputDataProc and AudioConverterFillComplexBuffer.
+*/
+typedef OSStatus
+(*AudioConverterComplexInputDataProcRealtimeSafe)(
+                                        AudioConverterRef               inAudioConverter,
+                                        UInt32 *                        ioNumberDataPackets,
+                                        AudioBufferList *               ioData,
+                                        AudioStreamPacketDescription * __nullable * __nullable outDataPacketDescription,
+                                        void * __nullable               inUserData) CA_REALTIME_API;
+
 //-----------------------------------------------------------------------------
 /*!
     @function   AudioConverterFillComplexBuffer
@@ -828,7 +844,75 @@
                                     AudioStreamPacketDescription * __nullable outPacketDescription)
                                                                                 API_AVAILABLE(macos(10.2), ios(2.0), watchos(2.0), tvos(9.0));
 
+/*!
+    @function   AudioConverterFillComplexBufferRealtimeSafe
+    @abstract   Identical to AudioConverterFillComplexBuffer, with the addition of a realtime-safety
+    			guarantee.
+	
+	Conversions involving only PCM formats -- interleaving, deinterleaving, channel count changes,
+	sample rate conversions -- are realtime-safe. Such conversions may use this API in order to
+	obtain compiler checks involving the `CA_REALTIME_API` attributes.
+	
+	At runtime, this function returns `kAudioConverterErr_OperationNotSupported` if the conversion 
+	requires non-realtime-safe functionality.
+*/
+extern OSStatus
+AudioConverterFillComplexBufferRealtimeSafe(
+                                    AudioConverterRef                   inAudioConverter,
+                                    AudioConverterComplexInputDataProcRealtimeSafe inInputDataProc,
+                                    void * __nullable                   inInputDataProcUserData,
+                                    UInt32 *                            ioOutputDataPacketSize,
+                                    AudioBufferList *                   outOutputData,
+                                    AudioStreamPacketDescription * __nullable outPacketDescription)
+                                        CA_REALTIME_API
+                                        API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0), visionos(26.0));
 
+/*!
+    @function   AudioConverterFillComplexBufferWithPacketDependencies
+    @abstract   Converts audio data supplied by a callback function, supporting non-interleaved and
+                packetized formats, and also supporting packet dependency descriptions.
+    @discussion For output formats that use packet dependency descriptions, this must be used instead of
+                AudioConverterFillComplexBuffer, which will return an error for such formats.
+    @param inAudioConverter         The audio converter to use for format conversion.
+    @param inInputDataProc          A callback function that supplies audio data to convert.
+                                    This callback is invoked repeatedly as the converter is ready for
+                                    new input data.
+    @param inInputDataProcUserData  Custom data for use by your application when receiving a
+                                    callback invocation.
+    @param ioOutputDataPacketSize   On input, the size of the output buffer (in the `outOutputData`
+                                    parameter), expressed in number packets in the audio converter’s
+                                    output format.  On output, the number of packets of converted data
+                                    that were written to the output buffer.
+    @param outOutputData            The converted output data is written to this buffer. On entry, the
+                                    buffers' `mDataByteSize` fields (which must all be the same) reflect
+                                    buffer capacity.  On exit, `mDataByteSize` is set to the number of
+                                    bytes written.
+    @param outPacketDescriptions    If not `NULL`, and if the audio converter's output format uses packet
+                                    descriptions, this must point to a block of memory capable of holding
+                                    the number of packet descriptions specified in the `ioOutputDataPacketSize`
+                                    parameter.  (See _Audio Format Services Reference_ for functions that
+                                    let you determine whether an audio format uses packet descriptions).
+                                    If not `NULL` on output and if the audio converter's output format
+                                    uses packet descriptions, then this parameter contains an array of
+                                    packet descriptions.
+    @param outPacketDependencies    Should point to a memory block capable of holding the number of
+                                    packet dependency description structures specified in the
+                                    `ioOutputDataPacketSize` parameter.  Must not be `NULL`.  This array
+                                    will be filled out only by encoders that produce a format which has a
+                                    non-zero value for `kAudioFormatProperty_FormatEmploysDependentPackets`.
+    @result                         A result code.
+*/
+OSStatus
+AudioConverterFillComplexBufferWithPacketDependencies(
+    AudioConverterRef                            inAudioConverter,
+    AudioConverterComplexInputDataProc           inInputDataProc,
+    void * __nullable                            inInputDataProcUserData,
+    UInt32 *                                     ioOutputDataPacketSize,
+    AudioBufferList *                            outOutputData,
+    AudioStreamPacketDescription * __nullable    outPacketDescriptions,
+    AudioStreamPacketDependencyDescription *     outPacketDependencies)
+API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0), visionos(26.0));
+
 //-----------------------------------------------------------------------------
 /*!
     @function   AudioConverterConvertComplexBuffer
@@ -855,7 +939,8 @@
                                     UInt32                          inNumberPCMFrames,
                                     const AudioBufferList *         inInputData,
                                     AudioBufferList *               outOutputData)
-                                                                                API_AVAILABLE(macos(10.7), ios(5.0), watchos(2.0), tvos(9.0));
+                                    	CA_REALTIME_API
+										API_AVAILABLE(macos(10.7), ios(5.0), watchos(2.0), tvos(9.0));
 
 // =================================================================================================
 // DEPRECATED
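A sketch of the new realtime-safe fill call with a trivially realtime-safe input callback (no allocation, no locks, no Objective-C messaging); the converter is assumed to be a PCM-only converter as required above, with one interleaved float frame per packet:

```objc
#import <AudioToolbox/AudioToolbox.h>

typedef struct {
    const float *samples;      // preloaded, interleaved float PCM
    UInt32       channels;
    UInt32       framesLeft;   // frames remaining in `samples`
} PullState;

// Realtime-safe input proc: only hands back slices of the preloaded buffer.
static OSStatus PullPCM(AudioConverterRef converter,
                        UInt32 *ioNumberDataPackets,
                        AudioBufferList *ioData,
                        AudioStreamPacketDescription **outDataPacketDescription,
                        void *inUserData)
{
    PullState *state = (PullState *)inUserData;
    if (state->framesLeft == 0) {
        *ioNumberDataPackets = 0;   // no more input available right now
        return noErr;
    }
    if (*ioNumberDataPackets > state->framesLeft)
        *ioNumberDataPackets = state->framesLeft;

    ioData->mNumberBuffers              = 1;
    ioData->mBuffers[0].mNumberChannels = state->channels;
    ioData->mBuffers[0].mData           = (void *)state->samples;
    ioData->mBuffers[0].mDataByteSize   =
        *ioNumberDataPackets * state->channels * sizeof(float);

    state->samples    += *ioNumberDataPackets * state->channels;
    state->framesLeft -= *ioNumberDataPackets;
    return noErr;
}

// Sketch only: `converter` must be PCM-to-PCM, otherwise the call returns
// kAudioConverterErr_OperationNotSupported at runtime (see the comment above).
static OSStatus ConvertRealtime(AudioConverterRef converter,
                                PullState *state,
                                UInt32 *ioOutputPackets,
                                AudioBufferList *output)
{
    return AudioConverterFillComplexBufferRealtimeSafe(converter, PullPCM, state,
                                                       ioOutputPackets, output, NULL);
}
```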
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFile.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFile.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFile.h	2025-04-19 01:38:16
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFile.h	2025-05-28 07:59:15
@@ -922,6 +922,39 @@
 
 
 /*!
+    @function   AudioFileWritePacketsWithDependencies
+    @abstract   Write packets of audio data with corresponding packet dependencies to an audio data file.
+    @discussion For all uncompressed formats, `packets == frames`.
+    @param inAudioFile          The audio file to write to.
+    @param inUseCache           Set to `true` if you want to cache the data. Otherwise, set to `false`.
+    @param inNumBytes           The number of bytes of audio data being written.
+    @param inPacketDescriptions A pointer to an array of packet descriptions for the audio data.
+                                Not all formats require packet descriptions. If no packet descriptions
+                                are required, for instance, if you are writing CBR data,  pass `NULL`.
+    @param inPacketDependencies A pointer to an array of packet dependencies for the audio data.
+                                This must not be `NULL`.  To write packets without dependencies,
+                                use ``AudioFileWritePackets`` instead.
+    @param inStartingPacket     The packet index for the placement of the first provided packet.
+    @param ioNumPackets         On input, a pointer to the number of packets to write.
+                                On output, a pointer to the number of packets actually written.
+    @param inBuffer             A pointer to user-allocated memory containing the new audio data
+                                to write to the audio data file.
+    @result                     A result code. See Result Codes.
+*/
+extern OSStatus
+AudioFileWritePacketsWithDependencies (
+    AudioFileID                                         inAudioFile,
+    Boolean                                             inUseCache,
+    UInt32                                              inNumBytes,
+    const AudioStreamPacketDescription * __nullable     inPacketDescriptions,
+    const AudioStreamPacketDependencyDescription *      inPacketDependencies,
+    SInt64                                              inStartingPacket,
+    UInt32 *                                            ioNumPackets,
+    const void *                                        inBuffer)
+API_AVAILABLE(macos(26.0), ios(26.0), watchos(26.0), tvos(26.0), visionos(26.0));
+
+
+/*!
     @function	AudioFileCountUserData
     @abstract   Get the number of user data items with a certain ID in the file
     @discussion		"User Data" refers to chunks in AIFF, CAF and WAVE files, or resources 
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFormat.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFormat.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFormat.h	2025-04-19 04:03:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioFormat.h	2025-05-28 08:05:49
@@ -249,7 +249,7 @@
 					The specifier is an AudioFormatInfo struct. At a minimum formatID member of the ASBD struct must filled in. Other fields
 					may be filled in. If there is no magic cookie, then the number of channels and sample rate should be filled in. 
 	@constant	kAudioFormatProperty_FirstPlayableFormatFromList
-					The specifier is a list of 1 or more AudioFormatListItem. Generally it is the list of these items returned from kAudioFormatProperty_FormatList. The property value retrieved is an UInt32 that specifies an index into that list. The list that the caller provides is generally sorted with the first item as the best format (most number of channels, highest sample rate), and the returned index represents the first item in that list that can be played by the system. 
+					The specifier is a list of 1 or more AudioFormatListItem. Generally it is the list of these items returned from kAudioFormatProperty_FormatList. The property value retrieved is an UInt32 that specifies an index into that list. The list that the caller provides is generally sorted with the first item as the best format (most number of channels, highest sample rate), and the returned index represents the first item in that list that can be played by the system.
 					Thus, the property is typically used to determine the best playable format for a given (layered) audio stream
 	@constant   kAudioFormatProperty_ValidateChannelLayout
 					The specifier is an AudioChannelLayout. The property value and size are not used and must be set to NULL.
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioQueue.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioQueue.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioQueue.h	2025-04-19 04:03:14
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioQueue.h	2025-05-28 07:59:14
@@ -256,6 +256,12 @@
     @constant   kAudioQueueProperty_TimePitchBypass
         A read/write property whose value is a UInt32 describing whether the time/pitch unit
         has been bypassed (1=bypassed, 0=not bypassed).
+    @constant   kAudioQueueProperty_IntendedSpatialExperience
+        A read/write property whose value is an CASpatialAudioExperience describing this specific
+        AudioQueue's intended spatial experience. Only useful for output AudioQueue's not configured
+        in offline mode. Setting this property on an input AudioQueue or an offline AudioQueue is
+        a no-op. The default value of CAAutomaticSpatialAudio value means the AudioQueue uses its
+        AVAudioSession's intended spatial experience. See CASpatialAudioExperience for more details.
 */
 CF_ENUM(AudioQueuePropertyID) {
     kAudioQueueProperty_IsRunning               = 'aqrn',       //!< value is UInt32
@@ -279,6 +285,8 @@
     kAudioQueueProperty_EnableTimePitch         = 'q_tp',       // value is UInt32, 0/1
     kAudioQueueProperty_TimePitchAlgorithm      = 'qtpa',       // value is UInt32. See values below.
     kAudioQueueProperty_TimePitchBypass         = 'qtpb',       // value is UInt32, 1=bypassed
+    
+    kAudioQueueProperty_IntendedSpatialExperience API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) CF_REFINED_FOR_SWIFT = 'iseo', // value is CASpatialAudioExperience*
 };
 
 /*!
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioServices.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioServices.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioServices.h	2025-04-19 03:40:57
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioServices.h	2025-05-24 01:42:36
@@ -390,6 +390,48 @@
                                                                     API_AVAILABLE(macos(10.5), ios(2.0), watchos(2.0), tvos(9.0))
                                                                     ;
 
+/*!
+    @enum AudioServicesPlaySystemSoundWithDetails Dictionary Keys
+    @abstract   Keys that are passed in a dictionary to AudioServicesPlaySystemSoundWithDetails
+    @constant   kAudioServicesDetailIntendedSpatialExperience
+                    Must be any non-nil CASpatialAudioExperience. The system sound
+                    will have this spatial experience for the duration of its
+                    playback and cannot change mid-playback.
+*/
+extern const CFStringRef kAudioServicesDetailIntendedSpatialExperience API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) CF_REFINED_FOR_SWIFT;
+
+/*!
+    @function       AudioServicesPlaySystemSoundWithDetails
+    @abstract       Play the sound designated by the provided SystemSoundID.
+    @param          inSystemSoundID
+                        A SystemSoundID for the system sound server to play.
+    @param            inDetails
+                        A set of details as described above.
+    @param          inCompletionBlock
+                        The completion block gets executed for every attempt to play a system sound irrespective
+                        of success or failure. The callbacks are issued on a serial queue and the client is
+                        responsible for handling thread safety.
+*/
+extern void AudioServicesPlaySystemSoundWithDetails(SystemSoundID inSystemSoundID,
+                                                    CFDictionaryRef _Nullable inDetails,
+                                                    void (^_Nullable inCompletionBlock)(void)) API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) CF_REFINED_FOR_SWIFT;
+
+/*!
+    @function       AudioServicesPlayAlertSoundWithDetails
+    @abstract       Play the alert designated by the provided SystemSoundID.
+    @param          inSystemSoundID
+                        A SystemSoundID for the system sound server to play with alert sound behavior.
+    @param            inDetails
+                        A set of details as described above.
+    @param          inCompletionBlock
+                        The completion block gets executed for every attempt to play a system sound irrespective
+                        of success or failure. The callbacks are issued on a serial queue and the client is
+                        responsible for handling thread safety.
+*/
+extern void AudioServicesPlayAlertSoundWithDetails(SystemSoundID inSystemSoundID,
+                                                   CFDictionaryRef _Nullable inDetails,
+                                                   void (^_Nullable inCompletionBlock)(void)) API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) CF_REFINED_FOR_SWIFT;
+
 CF_ASSUME_NONNULL_END
     
 #ifdef __cplusplus
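A sketch of the new completion-block variant on visionOS 26. Passing `NULL` for the details dictionary keeps the session's default spatial experience; overriding it would require a `CASpatialAudioExperience` under `kAudioServicesDetailIntendedSpatialExperience`, which is not shown because constructing one is outside this diff:

```objc
#import <AudioToolbox/AudioToolbox.h>
#import <Foundation/Foundation.h>

// Sketch only: `soundID` is assumed to have been created with
// AudioServicesCreateSystemSoundID() from a short sound file URL.
static void PlayWithCompletion(SystemSoundID soundID)
{
    AudioServicesPlaySystemSoundWithDetails(soundID,
                                            NULL,   // no detail overrides
                                            ^{
        // Runs on a serial queue for every playback attempt, success or failure.
        NSLog(@"System sound %u finished (or failed).", (unsigned)soundID);
    });
}
```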
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioToolbox.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioToolbox.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioToolbox.h	2025-04-19 05:14:30
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioToolbox.h	2025-05-28 07:59:15
@@ -34,6 +34,9 @@
 #include <AudioToolbox/AudioWorkInterval.h>
 #include <AudioToolbox/CAFFile.h>
 #include <AudioToolbox/CAShow.h>
+#if !TARGET_OS_OSX && !TARGET_OS_MACCATALYST
+#include <AudioToolbox/CASpatialAudioExperience.h>
+#endif
 #include <AudioToolbox/ExtendedAudioFile.h>
 #include <AudioToolbox/MusicDevice.h>
 #include <AudioToolbox/MusicPlayer.h>
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h	2025-04-19 01:38:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitParameters.h	2025-05-30 23:17:49
@@ -726,6 +726,61 @@
     kAUSoundIsolationSoundType_Voice API_AVAILABLE(macos(13.0), ios(16.0)) API_UNAVAILABLE(tvos) API_UNAVAILABLE(watchos) = 1
 };
 
+// Parameters for AUAudioMix
+CF_ENUM(AudioUnitParameterID) {
+    // Global, Enum, 0->9, 0
+    kAUAudioMixParameter_Style API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 0,
+    // Global, float, 0->1, 0.5
+    kAUAudioMixParameter_RemixAmount API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 1,
+};
+
+/*!
+ @enum          AUAudioMix styles
+ @brief         Constants available as values for parameter kAUAudioMixParameter_Style.
+ 
+ @constant      kAudioMixRenderingStyle_Cinematic
+    Cinematic rendering style (default)
+
+ @constant      kAudioMixRenderingStyle_Studio
+    Studio rendering style
+ 
+ @constant      kAudioMixRenderingStyle_InFrame
+    In-Frame rendering style
+ 
+ @constant      kAudioMixRenderingStyle_CinematicBackgroundStem
+    Cinematic rendering style - background only
+ 
+ @constant      kAudioMixRenderingStyle_CinematicForegroundStem
+    Cinematic rendering style - foreground only
+ 
+ @constant      kAudioMixRenderingStyle_StudioForegroundStem
+    Studio rendering style - foreground only
+ 
+ @constant      kAudioMixRenderingStyle_InFrameForegroundStem
+    In-Frame rendering style - foreground only
+ 
+ @constant      kAudioMixRenderingStyle_Standard
+    Standard rendering style
+ 
+ @constant      kAudioMixRenderingStyle_StudioBackgroundStem
+    Studio rendering style - background only
+ 
+ @constant      kAudioMixRenderingStyle_InFrameBackgroundStem
+    In-Frame rendering style - background only
+ */
+typedef CF_ENUM( UInt32, AUAudioMixRenderingStyle ) {
+    kAudioMixRenderingStyle_Cinematic API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 0,
+    kAudioMixRenderingStyle_Studio API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 1,
+    kAudioMixRenderingStyle_InFrame API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 2,
+    kAudioMixRenderingStyle_CinematicBackgroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 3,
+    kAudioMixRenderingStyle_CinematicForegroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 4,
+    kAudioMixRenderingStyle_StudioForegroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 5,
+    kAudioMixRenderingStyle_InFrameForegroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 6,
+    kAudioMixRenderingStyle_Standard API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 7,
+    kAudioMixRenderingStyle_StudioBackgroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 8,
+    kAudioMixRenderingStyle_InFrameBackgroundStem API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 9,
+};
+
 #pragma mark Apple Specific - Desktop
 
 #if !TARGET_OS_IPHONE
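A sketch of driving the two new AUAudioMix parameters through the classic C parameter API; `audioMixUnit` is assumed to be an already-initialized instance of the new 'amix' component (see the AUComponent.h section above):

```objc
#import <AudioToolbox/AudioToolbox.h>

// Sketch only: pick the studio rendering style and a 70/30 remix balance.
static OSStatus ConfigureAudioMixParameters(AudioUnit audioMixUnit)
{
    OSStatus status = AudioUnitSetParameter(audioMixUnit,
                                            kAUAudioMixParameter_Style,
                                            kAudioUnitScope_Global, 0,
                                            kAudioMixRenderingStyle_Studio,
                                            0 /* inBufferOffsetInFrames */);
    if (status != noErr)
        return status;

    // Global, float, 0->1, default 0.5 per the comment above.
    return AudioUnitSetParameter(audioMixUnit,
                                 kAUAudioMixParameter_RemixAmount,
                                 kAudioUnitScope_Global, 0,
                                 0.7f,
                                 0 /* inBufferOffsetInFrames */);
}
```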
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h	2025-04-19 05:14:29
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitProperties.h	2025-05-28 07:59:14
@@ -1162,7 +1162,7 @@
 */
 typedef OSStatus (*HostCallback_GetBeatAndTempo)(void * __nullable	inHostUserData,
 											Float64	* __nullable	outCurrentBeat,
-											Float64	* __nullable	outCurrentTempo);
+											Float64	* __nullable	outCurrentTempo) CA_REALTIME_API;
 
 /*!
 	@typedef		HostCallback_GetMusicalTimeLocation
@@ -1189,7 +1189,7 @@
 												UInt32 * __nullable			outDeltaSampleOffsetToNextBeat,
 												Float32 * __nullable		outTimeSig_Numerator,
 												UInt32 * __nullable			outTimeSig_Denominator,
-												Float64 * __nullable		outCurrentMeasureDownBeat);
+												Float64 * __nullable		outCurrentMeasureDownBeat) CA_REALTIME_API;
 
 /*!
 	@typedef		HostCallback_GetTransportState
@@ -1220,7 +1220,7 @@
 										Float64 * __nullable			outCurrentSampleInTimeLine,
 										Boolean * __nullable			outIsCycling,
 										Float64 * __nullable			outCycleStartBeat,
-										Float64 * __nullable			outCycleEndBeat);
+										Float64 * __nullable			outCycleEndBeat) CA_REALTIME_API;
 
 /*!
 	@typedef		HostCallback_GetTransportState2
@@ -1253,7 +1253,7 @@
 										Float64 * __nullable			outCurrentSampleInTimeLine,
 										Boolean * __nullable			outIsCycling,
 										Float64 * __nullable			outCycleStartBeat,
-										Float64 * __nullable			outCycleEndBeat);
+										Float64 * __nullable			outCycleEndBeat) CA_REALTIME_API;
 
 /*!
 	@struct			HostCallbackInfo
@@ -1321,7 +1321,7 @@
 (*AUMIDIOutputCallback)(void * __nullable				userData,
 						const AudioTimeStamp *			timeStamp,
 						UInt32							midiOutNum,
-						const struct MIDIPacketList *	pktlist);
+						const struct MIDIPacketList *	pktlist) CA_REALTIME_API;
 
 /*!
 	@struct			AUMIDIOutputCallbackStruct
@@ -1398,7 +1398,7 @@
 
 	For further background, see <AudioToolbox/AudioWorkInterval.h>.
 */
-typedef void (^AURenderContextObserver)(const AudioUnitRenderContext *context)
+typedef void (^AURenderContextObserver)(const AudioUnitRenderContext *context) CA_REALTIME_API
 	__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads");
 #endif 
 /*!
@@ -1425,9 +1425,9 @@
 	@param eventList
 					One full MIDI, partial MIDI SysEx, or a full SysEx UMP message.
 */
-typedef OSStatus (^ AUMIDIEventListBlock)(AUEventSampleTime					eventSampleTime,
-										  uint8_t 							cable,
-										const struct MIDIEventList *		eventList);
+typedef OSStatus (^AUMIDIEventListBlock)(AUEventSampleTime	eventSampleTime,
+										 uint8_t 							cable,
+										 const struct MIDIEventList *		eventList) CA_REALTIME_API;
 
 //=====================================================================================================================
 #pragma mark - Parameter Definitions
@@ -2506,6 +2506,16 @@
                         Note that as an os_object subclass, workgroup objects are reference-counted,
                         and that AudioUnitGetProperty returns a +1 reference, which the client
                         is responsible for releasing when it is finished with it.
+
+     @constant       kAudioOutputUnitProperty_IntendedSpatialExperience
+                         Scope:            Global
+                         Value Type:     CASpatialAudioExperience* (non-nil)
+                         Access:         read/write
+
+                         Set this output AudioUnit's intended spatial experience override. The default
+                         value of CAAutomaticSpatialAudio means the AudioUnit uses its
+                         AVAudioSession's intended spatial experience. See CASpatialAudioExperience
+                         for more details.
 */
 CF_ENUM(AudioUnitPropertyID) {
 // range  (2000 -> 2999)
@@ -2521,6 +2531,8 @@
 	kAudioOutputUnitProperty_OSWorkgroup
 		__SWIFT_UNAVAILABLE_MSG("Swift is not supported for use with audio realtime threads")
 													= 2015,
+
+    kAudioOutputUnitProperty_IntendedSpatialExperience API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos) CF_REFINED_FOR_SWIFT = 2016,
 };
 
 #if AU_SUPPORT_INTERAPP_AUDIO
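
Note: a sketch of setting the new property on an output AudioUnit (visionOS 26 only). How the `CASpatialAudioExperience` object pointer is marshalled through `AudioUnitSetProperty` is an assumption here; the header only states the value type:

```objc
#import <AudioToolbox/AudioToolbox.h>
#import <AudioToolbox/CASpatialAudioExperience.h>

static OSStatus SetIntendedSpatialExperience(AudioUnit outputUnit)
{
    CASpatialAudioExperience *experience =
        [[CAHeadTrackedSpatialAudio alloc] initWithSoundStageSize:CASoundStageSizeLarge
                                                 anchoringStrategy:[[CAAutomaticAnchoringStrategy alloc] init]];

    // Assumption: pass a pointer to the (non-nil) object pointer, sized as one pointer,
    // as with other object-valued AudioUnit properties.
    CFTypeRef value = (__bridge CFTypeRef)experience;
    return AudioUnitSetProperty(outputUnit,
                                kAudioOutputUnitProperty_IntendedSpatialExperience,
                                kAudioUnitScope_Global,
                                0,
                                &value,
                                sizeof(value));
}
```
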
@@ -3169,6 +3181,35 @@
 
 
 //=====================================================================================================================
+#pragma mark - AUAudioMix
+/*!
+    @enum           AUAudioMix Property IDs
+    @abstract       The collection of property IDs for AUAudioMix
+ 
+    @discussion     AUAudioMix also supports kAudioUnitProperty_SpatialMixerOutputType
+                    This sets the type of output hardware used by AUSpatialMixer when spatialization is enabled.
+                    See AUSpatialMixer properties and AUSpatialMixerOutputType
+    
+    @constant        kAUAudioMixProperty_SpatialAudioMixMetadata
+                       Scope:            Global
+                        Value Type:     CFDataRef
+                        Access:           Read / Write
+ 
+                        Remix metadata from the file asset
+                        
+    @constant        kAUAudioMixProperty_EnableSpatialization
+                        Scope:            Global
+                        Value Type:     UInt32
+                        Access:           Read / Write
+ 
+                        0 - Output format is FOA + mono foreground (Default)
+                        1 - Enable AUSpatialMixer to render to mono/stereo/surround formats
+ */
+CF_ENUM(AudioUnitPropertyID) {
+    kAUAudioMixProperty_SpatialAudioMixMetadata API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 5000,
+    kAUAudioMixProperty_EnableSpatialization API_AVAILABLE(macos(26.0), ios(26.0)) API_UNAVAILABLE(watchos, tvos, visionos) = 5001,
+};
+
 #pragma mark - _3DMixer (Deprecated)
 /*!
     // AUMixer3D is deprecated. Use AUSpatialMixer instead.
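
Note: a sketch of enabling the new AUAudioMix spatialization mode on an existing instance (creating the audio unit itself is omitted):

```objc
#include <AudioToolbox/AudioToolbox.h>

static OSStatus EnableAudioMixSpatialization(AudioUnit mixUnit)
{
    // 0 = FOA + mono foreground output (default); 1 = let AUSpatialMixer render
    // to mono/stereo/surround formats.
    UInt32 enable = 1;
    return AudioUnitSetProperty(mixUnit,
                                kAUAudioMixProperty_EnableSpatialization,
                                kAudioUnitScope_Global,
                                0,
                                &enable,
                                sizeof(enable));
}
```
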
@@ -3400,7 +3441,7 @@
 /*!
 	@typedef			ScheduledAudioSliceCompletionProc
 */
-typedef void (*ScheduledAudioSliceCompletionProc)(void * __nullable userData, ScheduledAudioSlice *bufferList);
+typedef void (*ScheduledAudioSliceCompletionProc)(void * __nullable userData, ScheduledAudioSlice *bufferList) CA_REALTIME_API;
 
 /*
 	@struct				ScheduledAudioSlice
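
Note: `ScheduledAudioSliceCompletionProc` is now annotated `CA_REALTIME_API` as well; assuming that means the proc can fire on a realtime thread, it should only do trivially safe work. A minimal sketch, where `userData` is assumed to point at an atomic flag owned by the caller:

```objc
#include <AudioToolbox/AudioToolbox.h>
#include <stdatomic.h>
#include <stdbool.h>

static void MySliceCompleted(void *userData, ScheduledAudioSlice *slice)
{
    (void)slice;
    // No locks, no allocation, no Objective-C messaging: just signal completion.
    atomic_bool *finished = userData;
    atomic_store(finished, true);
}
```
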
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitUtilities.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitUtilities.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitUtilities.h	2025-04-19 02:35:38
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/AudioUnitUtilities.h	2025-05-24 01:40:36
@@ -328,7 +328,8 @@
                                     const AudioUnitParameter *        inParameter,
                                     AudioUnitParameterValue           inValue,
                                     UInt32                            inBufferOffsetInFrames)
-                                                                                    API_AVAILABLE(macos(10.2), ios(6.0), watchos(2.0), tvos(9.0));
+                                    	CA_REALTIME_API
+										API_AVAILABLE(macos(10.2), ios(6.0), watchos(2.0), tvos(9.0));
 
 /*!
     @function   AUParameterListenerNotify
@@ -359,7 +360,9 @@
 extern OSStatus
 AUParameterListenerNotify(          AUParameterListenerRef __nullable inSendingListener,
                                     void * __nullable                 inSendingObject,
-                                    const AudioUnitParameter *        inParameter)    API_AVAILABLE(macos(10.2), ios(6.0), watchos(2.0), tvos(9.0));
+                                    const AudioUnitParameter *        inParameter)
+                                    	CA_REALTIME_API
+                                    	API_AVAILABLE(macos(10.2), ios(6.0), watchos(2.0), tvos(9.0));
 
 /* ============================================================================= */
 
@@ -512,7 +515,9 @@
 extern OSStatus
 AUEventListenerNotify(              AUEventListenerRef __nullable  inSendingListener,
                                     void * __nullable              inSendingObject,
-                                    const AudioUnitEvent *         inEvent)        API_AVAILABLE(macos(10.3), ios(6.0), watchos(2.0), tvos(9.0));
+                                    const AudioUnitEvent *         inEvent)
+                                    	CA_REALTIME_API
+                                    	API_AVAILABLE(macos(10.3), ios(6.0), watchos(2.0), tvos(9.0));
                                     
 /* ============================================================================= */
 
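
Note: `AUParameterSet`, `AUParameterListenerNotify`, and `AUEventListenerNotify` now carry `CA_REALTIME_API` alongside their availability attributes, so (by that reading) they can be issued from a render thread. A sketch of setting a parameter and notifying its listeners; the unit and parameter ID are placeholders:

```objc
#include <AudioToolbox/AudioToolbox.h>

static OSStatus SetParameterFromRenderContext(AudioUnit unit,
                                              AudioUnitParameterID parameterID,
                                              AudioUnitParameterValue value)
{
    AudioUnitParameter parameter = {
        .mAudioUnit   = unit,
        .mParameterID = parameterID,
        .mScope       = kAudioUnitScope_Global,
        .mElement     = 0,
    };
    // NULL sending listener/object: every listener registered for this parameter is notified.
    return AUParameterSet(NULL, NULL, &parameter, value, 0 /* buffer offset in frames */);
}
```
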
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CASpatialAudioExperience.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CASpatialAudioExperience.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CASpatialAudioExperience.h	1969-12-31 19:00:00
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/CASpatialAudioExperience.h	2025-05-28 08:05:49
@@ -0,0 +1,167 @@
+#if (defined(__USE_PUBLIC_HEADERS__) && __USE_PUBLIC_HEADERS__) || (defined(USE_AUDIOTOOLBOX_PUBLIC_HEADERS) && USE_AUDIOTOOLBOX_PUBLIC_HEADERS) || !__has_include(<AudioToolboxCore/CASpatialAudioExperience.h>)
+/*!
+    @file        CASpatialAudioExperience.h
+    @framework   AudioToolbox.framework
+    @copyright   (c) 2025 Apple, Inc. All rights reserved.
+    @abstract    API to express spatial experiences for audio playback APIs
+*/
+
+#ifndef CASpatialAudioExperience_h
+#define CASpatialAudioExperience_h
+
+#ifdef __OBJC2__
+
+#import <Foundation/Foundation.h>
+#import <os/availability.h>
+#import <TargetConditionals.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark - Sound Stage Sizes
+
+/// Configure the distribution of audio channels in 3D space.
+///
+/// The Objective-C version of the ``SpatialAudioExperiences.SoundStageSize`` Swift type.
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+typedef NS_ENUM(NSInteger, CASoundStageSize) {
+    
+    /// A system-defined sound stage size.
+    CASoundStageSizeAutomatic,
+    
+    /// Places all of an audio stream's channels near the layout's front.
+    CASoundStageSizeSmall,
+    
+    /// Pulls an audio stream's channels closer to the channel layout's front.
+    CASoundStageSizeMedium,
+    
+    /// Spreads an audio stream's channels around the user according to the
+    /// coordinates described in its channel layout.
+    CASoundStageSizeLarge,
+} NS_REFINED_FOR_SWIFT;
+
+#pragma mark - Anchoring Strategies
+
+/// The center of a head-tracked spatial experience.
+///
+/// The Objective-C version of the ``SpatialAudioExperiences.AnchoringStrategy`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAAnchoringStrategy : NSObject <NSSecureCoding, NSCopying>
+
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+/// A system-defined anchoring strategy.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAAutomaticAnchoringStrategy : CAAnchoringStrategy
+
+- (instancetype)init;
++ (instancetype)new;
+
+@end
+
+/// Anchor to the front of the user's space.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAFrontAnchoringStrategy : CAAnchoringStrategy
+
+- (instancetype)init;
++ (instancetype)new;
+
+@end
+
+/// Anchor to the visual center of a particular UIScene.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CASceneAnchoringStrategy : CAAnchoringStrategy
+
+- (instancetype)initWithSceneIdentifier:(NSString*)sceneIdentifier;
++ (instancetype)new NS_UNAVAILABLE;
+
+@property (readonly) NSString *sceneIdentifier;
+
+@end
+
+#pragma mark - Spatial Experiences
+
+/// Configure an audio stream for spatial computing.
+///
+/// The Objective-C version of the ``SpatialAudioExperience`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CASpatialAudioExperience : NSObject <NSSecureCoding, NSCopying>
+
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+@end
+
+/// A spatial audio experience determined by the system.
+///
+/// The Objective-C version of the ``AutomaticSpatialAudio`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAAutomaticSpatialAudio : CASpatialAudioExperience
+
+- (instancetype)init;
++ (instancetype)new;
+
+@end
+
+/// An experience in which the system does not apply spatial processing to the audio stream.
+///
+/// The Objective-C version of the ``BypassedSpatialAudio`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CABypassedSpatialAudio : CASpatialAudioExperience
+
+- (instancetype)init;
++ (instancetype)new;
+
+@end
+
+/// A spatial experience that does not take user motion into account.
+///
+/// The Objective-C version of the ``FixedSpatialAudio`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAFixedSpatialAudio : CASpatialAudioExperience
+
+- (instancetype)initWithSoundStageSize:(CASoundStageSize)soundStageSize;
++ (instancetype)new NS_UNAVAILABLE;
+
+/// The experience's sound stage size.
+@property (readonly) CASoundStageSize soundStageSize;
+
+@end
+
+/// A spatial experience that takes user motion into account.
+///
+/// The Objective-C version of the ``HeadTrackedSpatialAudio`` Swift type.
+NS_REFINED_FOR_SWIFT
+API_AVAILABLE(visionos(26.0)) API_UNAVAILABLE(ios, watchos, tvos, macos)
+@interface CAHeadTrackedSpatialAudio : CASpatialAudioExperience
+
+- (instancetype)initWithSoundStageSize:(CASoundStageSize)soundStageSize
+                     anchoringStrategy:(CAAnchoringStrategy*)anchoringStrategy;
++ (instancetype)new NS_UNAVAILABLE;
+
+/// The experience's sound stage size.
+@property (readonly) CASoundStageSize soundStageSize;
+
+/// The experience's anchoring strategy.
+@property (readonly) CAAnchoringStrategy *anchoringStrategy;
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif /* __OBJC2__ */
+#endif /* CASpatialAudioExperience_h */
+
+#else
+#include <AudioToolboxCore/CASpatialAudioExperience.h>
+#endif
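
Note: the header above defines the experience and anchoring-strategy classes consumed by `kAudioOutputUnitProperty_IntendedSpatialExperience`. A sketch of constructing them (the scene identifier is a placeholder):

```objc
#import <AudioToolbox/CASpatialAudioExperience.h>

// Head-tracked playback anchored to the visual center of a particular UIScene.
static CASpatialAudioExperience *MakeSceneAnchoredExperience(void)
{
    CAAnchoringStrategy *anchoring =
        [[CASceneAnchoringStrategy alloc] initWithSceneIdentifier:@"my-scene-identifier"];
    return [[CAHeadTrackedSpatialAudio alloc] initWithSoundStageSize:CASoundStageSizeMedium
                                                   anchoringStrategy:anchoring];
}

// Fixed playback (no head tracking) with a small sound stage.
static CASpatialAudioExperience *MakeFixedExperience(void)
{
    return [[CAFixedSpatialAudio alloc] initWithSoundStageSize:CASoundStageSizeSmall];
}
```
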
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h	2025-04-19 04:03:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicDevice.h	2025-05-28 07:59:14
@@ -213,7 +213,9 @@
 						UInt32					inStatus,
 						UInt32					inData1,
 						UInt32					inData2,
-						UInt32					inOffsetSampleFrame)				API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+						UInt32					inOffsetSampleFrame)
+							CA_REALTIME_API
+							API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicDeviceSysEx
@@ -234,7 +236,9 @@
 extern OSStatus
 MusicDeviceSysEx(		MusicDeviceComponent	inUnit,
 						const UInt8 *			inData,
-						UInt32					inLength)							API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+						UInt32					inLength)
+							CA_REALTIME_API
+							API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicDeviceMIDIEventList
@@ -267,7 +271,9 @@
 extern OSStatus
 MusicDeviceMIDIEventList(   MusicDeviceComponent			inUnit,
 							UInt32							inOffsetSampleFrame,
-							const struct MIDIEventList *	evtList)				API_AVAILABLE(macos(12), ios(15.0), tvos(15.0));
+							const struct MIDIEventList *	evtList)
+								CA_REALTIME_API
+								API_AVAILABLE(macos(12), ios(15.0), tvos(15.0));
 
 /*!
 	@function	MusicDeviceStartNote
@@ -314,7 +320,9 @@
 						MusicDeviceGroupID					inGroupID,
 						NoteInstanceID *					outNoteInstanceID,
 						UInt32								inOffsetSampleFrame,
-						const MusicDeviceNoteParams *	 	inParams)				API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+						const MusicDeviceNoteParams *	 	inParams)
+							CA_REALTIME_API
+							API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicDeviceStopNote
@@ -338,7 +346,9 @@
 MusicDeviceStopNote(	MusicDeviceComponent	inUnit,
 						MusicDeviceGroupID		inGroupID,
 						NoteInstanceID			inNoteInstanceID,
-						UInt32					inOffsetSampleFrame)				API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+						UInt32					inOffsetSampleFrame)
+							CA_REALTIME_API
+							API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 
 /*!
@@ -386,7 +396,7 @@
 								UInt32					inStatus,
 								UInt32					inData1,
 								UInt32					inData2,
-								UInt32					inOffsetSampleFrame);
+								UInt32					inOffsetSampleFrame) CA_REALTIME_API;
 
 /*!
 	@typedef		MusicDeviceSysExProc
@@ -403,7 +413,7 @@
 typedef OSStatus
 (*MusicDeviceSysExProc)(	void *						self,
 							const UInt8 *				inData,
-							UInt32						inLength);
+							UInt32						inLength) CA_REALTIME_API;
 
 /*!
 	@typedef		MusicDeviceStartNoteProc
@@ -423,7 +433,7 @@
 						MusicDeviceGroupID				inGroupID,
 						NoteInstanceID *				outNoteInstanceID,
 						UInt32							inOffsetSampleFrame,
-						const MusicDeviceNoteParams *	inParams);
+						const MusicDeviceNoteParams *	inParams) CA_REALTIME_API;
 
 /*!
 	@typedef		MusicDeviceStopNoteProc
@@ -441,7 +451,7 @@
 (*MusicDeviceStopNoteProc)(	void *						self,
 						MusicDeviceGroupID				inGroupID,
 						NoteInstanceID					inNoteInstanceID,
-						UInt32							inOffsetSampleFrame);
+						UInt32							inOffsetSampleFrame) CA_REALTIME_API;
 
 
 //=====================================================================================================================
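
Note: with `MusicDeviceMIDIEvent` and related calls now annotated `CA_REALTIME_API`, note events can (by that reading) be issued from a realtime context. A minimal sketch of a note-on/note-off pair; the offsets are sample-frame positions within the next render cycle:

```objc
#include <AudioToolbox/AudioToolbox.h>

static OSStatus PlayMiddleC(MusicDeviceComponent synthUnit)
{
    // Note-on: status 0x90 (channel 0), note 60, velocity 100, at the start of the cycle.
    OSStatus status = MusicDeviceMIDIEvent(synthUnit, 0x90, 60, 100, 0);
    if (status != noErr) { return status; }
    // Matching note-off 256 sample frames later in the same cycle.
    return MusicDeviceMIDIEvent(synthUnit, 0x80, 60, 0, 256);
}
```
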
diff -ruN /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicPlayer.h /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicPlayer.h
--- /Applications/Xcode_16.4.0.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicPlayer.h	2025-04-19 04:03:15
+++ /Applications/Xcode_26.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AudioToolbox.framework/Headers/MusicPlayer.h	2025-05-24 01:40:36
@@ -339,13 +339,14 @@
 	@typedef MusicSequenceUserCallback
 	@discussion See MusicSequenceSetUserCallback
 */
-typedef void (*MusicSequenceUserCallback)(	void * __nullable			inClientData,
+typedef void (*MusicSequenceUserCallback)(
+											void * __nullable			inClientData,
 											MusicSequence				inSequence,
 											MusicTrack					inTrack,
 											MusicTimeStamp				inEventTime,
 											const MusicEventUserData *	inEventData,
 											MusicTimeStamp				inStartSliceBeat,
-											MusicTimeStamp				inEndSliceBeat);
+											MusicTimeStamp				inEndSliceBeat) CA_REALTIME_API;
 
 /*!
 	enum MusicPlayerErrors
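
Note: `MusicSequenceUserCallback` is now annotated `CA_REALTIME_API`, so the callback body should stay realtime-safe. A sketch of a callback that only records the beat of the last user event it saw, plus its registration:

```objc
#include <AudioToolbox/AudioToolbox.h>
#include <stdatomic.h>

static _Atomic double gLastUserEventBeat;

static void MyUserEventCallback(void *inClientData, MusicSequence inSequence,
                                MusicTrack inTrack, MusicTimeStamp inEventTime,
                                const MusicEventUserData *inEventData,
                                MusicTimeStamp inStartSliceBeat, MusicTimeStamp inEndSliceBeat)
{
    // No locks, no allocation: just publish the event time for a non-realtime observer.
    atomic_store(&gLastUserEventBeat, inEventTime);
}

static OSStatus InstallUserCallback(MusicSequence sequence)
{
    return MusicSequenceSetUserCallback(sequence, MyUserEventCallback, NULL);
}
```
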
@@ -549,7 +550,9 @@
 extern OSStatus
 MusicPlayerGetHostTimeForBeats(	MusicPlayer 	inPlayer,
 								MusicTimeStamp	inBeats,
-								UInt64 *		outHostTime)					API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
+								UInt64 *		outHostTime)
+									CA_REALTIME_API
+									API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicPlayerGetBeatsForHostTime
@@ -567,7 +570,9 @@
 extern OSStatus
 MusicPlayerGetBeatsForHostTime(	MusicPlayer 	inPlayer,
 								UInt64			inHostTime,
-								MusicTimeStamp *outBeats)						API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
+								MusicTimeStamp *outBeats)
+									CA_REALTIME_API
+									API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicPlayerPreroll
@@ -986,7 +991,9 @@
 extern OSStatus
 MusicSequenceGetSecondsForBeats(	MusicSequence		inSequence,
 									MusicTimeStamp		inBeats,
-									Float64 *			outSeconds)				API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
+									Float64 *			outSeconds)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicSequenceGetBeatsForSeconds
@@ -999,7 +1006,9 @@
 extern OSStatus
 MusicSequenceGetBeatsForSeconds(	MusicSequence		inSequence,
 									Float64				inSeconds,
-									MusicTimeStamp *	outBeats)				API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
+									MusicTimeStamp *	outBeats)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicSequenceSetUserCallback
@@ -1027,7 +1036,9 @@
 extern OSStatus
 MusicSequenceSetUserCallback(	MusicSequence							inSequence,
 								MusicSequenceUserCallback __nullable	inCallback,
-								void * __nullable						inClientData)		API_AVAILABLE(macos(10.3), ios(5.0), watchos(2.0), tvos(9.0));
+								void * __nullable						inClientData)
+									CA_REALTIME_API
+									API_AVAILABLE(macos(10.3), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicSequenceBeatsToBarBeatTime
@@ -1044,7 +1055,9 @@
 MusicSequenceBeatsToBarBeatTime(MusicSequence				inSequence,
 								MusicTimeStamp				inBeats,
 								UInt32						inSubbeatDivisor,
-								CABarBeatTime *				outBarBeatTime)		API_AVAILABLE(macos(10.5), ios(5.0), watchos(2.0), tvos(9.0));
+								CABarBeatTime *				outBarBeatTime)
+									CA_REALTIME_API
+									API_AVAILABLE(macos(10.5), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicSequenceBarBeatTimeToBeats
@@ -1059,7 +1072,9 @@
 extern OSStatus
 MusicSequenceBarBeatTimeToBeats(MusicSequence				inSequence,
 								const CABarBeatTime *		inBarBeatTime,
-								MusicTimeStamp *			outBeats)			API_AVAILABLE(macos(10.5), ios(5.0), watchos(2.0), tvos(9.0));
+								MusicTimeStamp *			outBeats)
+									CA_REALTIME_API
+									API_AVAILABLE(macos(10.5), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicSequenceGetInfoDictionary
@@ -1461,7 +1476,9 @@
 */
 extern OSStatus
 MusicEventIteratorSeek(				MusicEventIterator 	inIterator,
-									MusicTimeStamp 		inTimeStamp)			API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+									MusicTimeStamp 		inTimeStamp)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicEventIteratorNextEvent
@@ -1474,7 +1491,9 @@
 	@param		inIterator		the iterator
 */
 extern OSStatus
-MusicEventIteratorNextEvent(		MusicEventIterator 	inIterator)				API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+MusicEventIteratorNextEvent(		MusicEventIterator 	inIterator)
+	CA_REALTIME_API
+	API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicEventIteratorPreviousEvent
@@ -1486,7 +1505,9 @@
 	@param		inIterator		the iterator
 */
 extern OSStatus
-MusicEventIteratorPreviousEvent(	MusicEventIterator 	inIterator)				API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+MusicEventIteratorPreviousEvent(	MusicEventIterator 	inIterator)
+	CA_REALTIME_API
+	API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicEventIteratorGetEventInfo
@@ -1508,7 +1529,9 @@
 									MusicTimeStamp *		outTimeStamp,
 									MusicEventType *		outEventType,
 									const void * __nullable * __nonnull outEventData,
-									UInt32 *				outEventDataSize)	API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+									UInt32 *				outEventDataSize)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 	
 /*!
 	@function	MusicEventIteratorSetEventInfo
@@ -1531,7 +1554,7 @@
 	@abstract	Set a new time for an event
 	@discussion The iterator will still be pointing to the same event, but as the event will have moved, 
 				it may or may not have a next or previous event now (depending of course on the time
-				you moved it too).
+				you moved it to).
 				
 	@param		inIterator		the iterator
 	@param		inTimeStamp		the new time stamp of the event
@@ -1569,7 +1592,9 @@
 */
 extern OSStatus
 MusicEventIteratorHasPreviousEvent(	MusicEventIterator 	inIterator,
-									Boolean	*			outHasPrevEvent)		API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+									Boolean	*			outHasPrevEvent)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicEventIteratorHasNextEvent
@@ -1590,7 +1615,9 @@
 */
 extern OSStatus
 MusicEventIteratorHasNextEvent(		MusicEventIterator	inIterator,
-									Boolean	*			outHasNextEvent)		API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
+									Boolean	*			outHasNextEvent)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.0), ios(5.0), watchos(2.0), tvos(9.0));
 
 /*!
 	@function	MusicEventIteratorHasCurrentEvent
@@ -1600,7 +1627,9 @@
 */
 extern OSStatus
 MusicEventIteratorHasCurrentEvent(	MusicEventIterator	inIterator,
-									Boolean	*			outHasCurEvent)			API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
+									Boolean	*			outHasCurEvent)
+										CA_REALTIME_API
+										API_AVAILABLE(macos(10.2), ios(5.0), watchos(2.0), tvos(9.0));
 
 
 
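Note: the MusicEventIterator query and traversal calls above are now annotated `CA_REALTIME_API`; iterator creation and disposal do not appear in this diff, so presumably they should stay off the realtime thread. A sketch that counts the events in a track:

```objc
#include <AudioToolbox/AudioToolbox.h>

static OSStatus CountTrackEvents(MusicTrack track, UInt32 *outCount)
{
    MusicEventIterator iterator = NULL;
    OSStatus status = NewMusicEventIterator(track, &iterator);   // create off the realtime thread
    if (status != noErr) { return status; }

    UInt32 count = 0;
    Boolean hasCurrent = false;
    MusicEventIteratorHasCurrentEvent(iterator, &hasCurrent);
    while (hasCurrent) {
        MusicTimeStamp timeStamp = 0;
        MusicEventType eventType = 0;
        const void *eventData = NULL;
        UInt32 eventDataSize = 0;
        MusicEventIteratorGetEventInfo(iterator, &timeStamp, &eventType, &eventData, &eventDataSize);
        ++count;
        MusicEventIteratorNextEvent(iterator);
        MusicEventIteratorHasCurrentEvent(iterator, &hasCurrent);
    }

    *outCount = count;
    return DisposeMusicEventIterator(iterator);                  // dispose off the realtime thread
}
```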