How to get an audio device UID to pass into NSSound's setPlaybackDeviceIdentifier: - macos

How can I get an audio device's UID (for a USB speaker) to pass into NSSound's setPlaybackDeviceIdentifier: method?
Thanks

To avoid the deprecated AudioHardwareGetProperty and AudioDeviceGetProperty calls, replace them with something like this:
AudioObjectPropertyAddress propertyAddress;
AudioDeviceID *deviceIDs;
UInt32 propertySize;
NSInteger numDevices;

propertyAddress.mSelector = kAudioHardwarePropertyDevices;
propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
propertyAddress.mElement = kAudioObjectPropertyElementMaster;

if (AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &propertySize) == noErr) {
    numDevices = propertySize / sizeof(AudioDeviceID);
    deviceIDs = (AudioDeviceID *)calloc(numDevices, sizeof(AudioDeviceID));

    if (AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &propertySize, deviceIDs) == noErr) {
        AudioObjectPropertyAddress deviceAddress;
        char deviceName[64];
        char manufacturerName[64];

        for (NSInteger idx = 0; idx < numDevices; idx++) {
            propertySize = sizeof(deviceName);
            deviceAddress.mSelector = kAudioDevicePropertyDeviceName;
            deviceAddress.mScope = kAudioObjectPropertyScopeGlobal;
            deviceAddress.mElement = kAudioObjectPropertyElementMaster;
            if (AudioObjectGetPropertyData(deviceIDs[idx], &deviceAddress, 0, NULL, &propertySize, deviceName) == noErr) {
                propertySize = sizeof(manufacturerName);
                deviceAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
                deviceAddress.mScope = kAudioObjectPropertyScopeGlobal;
                deviceAddress.mElement = kAudioObjectPropertyElementMaster;
                if (AudioObjectGetPropertyData(deviceIDs[idx], &deviceAddress, 0, NULL, &propertySize, manufacturerName) == noErr) {
                    CFStringRef uidString;
                    propertySize = sizeof(uidString);
                    deviceAddress.mSelector = kAudioDevicePropertyDeviceUID;
                    deviceAddress.mScope = kAudioObjectPropertyScopeGlobal;
                    deviceAddress.mElement = kAudioObjectPropertyElementMaster;
                    if (AudioObjectGetPropertyData(deviceIDs[idx], &deviceAddress, 0, NULL, &propertySize, &uidString) == noErr) {
                        NSLog(@"device %s by %s id %@", deviceName, manufacturerName, uidString);
                        CFRelease(uidString);
                    }
                }
            }
        }
    }
    free(deviceIDs);
}
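Once you have the UID string, it can be handed straight to NSSound; a minimal usage sketch ("Ping" is a placeholder sound name, and uidString is the CFStringRef from the loop above):
// Route an NSSound to the device whose UID was just found.
// The __bridge cast assumes ARC; under MRC a plain (NSString *) cast works.
NSSound *sound = [NSSound soundNamed:@"Ping"];
[sound setPlaybackDeviceIdentifier:(__bridge NSString *)uidString];
[sound play];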

OK, I got it myself...
theCFString will contain the device UID:
UInt32 theSize;
char theString[kMaxStringSize];   // kMaxStringSize is assumed to be defined elsewhere
UInt32 theNumberDevices;
AudioDeviceID *theDeviceList = NULL;
UInt32 theDeviceIndex;
CFStringRef theCFString = NULL;
OSStatus theStatus = noErr;

// this is our driver
const char *nameString = "Burr-Brown Japan PCM2702";
const char *manufacturerString = "Burr-Brown Japan";

// device list size
theSize = 0;
theStatus = AudioHardwareGetPropertyInfo(kAudioHardwarePropertyDevices, &theSize, NULL);
theNumberDevices = theSize / sizeof(AudioDeviceID);

// allocate the device list
theDeviceList = (AudioDeviceID *)malloc(theNumberDevices * sizeof(AudioDeviceID));

// get the device list
theSize = theNumberDevices * sizeof(AudioDeviceID);
theStatus = AudioHardwareGetProperty(kAudioHardwarePropertyDevices, &theSize, theDeviceList);

// iterate through the device list, find our device and return the UID
for (theDeviceIndex = 0; theDeviceIndex < theNumberDevices; ++theDeviceIndex)
{
    // get name
    theSize = kMaxStringSize;
    theStatus = AudioDeviceGetProperty(theDeviceList[theDeviceIndex],
                                       0, 0, kAudioDevicePropertyDeviceName, &theSize, theString);
    NSLog(@"%s", theString);
    // is it me?
    if (strncmp(theString, nameString, strlen(nameString)) == 0) {
        // get manufacturer
        theSize = kMaxStringSize;
        theStatus = AudioDeviceGetProperty(theDeviceList[theDeviceIndex], 0, 0,
                                           kAudioDevicePropertyDeviceManufacturer, &theSize, theString);
        NSLog(@"%s", theString);
        // is it really me?
        if (strncmp(theString, manufacturerString, strlen(manufacturerString)) == 0) {
            // get device UID
            theSize = sizeof(CFStringRef);
            theStatus = AudioDeviceGetProperty(theDeviceList[theDeviceIndex],
                                               0, 0, kAudioDevicePropertyDeviceUID, &theSize, &theCFString);
            NSLog(@"%@", theCFString);
            break;
        }
    }
}

AudioHardwareGetProperty is deprecated as of Snow Leopard.

Related

Unexpected value using kAudioDevicePropertyVolumeScalarToDecibels

I'm getting unexpected values for the volume level using kAudioDevicePropertyVolumeScalarToDecibels on my laptop's built-in audio.
void volume_test()
{
    AudioObjectPropertyAddress address = {
        .mSelector = kAudioHardwarePropertyDefaultOutputDevice,
        .mScope = kAudioObjectPropertyScopeGlobal,
        .mElement = kAudioObjectPropertyElementMaster
    };
    AudioObjectID deviceID = kAudioObjectUnknown;
    UInt32 dataSize = sizeof(deviceID);
    OSStatus result = AudioObjectGetPropertyData(kAudioObjectSystemObject, &address, 0, NULL, &dataSize, &deviceID);
    assert(result == noErr);

    address.mSelector = kAudioDevicePropertyVolumeScalar;
    address.mScope = kAudioObjectPropertyScopeOutput;
    Float32 volumeScalar = 0;
    dataSize = sizeof(volumeScalar);
    result = AudioObjectGetPropertyData(deviceID, &address, 0, NULL, &dataSize, &volumeScalar);
    assert(result == noErr);

    address.mSelector = kAudioDevicePropertyVolumeDecibels;
    Float32 volumeDecibels = 0;
    dataSize = sizeof(volumeDecibels);
    result = AudioObjectGetPropertyData(deviceID, &address, 0, NULL, &dataSize, &volumeDecibels);
    assert(result == noErr);

    address.mSelector = kAudioDevicePropertyVolumeScalarToDecibels;
    Float32 convertedVolumeDecibels = volumeScalar;
    dataSize = sizeof(convertedVolumeDecibels);
    result = AudioObjectGetPropertyData(deviceID, &address, 0, NULL, &dataSize, &convertedVolumeDecibels);
    assert(result == noErr);

    address.mSelector = kAudioDevicePropertyVolumeDecibelsToScalar;
    Float32 convertedVolumeScalar = volumeDecibels;
    dataSize = sizeof(convertedVolumeScalar);
    result = AudioObjectGetPropertyData(deviceID, &address, 0, NULL, &dataSize, &convertedVolumeScalar);
    assert(result == noErr);

    NSLog(@"Direct = %.4f %+2.2f dB", volumeScalar, volumeDecibels);
    NSLog(@"Converted = %.4f %+2.2f dB", convertedVolumeScalar, convertedVolumeDecibels);

    address.mSelector = kAudioDevicePropertyVolumeRangeDecibels;
    AudioValueRange decibelRange;
    dataSize = sizeof(decibelRange);
    result = AudioObjectGetPropertyData(deviceID, &address, 0, NULL, &dataSize, &decibelRange);
    assert(result == noErr);
    NSLog(@"dB range %+2.2f ... %+2.2f", decibelRange.mMinimum, decibelRange.mMaximum);
}
The output is:
Direct = 0.0620 -47.69 dB
Converted = 0.0620 -59.56 dB
dB range -63.50 ... +0.00
The same thing occurs using the underlying AudioControl directly.
For reference, Audio MIDI Setup shows: (screenshot omitted)
Interestingly, using my external display's audio and the elements from kAudioDevicePropertyPreferredChannelsForStereo (since it has no master element), the values match.
Also of note, for the display audio kAudioDevicePropertyVolumeDecibelsToScalarTransferFunction is 5, or kAudioLevelControlTranferFunction2Over1. Attempting to retrieve kAudioDevicePropertyVolumeDecibelsToScalarTransferFunction for the laptop fails with the message
HALC_ShellObject::GetPropertyData: call to the proxy failed, Error: 2003332927 (who?)
HALPlugIn::ObjectGetPropertyData: got an error from the plug-in routine, Error: 2003332927 (who?)
which is not a "normal" error message for an unsupported property.
How should kAudioDevicePropertyVolumeScalarToDecibels be used?
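A quick sanity check against the numbers above: with a range of -63.50 dB to 0.00 dB, the direct reading of -47.69 dB corresponds to a linear fraction of (63.50 - 47.69) / 63.50 ≈ 0.249, and 0.249² ≈ 0.062, which matches the reported scalar, so the control itself appears to follow a 2-over-1 (square-law) transfer function. Mapping the scalar 0.062 linearly back onto the range gives -63.50 + 0.062 × 63.50 ≈ -59.56 dB, exactly the "converted" value, so the conversion property appears to assume a linear transfer function here.
For comparison with the per-channel path mentioned above, a minimal sketch (assuming deviceID has already been resolved as in volume_test) that queries the scalar volume on the preferred stereo channels instead of the master element:
// Sketch: read per-channel output volume via the preferred stereo pair,
// for devices whose volume controls live on the channel elements rather than the master.
UInt32 channels[2] = {0, 0};
UInt32 size = sizeof(channels);
AudioObjectPropertyAddress stereoAddress = {
    kAudioDevicePropertyPreferredChannelsForStereo,
    kAudioObjectPropertyScopeOutput,
    kAudioObjectPropertyElementMaster
};
if (AudioObjectGetPropertyData(deviceID, &stereoAddress, 0, NULL, &size, channels) == noErr) {
    for (int i = 0; i < 2; i++) {
        Float32 scalar = 0;
        UInt32 scalarSize = sizeof(scalar);
        AudioObjectPropertyAddress volumeAddress = {
            kAudioDevicePropertyVolumeScalar,
            kAudioObjectPropertyScopeOutput,
            channels[i]   // per-channel element instead of the master element
        };
        if (AudioObjectGetPropertyData(deviceID, &volumeAddress, 0, NULL, &scalarSize, &scalar) == noErr) {
            NSLog(@"channel %u volume %.4f", (unsigned)channels[i], scalar);
        }
    }
}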

How can I tell if the camera is in use by another process?

In OS X, how can I tell if the camera or microphone is in use by another application or process? The following doesn't seem to work unless the other application has locked the device.
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
    NSLog(@"In use by other application %hhd", [device isInUseByAnotherApplication]);
}
You can use Core Audio to check whether the microphone is in use:
AudioObjectPropertyAddress propertyAddress = {
    kAudioHardwarePropertyDevices,
    kAudioObjectPropertyScopeGlobal,
    kAudioObjectPropertyElementMaster
};
UInt32 dataSize = 0;
OSStatus status = AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize);
if (kAudioHardwareNoError != status) {
    fprintf(stderr, "AudioObjectGetPropertyDataSize (kAudioHardwarePropertyDevices) failed: %i\n", status);
    return;
}

UInt32 deviceCount = (UInt32)(dataSize / sizeof(AudioDeviceID));
AudioDeviceID *audioDevices = (AudioDeviceID *)malloc(dataSize);
if (NULL == audioDevices) {
    fputs("Unable to allocate memory", stderr);
    return;
}

status = AudioObjectGetPropertyData(kAudioObjectSystemObject, &propertyAddress, 0, NULL, &dataSize, audioDevices);
if (kAudioHardwareNoError != status) {
    fprintf(stderr, "AudioObjectGetPropertyData (kAudioHardwarePropertyDevices) failed: %i\n", status);
    free(audioDevices), audioDevices = NULL;
    return;
}

CFMutableArrayRef inputDeviceArray = CFArrayCreateMutable(kCFAllocatorDefault, deviceCount, &kCFTypeArrayCallBacks);
if (NULL == inputDeviceArray) {
    fputs("CFArrayCreateMutable failed", stderr);
    free(audioDevices), audioDevices = NULL;
    return;
}
Now iterate through all the devices and fetch the kAudioDevicePropertyDeviceIsRunningSomewhere property:
UInt32 deviceIsRunning = 0;
dataSize = sizeof(deviceIsRunning);
propertyAddress.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
for (UInt32 i = 0; i < deviceCount; ++i) {
    // a value of 1 means some process is currently using this device
    status = AudioObjectGetPropertyData(audioDevices[i], &propertyAddress, 0, NULL, &dataSize, &deviceIsRunning);
}
Check the deviceIsRunning variable after each call.
I don't have an idea about the video device yet, but I will update my answer if I find a solution.
Hope this helps.
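Putting the pieces together, a compact sketch of the same idea that checks only the default input device (the helper name and the #import line are mine, not from the answer above):
#import <CoreAudio/CoreAudio.h>

// Returns true if any process is currently running audio I/O on the default input device.
static Boolean IsDefaultInputDeviceInUse(void)
{
    AudioObjectPropertyAddress address = {
        kAudioHardwarePropertyDefaultInputDevice,
        kAudioObjectPropertyScopeGlobal,
        kAudioObjectPropertyElementMaster
    };
    AudioDeviceID device = kAudioObjectUnknown;
    UInt32 size = sizeof(device);
    if (AudioObjectGetPropertyData(kAudioObjectSystemObject, &address, 0, NULL, &size, &device) != noErr) {
        return false;
    }

    address.mSelector = kAudioDevicePropertyDeviceIsRunningSomewhere;
    UInt32 running = 0;
    size = sizeof(running);
    if (AudioObjectGetPropertyData(device, &address, 0, NULL, &size, &running) != noErr) {
        return false;
    }
    return running != 0;
}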
I'm working on a Go module that detects camera/microphone state (using cgo); here are my Objective-C implementations:
IsCameraOn(): https://github.com/antonfisher/go-media-devices-state/blob/main/pkg/camera/camera_darwin.mm
IsMicrophoneOn(): https://github.com/antonfisher/go-media-devices-state/blob/main/pkg/microphone/microphone_darwin.mm
Thanks @Rohan for the accepted answer!

Why AudioDeviceID for default mic is different to AudioDeviceID obtained from UID for the same mic

Background of the issue:
I get the AudioDeviceID for the default input mic this way:
OSStatus error = noErr;
AudioObjectPropertyAddress propertyAddress = {};
UInt32 propertySize;

/* get default device ID */
AudioDeviceID deviceID = 0;
propertyAddress.mSelector = kAudioHardwarePropertyDevices;
propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
propertyAddress.mElement = kAudioObjectPropertyElementMaster;
propertySize = sizeof(AudioDeviceID);
error = AudioHardwareServiceGetPropertyData(kAudioObjectSystemObject,
                                            &propertyAddress, 0,
                                            NULL, &propertySize, &deviceID);
if (!error)
    ... // we get some AudioDeviceID, let it be ID_1
And getting the AudioDeviceID from a UID looks like this:
CFStringRef micUID = ... // my UID for the mic
CFStringRef *inDeviceUID = &micUID;

AudioDeviceID translatedDeviceID = kAudioObjectUnknown;   // destination for the translated ID
AudioDeviceID *outDeviceID = &translatedDeviceID;

AudioObjectPropertyAddress proprtyAddress = {};
proprtyAddress.mSelector = kAudioHardwarePropertyDeviceForUID;
proprtyAddress.mScope = kAudioObjectPropertyScopeGlobal;
proprtyAddress.mElement = kAudioObjectPropertyElementMaster;

AudioValueTranslation translation = {};
translation.mInputData = inDeviceUID;
translation.mInputDataSize = sizeof(CFStringRef);
translation.mOutputData = outDeviceID;
translation.mOutputDataSize = sizeof(AudioDeviceID);

UInt32 inSize = sizeof(translation);
OSStatus result = AudioObjectGetPropertyData(kAudioObjectSystemObject,
                                             &proprtyAddress,
                                             0,
                                             nullptr,
                                             &inSize,
                                             &translation);
// if no error - we have another AudioDeviceID, let it be ID_2
The question is:
Why is ID_1 != ID_2?
It happens only when I connect two identical web cameras to the Mac, which is strange because their mics have different UIDs.
And when I try to change some parameters, the sample rate for example, it doesn't work (I get a kAudioHardwareUnknownPropertyError).
For the built-in line input it works fine.
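A minimal sketch for narrowing this down: read the UID back from an AudioDeviceID so ID_1 and ID_2 can be compared by the string Core Audio considers canonical (the helper name CopyDeviceUID is hypothetical):
#import <CoreAudio/CoreAudio.h>

// Returns the UID of the given device; the caller is responsible for CFRelease.
static CFStringRef CopyDeviceUID(AudioDeviceID device)
{
    AudioObjectPropertyAddress address = {
        kAudioDevicePropertyDeviceUID,
        kAudioObjectPropertyScopeGlobal,
        kAudioObjectPropertyElementMaster
    };
    CFStringRef uid = NULL;
    UInt32 size = sizeof(uid);
    if (AudioObjectGetPropertyData(device, &address, 0, NULL, &size, &uid) != noErr) {
        return NULL;
    }
    return uid;
}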

Cocoa Mac OSX application turn off sound

I have a simple question that I couldn't find an answer to on Google or in any forum: how do I turn an application's audio/sound off, like a mute? I want to make a checkbox which controls the sound of the application.
I would be grateful for a detailed explanation.
Try this, from working source code:
- (void)setDefaultAudioDevice
{
    UInt32 propertySize = 0;
    OSStatus status = noErr;
    AudioObjectPropertyAddress propertyAOPA;
    propertyAOPA.mElement = kAudioObjectPropertyElementMaster;
    propertyAOPA.mScope = kAudioObjectPropertyScopeGlobal;
    propertyAOPA.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
    propertySize = sizeof(AudioDeviceID);

    // outputDeviceID is assumed to be an AudioDeviceID instance variable
    status = AudioHardwareServiceGetPropertyData(kAudioObjectSystemObject, &propertyAOPA, 0, NULL, &propertySize, &outputDeviceID);
    if (status)
    {
        // Error
        return;
    }
}

- (void)muteHardwareVolume
{
    UInt32 propertySize = 0;
    OSStatus status = noErr;
    AudioObjectPropertyAddress propertyAOPA;

    [self setDefaultAudioDevice];

    propertyAOPA.mElement = kAudioObjectPropertyElementMaster;
    propertyAOPA.mScope = kAudioDevicePropertyScopeOutput;
    propertyAOPA.mSelector = kAudioDevicePropertyMute;
    propertySize = sizeof(UInt32);

    UInt32 mute = 1;
    status = AudioHardwareServiceSetPropertyData(outputDeviceID, &propertyAOPA, 0, NULL, propertySize, &mute);
    if (status)
    {
        // Error
        return;
    }
}
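For the other state of the checkbox, a matching sketch that clears the mute flag again (same assumption as above: outputDeviceID is an AudioDeviceID instance variable set by setDefaultAudioDevice):
- (void)unmuteHardwareVolume
{
    AudioObjectPropertyAddress propertyAOPA;

    [self setDefaultAudioDevice];

    propertyAOPA.mElement = kAudioObjectPropertyElementMaster;
    propertyAOPA.mScope = kAudioDevicePropertyScopeOutput;
    propertyAOPA.mSelector = kAudioDevicePropertyMute;

    // 0 clears the device-level mute that muteHardwareVolume set
    UInt32 mute = 0;
    OSStatus status = AudioHardwareServiceSetPropertyData(outputDeviceID, &propertyAOPA, 0, NULL, sizeof(UInt32), &mute);
    if (status)
    {
        // Error
        return;
    }
}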

Remote IO Play with constant noise

Guys! I'm having trouble using Remote IO to play back streamed audio. I verified the PCM frame data before feeding it in, and it's correct, so I'm confused. Could you help me? Thanks a lot!
Below is my code.
- (void)initializeAudioPlay
{
    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &audioPlayUnit);
    [self checkStatus:status];

    // Enable IO for playback
    UInt32 flag = 1;
    //kAUVoiceIOProperty_VoiceProcessingEnableAGC
    status = AudioUnitSetProperty(audioPlayUnit, kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input, kOutputBus, &flag, sizeof(flag));
    [self checkStatus:status];

    // Describe format
    AudioStreamBasicDescription audioFormat;
    memset(&audioFormat, 0, sizeof(audioFormat));
    audioFormat.mSampleRate = 8000;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagsCanonical; //kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsSignedInteger;
    /*kAudioFormatFlagsCanonical
     | (kAudioUnitSampleFractionBits << kLinearPCMFormatFlagsSampleFractionShift)*/
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerFrame = (audioFormat.mBitsPerChannel / 8) * audioFormat.mChannelsPerFrame;
    audioFormat.mBytesPerPacket = audioFormat.mBytesPerFrame;

    // Apply format
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self checkStatus:status];

    float value = (float)10 / 255.0;
    AudioUnitSetParameter(audioPlayUnit, kAudioUnitParameterUnit_LinearGain, kAudioUnitScope_Input, 0, value, 0);

    AudioChannelLayout new_layout;
    new_layout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    AudioUnitSetProperty(audioPlayUnit,
                         kAudioUnitProperty_AudioChannelLayout,
                         kAudioUnitScope_Global,
                         0, &new_layout, sizeof(new_layout));

    UInt32 bypassEffect = kAudioUnitProperty_RenderQuality;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_RenderQuality,
                                  kAudioUnitScope_Global,
                                  0,
                                  &bypassEffect,
                                  sizeof(bypassEffect));
    [self checkStatus:status];

    // Set output callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = playCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioPlayUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    [self checkStatus:status];

    flag = 0;

    // Initialize
    status = AudioUnitInitialize(audioPlayUnit);
    [self checkStatus:status];
    DGLog(@"audio play unit initialize = %d", status);

    circularBuf = [[CircularBuf alloc] initWithBufLen:kBufferLength];

    /*
    AudioSessionInitialize(NULL, NULL, NULL, NULL);
    Float64 rate = 32000.0;
    AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareSampleRate, sizeof(rate), &rate);
    Float32 volume = 20.0;
    UInt32 size = sizeof(Float32);
    AudioSessionSetProperty(
        kAudioSessionProperty_PreferredHardwareIOBufferDuration,
        &size, &volume);
    //float aBufferLength = 0.185759637188209;
    //AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration, sizeof(aBufferLength), &aBufferLength);
    AudioSessionSetActive(YES);
    */

    AudioSessionInitialize(NULL, NULL, NULL, nil);
    AudioSessionSetActive(true);
    UInt32 sessionCategory = kAudioSessionCategory_MediaPlayback;
    /* for iPhone we need to do this to route the audio to the speaker */
    status = AudioSessionSetProperty(
                 kAudioSessionProperty_AudioCategory,
                 sizeof(sessionCategory),
                 &sessionCategory);
    //NSLog(@"Error: %d", status);
    //
    // UInt32 audioRouteOverride = kAudioSessionOverrideAudioRoute_Speaker;
    // status = AudioSessionSetProperty(
    //              kAudioSessionProperty_OverrideAudioRoute,
    //              sizeof(audioRouteOverride),
    //              &audioRouteOverride);

    UInt32 audioMixed = 1;
    status = AudioSessionSetProperty(
                 kAudioSessionProperty_OverrideCategoryMixWithOthers,
                 sizeof(audioMixed),
                 &audioMixed);
}
- (void)processAudio:(AudioBuffer *)buffer
{
    short pcmTemp[160];
    unsigned char *amrBuffer = NULL;
    AudioUnitSampleType sample;
    int i = 0;
    int j = 0;

    if ([circularBuf isReadTwoRegion]) {
        amrBuffer = [circularBuf ReadData];
    } else {
        amrBuffer = [circularBuf ReadData];
        i = [circularBuf ReadPos];
    }
    j = i + circularBuf.Length;

    if (j - i >= 320) {
        memcpy((void *)pcmTemp, (void *)amrBuffer, 320);
        for (i = 0; i < 160; i++)
        {
            sample = 3.162277 * pcmTemp[i]; // 10dB
            if (sample > 32767) sample = 32767;
            else if (sample < -32768) sample = -32768;
            buffData[i] = sample;
        }
        memcpy(buffer->mData, buffData, buffer->mDataByteSize);
        [circularBuf AdvanceReadPos:320];
    }
    else
    {
        memset(buffer->mData, 0, buffer->mDataByteSize);
    }
}
/**
 This callback is called when the audioUnit needs new data to play through the
 speakers. If you don't have any, just don't write anything in the buffers.
 */
static OSStatus playCallback(void *inRefCon,
                             AudioUnitRenderActionFlags *ioActionFlags,
                             const AudioTimeStamp *inTimeStamp,
                             UInt32 inBusNumber,
                             UInt32 inNumberFrames,
                             AudioBufferList *ioData)
{
    // Notes: ioData contains buffers (may be more than one!)
    // Fill them up as much as you can. Remember to set the size value in each buffer to match how
    // much data is in the buffer.
    AudioPlay *audioPlay = (AudioPlay *)inRefCon;

    for (int i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    ioData->mBuffers[0].mNumberChannels = 1;
    [audioPlay processAudio:&ioData->mBuffers[0]];
    return noErr;
}
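One thing worth double-checking in the format setup above is what kAudioFormatFlagsCanonical expands to on the SDK in use; a minimal sketch of the 8 kHz mono 16-bit integer format the code appears to intend, with the flags spelled out explicitly (an assumption, not a confirmed fix for the noise):
// Sketch: explicit 16-bit signed-integer, packed, mono LPCM at 8 kHz,
// written out without relying on the canonical flag constants.
AudioStreamBasicDescription fmt = {0};
fmt.mSampleRate       = 8000;
fmt.mFormatID         = kAudioFormatLinearPCM;
fmt.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
fmt.mChannelsPerFrame = 1;
fmt.mBitsPerChannel   = 16;
fmt.mFramesPerPacket  = 1;
fmt.mBytesPerFrame    = 2;
fmt.mBytesPerPacket   = 2;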
