Auto-detect USB Device Connect/Disconnect - macos

I have a Cocoa app that needs to be notified whenever a device is connected or disconnected from a USB port. I can get the DeviceConnected callback to work, but the DeviceDisconnected function does not get called when disconnecting a USB device.
Below is my code:
// Registers for USB device attach/detach notifications on the current run loop.
// Fixes vs. the question's version:
//   * disconnects are delivered by kIOTerminatedNotification, not a second
//     kIOMatchedNotification registration;
//   * each registration needs its own iterator, kept armed by draining it.
+ (void)listenForUSBEvents
{
    io_iterator_t connectIterator = 0;
    io_iterator_t disconnectIterator = 0;

    CFMutableDictionaryRef matchingDict = IOServiceMatching( kIOUSBDeviceClassName );
    IONotificationPortRef notifyPort = IONotificationPortCreate( kIOMasterPortDefault );
    CFRunLoopSourceRef runLoopSource = IONotificationPortGetRunLoopSource( notifyPort );
    CFRunLoopRef runLoop = CFRunLoopGetCurrent();
    CFRunLoopAddSource( runLoop, runLoopSource, kCFRunLoopDefaultMode );

    // IOServiceAddMatchingNotification consumes one reference to the matching
    // dictionary per call, so retain it once before the second registration.
    CFRetain( matchingDict );

    kern_return_t returnCode = IOServiceAddMatchingNotification( notifyPort, kIOMatchedNotification, matchingDict, DeviceConnected, NULL, &connectIterator );
    if ( returnCode == 0 )
    {
        // Drain the iterator to arm the notification and to pick up devices
        // that are already attached.
        DeviceConnected( nil, connectIterator );
    }

    // kIOTerminatedNotification fires when a matched device is removed.
    returnCode = IOServiceAddMatchingNotification( notifyPort, kIOTerminatedNotification, matchingDict, DeviceDisconnected, NULL, &disconnectIterator );
    if ( returnCode == 0 )
    {
        DeviceDisconnected( nil, disconnectIterator );
    }
}
@end
// Matching-notification callback: invoked with a batch of newly attached USB
// devices. The iterator must be fully drained — and every object released —
// or the notification will not re-arm and no further devices leak through.
void DeviceConnected( void *refCon, io_iterator_t iterator )
{
    io_object_t usbDevice;
    while ( ( usbDevice = IOIteratorNext( iterator ) ) )
    {
        io_name_t name;
        kern_return_t returnCode = IORegistryEntryGetName( usbDevice, name );
        // Release in every path; the original leaked each device object and
        // returned early on failure, abandoning the rest of the iterator.
        IOObjectRelease( usbDevice );
        if ( returnCode != KERN_SUCCESS )
        {
            continue;
        }
        [[NSNotificationCenter defaultCenter] postNotificationName:deviceConnectedNotification object:nil userInfo:nil];
    }
}
// NOTE: this is the question's original, NON-working version. It never drains
// the iterator, so the notification fires at most once and never re-arms —
// the corrected version appears later in this document.
void DeviceDisconnected( void *refCon, io_iterator_t iterator )
{
[[NSNotificationCenter defaultCenter] postNotificationName:deviceDiconnectedNotification object:nil userInfo:nil];
}

I figured out what I was doing wrong.
First of all, IOServiceAddMatchingNotification for deviceDisconnected should look like this:
returnCode = IOServiceAddMatchingNotification( notifyPort, kIOTerminatedNotification, matchingDict, DeviceDisconnected, NULL, &portIterator );
Second, the DeviceDisconnected function should look like this:
// Termination-notification callback: drains the iterator (which re-arms the
// notification), releases each removed device object, then broadcasts.
void DeviceDisconnected( void *refCon, io_iterator_t iterator )
{
    kern_return_t returnCode = KERN_FAILURE;
    io_object_t usbDevice;
    // The function is IOIteratorNext (capital I-O); C is case-sensitive, so
    // the lowercase `ioIteratorNext` in the original would not link.
    while ( ( usbDevice = IOIteratorNext( iterator ) ) )
    {
        returnCode = IOObjectRelease( usbDevice );
        if ( returnCode != kIOReturnSuccess )
        {
            NSLog( @"Couldn't release raw device object: %08x.", returnCode );
        }
    }
    [[NSNotificationCenter defaultCenter] postNotificationName:deviceDiconnectedNotification object:nil userInfo:nil];
}

Related

Executable not running in windows except when using terminal

I'm using the SDL_TFF library and it compiles, but the executable doesn't run directly, except when I run it from the terminal. If I comment out the SDL_TFF code it runs normally.
Does anyone know what could be causing this problem?
Edit:
#include <stdio.h>
#include <SDL.h>
#include <SDL_ttf.h> //version 2.0.12
TTF_Font* Font;
SDL_DisplayMode Desktop_Display_Mode;
SDL_Window* Window;
SDL_Surface* Window_surface;
SDL_Renderer* Renderer;
// Loads the UI font into the global `Font`.
// Returns false (logging via SDL) when the file cannot be opened. Note the
// path is relative to the process's current working directory, not to the
// executable's directory — the root cause of the "only runs from terminal"
// symptom described below.
bool load_font()
{
    //Font = TTF_OpenFont("fonts\\NotoSans-Bold.ttf", 16);
    Font = TTF_OpenFont("fonts\\NotoSansCJKjp-Bold.otf", 18);
    if (Font == NULL)
    {
        // Fixed message typo: "Count not" -> "Could not".
        SDL_LogCritical(
            SDL_LOG_CATEGORY_APPLICATION,
            "Could not load font! TTF_Error: %s\n", TTF_GetError()
        );
        return false;
    }
    return true;
}
// One-shot startup for the whole app: SDL video, SDL_ttf, the font, the
// desktop display mode, a desktop-sized borderless window, and a vsynced
// accelerated renderer — in that order (TTF_Init must precede load_font;
// the window must exist before its renderer).
// Returns false at the first failure; resources already created are left
// allocated for main() to tear down.
bool initialize()
{
// Initialize SDL Library
if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
{
SDL_LogCritical(
SDL_LOG_CATEGORY_APPLICATION,
"SDL could not initialize! SDL_Error: %s\n", SDL_GetError()
);
return false;
}
// Initialize SDL_ttf library
if (TTF_Init() != 0)
{
SDL_LogCritical(
SDL_LOG_CATEGORY_APPLICATION,
"SDL_ttf could not initialize! TTF_Error: %s\n", TTF_GetError()
);
return false;
}
// Load Font ---------------------------------------------------------------
// NOTE(review): load_font uses a relative path, so this fails whenever the
// working directory is not the program directory.
if ( !load_font() )
{
return false;
}
// Get Desktop Display Mode (used below to size the borderless window)
if (SDL_GetDesktopDisplayMode(0, &Desktop_Display_Mode) != 0)
{
SDL_LogCritical(
SDL_LOG_CATEGORY_APPLICATION,
"SDL could not get Desktop Display Mode! SDL_Error: %s\n",
SDL_GetError()
);
return false;
}
// Create Window
Window = SDL_CreateWindow(
"SDL Test",
SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
Desktop_Display_Mode.w, Desktop_Display_Mode.h,
SDL_WINDOW_BORDERLESS //Flags
);
if( Window == NULL )
{
SDL_LogCritical(
SDL_LOG_CATEGORY_APPLICATION,
"Window could not be created! SDL_Error: %s\n",
SDL_GetError()
);
return false;
}
// Create a hardware-accelerated, vsync-locked renderer for the window.
Renderer = SDL_CreateRenderer( Window, -1,
SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC
);
if ( Renderer == NULL )
{
SDL_LogCritical(
SDL_LOG_CATEGORY_APPLICATION,
"Renderer could not be created! SDL_Error: %s\n",
SDL_GetError()
);
return false;
}
return true;
}
// Paint the entire render target opaque black.
// Returns true on success; on any SDL failure, logs the error and returns
// false so the caller can abort the frame loop.
bool clear_screen()
{
    if ( SDL_SetRenderDrawColor( Renderer, 0x00, 0x00, 0x00, 0xFF ) != 0 )
    {
        SDL_LogCritical( SDL_LOG_CATEGORY_APPLICATION,
                         "Could not set render draw color! SDL_Error: %s\n",
                         SDL_GetError() );
        return false;
    }

    if ( SDL_RenderClear( Renderer ) != 0 )
    {
        SDL_LogCritical( SDL_LOG_CATEGORY_APPLICATION,
                         "Could not clear the renderer! SDL_Error: %s\n",
                         SDL_GetError() );
        return false;
    }

    return true;
}
// Entry point: initialize SDL, rasterize a demo string once into a texture,
// then loop clearing/redrawing until quit or an error.
// Fixes vs. the original: text setup is skipped when initialize() failed
// (the original called TTF_SizeUTF8 on a NULL Font), and the surface,
// texture, font, and renderer are now released — with the renderer destroyed
// before its window.
int main( int argc, char* args[] )
{
    SDL_Log("Started.");
    bool running = initialize();

    SDL_Color text_color = {255, 255, 255};
    const char* text_string =
        "A journey of a thousand miles begins with a single step.\n こんにちは";
    SDL_Rect text_dest;
    text_dest.x = 100;
    text_dest.y = 100;
    text_dest.w = 0;
    text_dest.h = 0;

    SDL_Surface* text_surface = NULL;
    SDL_Texture* text_texture = NULL;

    // Only touch Font/Renderer if initialization actually succeeded.
    if (running)
    {
        if ( TTF_SizeUTF8(Font, text_string, &text_dest.w, &text_dest.h) != 0)
        {
            SDL_LogCritical(
                SDL_LOG_CATEGORY_APPLICATION,
                "Unable to get text size! TTF_Error: %s\n", TTF_GetError()
            );
            running = false;
        }
    }
    if (running)
    {
        text_surface = TTF_RenderUTF8_Solid( Font, text_string, text_color );
        if ( text_surface == NULL )
        {
            SDL_LogCritical(
                SDL_LOG_CATEGORY_APPLICATION,
                "Unable to render text! SDL_Error: %s\n",
                TTF_GetError()
            );
            running = false;
        }
    }
    if (running)
    {
        text_texture = SDL_CreateTextureFromSurface( Renderer, text_surface );
        if ( text_texture == NULL )
        {
            SDL_LogCritical(
                SDL_LOG_CATEGORY_APPLICATION,
                "Unable to render text! SDL_Error: %s\n",
                TTF_GetError()
            );
            running = false;
        }
    }

    // MAIN LOOP ===============================================================
    SDL_Event event;
    while (running)
    {
        // Clear the screen
        if ( !clear_screen() )
        {
            break;
        }
        // Check for the quit event
        if (SDL_PollEvent( &event))
        {
            if (event.type == SDL_QUIT)
            {
                SDL_Log("Quit.");
                break;
            }
        }
        // Apply the text
        if ( SDL_RenderCopy( Renderer, text_texture, NULL, &text_dest ) != 0 )
        {
            SDL_LogCritical(
                SDL_LOG_CATEGORY_APPLICATION,
                "Unable to draw text! SDL_Error: %s\n",
                SDL_GetError()
            );
            break;
        }
        //Update Window
        SDL_RenderPresent( Renderer );
    }

    // Cleanup: release everything the original leaked; renderer before window.
    if (text_texture) SDL_DestroyTexture( text_texture );
    if (text_surface) SDL_FreeSurface( text_surface );
    if (Font) TTF_CloseFont( Font );
    if (Renderer) SDL_DestroyRenderer( Renderer );
    if (Window) SDL_DestroyWindow( Window );
    //Quit SDL subsystems
    TTF_Quit();
    SDL_Quit();
    SDL_Log("Ended.");
    return 0;
}
Edit2: I tested more thoroughly and it seems that the function TTF_OpenFont() is causing the problem. As long as I don't call that function the exe will run normally.
SOLVED!
It was the way I was executing the program from command line: "\bin\myprog.exe"
Thus the relative filepath was incorrect because "\bin\fonts..." did not exist. As I have not implemented SDL_LogSetOutputFunction to write logs to a file and std out is apparently suppressed I could not see that I must have been getting error messages indicating that the file was not being loaded.

Official method to programmatically create .textClipping?

The Finder creates a .textClipping when dragged text is dropped.
Is the underlying code somewhere exposed? I didn't find anything in the documentation, on this site, and in the Internet.
Or is there a better method than the below to programmatically create a .textClipping?
// Builds a .textClipping by writing a Rez source file describing 'utf8' and
// 'drag' resources, then shelling out to the Rez tool to compile it into the
// clip file's resource fork. Requires the Xcode command-line tools.
// (Restored the @ that this listing's extraction had garbled into #.)
-(void)makeTextClip:(NSString*)srcStr :(NSString*)clipPath
{
    NSUInteger len;
    NSData *data = [NSData dataWithBytes:[srcStr UTF8String]
                                  length:(len = [srcStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding])];
    // -[NSData description] yields "<xxxxxxxx ...>"; strip the angle brackets
    // to reuse the hex dump as a Rez $"..." data literal.
    // NOTE(review): this relies on the historical NSData description format,
    // which changed (truncates) on newer macOS — verify before depending on it.
    NSMutableString *hexStr = [NSMutableString stringWithCapacity:len+150];
    [hexStr setString:[data description]];
    if([hexStr hasPrefix:@"<"]) [hexStr deleteCharactersInRange:NSMakeRange(0,1)];
    if([hexStr hasSuffix:@">"]) [hexStr deleteCharactersInRange:NSMakeRange([hexStr length]-1,1)];
    [hexStr insertString:
        @"data 'utf8' (256) {\n"
         "$\""
             atIndex:0];
    [hexStr appendString:
        @"\"\n};\n"
         "data \'drag\' (128) "
         "{\n$\"0000 0001 0000 0000 0000 0000 0000 0001\"\n"
         "$\"7574 6638 0000 0100 0000 0000 0000 0000\"\n};"
    ];
    NSError *err = nil;
    NSURL *tmpDir = [[NSFileManager defaultManager]
                     URLForDirectory:NSItemReplacementDirectory
                     inDomain:NSUserDomainMask
                     appropriateForURL:[NSURL fileURLWithPath:@"M"]
                     create:YES
                     error:&err];
    if(err || !tmpDir) { NSBeep(); return; }
    // NOTE(review): mktemp() mutates its argument in place, but the pointer
    // from -cStringUsingEncoding: is an interior const buffer — mkstemp() on
    // a private copy would be safer. Left as-is to preserve behavior.
    char *tmpName = mktemp((char*)
                           [[[tmpDir
                              URLByAppendingPathComponent:@"XXXXXX"]
                             path]
                            cStringUsingEncoding:NSASCIIStringEncoding]
                           );
    if(!tmpName) { NSBeep(); return; }
    FILE *tmpFile = fopen(tmpName, "w+");
    if(!tmpFile) { NSBeep(); return; }
    const char *cStr = [hexStr cStringUsingEncoding:NSASCIIStringEncoding];
    if(!cStr) { NSBeep(); goto cleanup; }
    len = strlen(cStr);
    int ret = fprintf(tmpFile, "%s", cStr);
    if(ret < 0 || ret != len) { NSBeep(); goto cleanup; }
    fclose(tmpFile);
    tmpFile = NULL;
    const char *cmd = [[NSString
                        stringWithFormat:
                        @"Rez \'%s\' -align longword -c MACS -t clpt -o \'%@\'",
                        tmpName, clipPath]
                       cStringUsingEncoding:NSASCIIStringEncoding];
    if(!cmd) { NSBeep(); goto cleanup; }
    // system(NULL) checks that a shell is available before invoking Rez.
    ret = system(NULL);
    if(!ret) { NSBeep(); goto cleanup; }
    ret = system(cmd);
    if(ret < 0 || ret == 127) NSBeep();
cleanup:
    if(tmpFile) fclose(tmpFile);
    if(tmpDir) [[NSFileManager defaultManager] removeItemAtURL:tmpDir error:nil];
}
Edit:
Better, yet not official:
// Byte-order flipper registered for the 'drag' resource type. Returning
// handlerNotFoundErr tells CoreEndian no flipper applies, leaving the
// resource bytes untouched — presumably because the 'drag' record is already
// authored in on-disk (big-endian) layout by the caller.
OSStatus dragCoreEndianFlipProc(OSType ostype, OSType datatype, short rid, void* dataPtr, UInt32 dataSize, Boolean currentlyNative, void* refcon)
{
return handlerNotFoundErr;
}
// Alternative .textClipping writer: creates an empty file, then uses the
// legacy Resource Manager to write 'drag' (128) and 'utf8' (256) resources
// directly into its resource fork — no external Rez tool needed.
// NOTE(review): the results of CoreEndianInstallFlipper and
// FSCreateResourceFork are ignored, and on success ownership of dragHand /
// utf8Hand passes to the resource file via AddResource (CloseResFile writes
// and releases them) — confirm the error paths free exactly what they own.
-(void)makeTextClip:(NSString*)srcStr :(NSURL*)clipPath
{
NSFileManager *fm = [NSFileManager defaultManager];
if(![fm createFileAtPath:[clipPath path] contents:[NSData data] attributes:nil]) { NSBeep(); return; }
FSRef fsRef;
if(!CFURLGetFSRef((CFURLRef)clipPath, &fsRef)) { NSBeep(); return; }
HFSUniStr255 resourceForkName;
OSErr err = FSGetResourceForkName(&resourceForkName);
if(err) { NSBeep(); return; }
OSStatus stat;
// Register a no-op flipper so 'drag' bytes are written exactly as built.
stat = CoreEndianInstallFlipper(kCoreEndianResourceManagerDomain, 'drag', (CoreEndianFlipProc)dragCoreEndianFlipProc, NULL);
ResFileRefNum newForkRef = -1, curForkRef;
curForkRef = CurResFile();
err = FSCreateResourceFork(&fsRef, resourceForkName.length, resourceForkName.unicode, 0);
err = FSOpenResourceFile(&fsRef, resourceForkName.length, resourceForkName.unicode, fsCurPerm, &newForkRef);
if(err) { NSBeep(); return; }
UseResFile(newForkRef);
Handle dragHand, utf8Hand;
// Hand-built big-endian 'drag' record referencing the 'utf8' (256) resource.
int dragBytes[8] = { Endian32_Swap(1), 0, 0, Endian32_Swap(1), Endian32_Swap('utf8'), Endian32_Swap(256), 0, 0 };
err = PtrToHand(dragBytes, &dragHand, 32);
if(err) {
NSBeep();
DisposeHandle(dragHand);
dragHand = NULL;
goto cleanup;
}
const char *utf8Bytes = [srcStr UTF8String];
err = PtrToHand(utf8Bytes, &utf8Hand, strlen(utf8Bytes));
if(err) {
NSBeep();
if(dragHand) DisposeHandle(dragHand);
DisposeHandle(utf8Hand);
goto cleanup;
}
ResType dragResType = 'drag', utf8Type = 'utf8';
ResID dragid = 128, utf8id = 256;
if(CurResFile() == newForkRef) {
AddResource(dragHand, dragResType, dragid, NULL);
if(ResError()) { NSBeep(); goto cleanup; }
AddResource(utf8Hand, utf8Type, utf8id, NULL);
if(ResError()) { NSBeep(); goto cleanup; }
}
cleanup:
// CloseResFile flushes the added resources to disk and releases them.
CloseResFile(newForkRef);
UseResFile(curForkRef);
}

How to record and play back audio in real time on OS X

I'm trying to record sound from the microphone and play it back in real time on OS X. Eventually it will be streamed over the network, but for now I'm just trying to achieve local recording/playback.
I'm able to record sound and write to a file, which I could do with both AVCaptureSession and AVAudioRecorder. However, I'm not sure how to play back the audio as I record it. Using AVCaptureAudioDataOutput works:
self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
self.serialQueue = dispatch_queue_create("audioQueue", NULL);
[audioDataOutput setSampleBufferDelegate:self queue:self.serialQueue];
if (audioInput && [self.captureSession canAddInput:audioInput] && [self.captureSession canAddOutput:audioDataOutput]) {
[self.captureSession addInput:audioInput];
[self.captureSession addOutput:audioDataOutput];
[self.captureSession startRunning];
// Stop after arbitrary time
double delayInSeconds = 4.0;
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
[self.captureSession stopRunning];
});
} else {
NSLog(@"Couldn't add them; error = %@",error);
}
...but I'm not sure how to implement the callback:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
?
}
I've tried getting the data out of the sampleBuffer and playing it using AVAudioPlayer by copying the code from this SO answer, but that code crashes on the appendBytes:length: method.
AudioBufferList audioBufferList;
NSMutableData *data= [NSMutableData data];
CMBlockBufferRef blockBuffer;
CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);
for( int y=0; y< audioBufferList.mNumberBuffers; y++ ){
AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
Float32 *frame = (Float32*)audioBuffer.mData;
NSLog(@"Length = %i",audioBuffer.mDataByteSize);
[data appendBytes:frame length:audioBuffer.mDataByteSize]; // Crashes here
}
CFRelease(blockBuffer);
NSError *playerError;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:data error:&playerError];
if(player && !playerError) {
NSLog(@"Player was valid");
[player play];
} else {
NSLog(@"Error = %@",playerError);
}
Edit The CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer method returns an OSStatus code of -12737, which according to the documentation is kCMSampleBufferError_ArrayTooSmall
Edit2: Based on this mailing list response, I passed a size_t out parameter as the second parameter to ...GetAudioBufferList.... This returned 40. Right now I'm just passing in 40 as a hard-coded value, which seems to work (the OSStatus return value is 0, atleast).
Now the player initWithData:error: method gives the error:
Error Domain=NSOSStatusErrorDomain Code=1954115647 "The operation couldn’t be completed. (OSStatus error 1954115647.)" which I'm looking into.
I've done iOS programming for a long time, but I haven't used AVFoundation, CoreAudio, etc until now. It looks like there are a dozen ways to accomplish the same thing, depending on how low or high level you want to be, so any high level overviews or framework recommendations are appreciated.
Appendix
Recording to a file
Recording to a file using AVCaptureSession:
// App launch: observe session-start (so recording begins only once the
// capture session is actually running), build an audio-only capture pipeline,
// and stop it after 5 seconds. (Restored @ characters garbled into #.)
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification
{
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionStartedNotification:) name:AVCaptureSessionDidStartRunningNotification object:nil];
    self.captureSession = [[AVCaptureSession alloc] init];
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    AVCaptureAudioFileOutput *audioOutput = [[AVCaptureAudioFileOutput alloc] init];
    if (audioInput && [self.captureSession canAddInput:audioInput] && [self.captureSession canAddOutput:audioOutput]) {
        NSLog(@"Can add the inputs and outputs");
        [self.captureSession addInput:audioInput];
        [self.captureSession addOutput:audioOutput];
        [self.captureSession startRunning];
        // Stop after an arbitrary 5 seconds.
        double delayInSeconds = 5.0;
        dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, (int64_t)(delayInSeconds * NSEC_PER_SEC));
        dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
            [self.captureSession stopRunning];
        });
    }
    else {
        NSLog(@"Error was = %@",error);
    }
}
// AVCaptureSessionDidStartRunningNotification handler: once the session is
// running, start writing its (sole) audio output to the M4A output URL.
// (Restored @ characters garbled into # in the format strings.)
- (void)captureSessionStartedNotification:(NSNotification *)notification
{
    AVCaptureSession *session = notification.object;
    id audioOutput = session.outputs[0];
    NSLog(@"Capture session started; notification = %@",notification);
    NSLog(@"Notification audio output = %@",audioOutput);
    [audioOutput startRecordingToOutputFileURL:[[self class] outputURL] outputFileType:AVFileTypeAppleM4A recordingDelegate:self];
}
// URL for the recording output: <Documents>/z1.alac.
// NOTE(review): the file written above uses AVFileTypeAppleM4A, so an .m4a
// extension would match the container better than .alac.
+ (NSURL *)outputURL
{
    NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentPath = [searchPaths objectAtIndex:0];
    NSString *filePath = [documentPath stringByAppendingPathComponent:@"z1.alac"];
    return [NSURL fileURLWithPath:filePath];
}
Recording to a file using AVAudioRecorder:
NSDictionary *recordSettings = [NSDictionary
dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:AVAudioQualityMin],
AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16],
AVEncoderBitRateKey,
[NSNumber numberWithInt: 2],
AVNumberOfChannelsKey,
[NSNumber numberWithFloat:44100.0],
AVSampleRateKey,
@(kAudioFormatAppleLossless),
AVFormatIDKey,
nil];
NSError *recorderError;
self.recorder = [[AVAudioRecorder alloc] initWithURL:[[self class] outputURL] settings:recordSettings error:&recorderError];
self.recorder.delegate = self;
if (self.recorder && !recorderError) {
NSLog(@"Success!");
[self.recorder recordForDuration:10];
} else {
NSLog(@"Failure, recorder = %@",self.recorder);
NSLog(@"Error = %@",recorderError);
}
Ok, I ended up working at a lower level than AVFoundation -- not sure if that was necessary. I read up to Chapter 5 of Learning Core Audio and went with an implementation using Audio Queues. This code is translated from being used for recording to a file/playing back a file, so there are surely some unnecessary bits I've accidentally left in. Additionally, I'm not actually re-enqueuing buffers onto the Output Queue (I should be), but just as a proof of concept this works. The only file is listed here, and is also on Github.
//
// main.m
// Recorder
//
// Created by Maximilian Tagher on 8/7/13.
// Copyright (c) 2013 Tagher. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#define kNumberRecordBuffers 3
//#define kNumberPlaybackBuffers 3
#define kPlaybackFileLocation CFSTR("/Users/Max/Music/iTunes/iTunes Media/Music/Taylor Swift/Red/02 Red.m4a")
#pragma mark - User Data Struct
// listing 4.3
// User-data records handed to the audio queue callbacks (listing 4.3 style).
// Each side holds a non-owning back-pointer to the other so the input
// callback can feed the playback queue directly.
struct MyRecorder;
// Playback side: output-queue state.
typedef struct MyPlayer {
AudioQueueRef playerQueue;  // output (playback) queue
SInt64 packetPosition;      // playback position, in packets
UInt32 numPacketsToRead;    // packets per playback buffer
AudioStreamPacketDescription *packetDescs;  // allocated only for VBR formats
Boolean isDone;             // set when playback should stop
struct MyRecorder *recorder;  // capture partner (not owned)
} MyPlayer;
// Capture side: input-queue state.
typedef struct MyRecorder {
AudioQueueRef recordQueue;  // input (capture) queue
SInt64 recordPacket;        // packets captured so far
Boolean running;            // cleared to stop re-enqueuing input buffers
MyPlayer *player;           // playback partner (not owned)
} MyRecorder;
#pragma mark - Utility functions
// Listing 4.2
// Listing 4.2 — abort helper: if `error` is not noErr, print `operation`
// along with the error rendered either as a four-char code (when all four
// bytes are printable) or as a decimal integer, then exit(1).
// (Restored the @ garbled into # in the NSLog literal.)
static void CheckError(OSStatus error, const char *operation) {
    if (error == noErr) return;
    char errorString[20];
    // See if it appears to be a 4-char-code
    *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(error);
    if (isprint(errorString[1]) && isprint(errorString[2])
        && isprint(errorString[3]) && isprint(errorString[4])) {
        errorString[0] = errorString[5] = '\'';
        errorString[6] = '\0';
    } else {
        // No, format it as an integer
        NSLog(@"Was integer");
        sprintf(errorString, "%d",(int)error);
    }
    fprintf(stderr, "Error: %s (%s)\n",operation,errorString);
    exit(1);
}
// Queries Core Audio for the default input device's nominal sample rate.
// On success returns noErr and writes the rate to *outSampleRate; otherwise
// returns the first failing OSStatus.
// NOTE(review): AudioHardwareServiceGetPropertyData was deprecated on later
// macOS releases — AudioObjectGetPropertyData is the modern replacement.
OSStatus MyGetDefaultInputDeviceSampleRate(Float64 *outSampleRate)
{
OSStatus error;
AudioDeviceID deviceID = 0;
AudioObjectPropertyAddress propertyAddress;
UInt32 propertySize;
// First resolve which device is the system default input.
propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice;
propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
propertyAddress.mElement = 0;
propertySize = sizeof(AudioDeviceID);
error = AudioHardwareServiceGetPropertyData(kAudioObjectSystemObject,
&propertyAddress, 0, NULL,
&propertySize,
&deviceID);
if (error) return error;
// Then ask that device for its nominal sample rate.
propertyAddress.mSelector = kAudioDevicePropertyNominalSampleRate;
propertyAddress.mScope = kAudioObjectPropertyScopeGlobal;
propertyAddress.mElement = 0;
propertySize = sizeof(Float64);
error = AudioHardwareServiceGetPropertyData(deviceID,
&propertyAddress, 0, NULL,
&propertySize,
outSampleRate);
return error;
}
// Recorder
// Recorder — copy the magic cookie (opaque encoder state, needed by formats
// such as AAC/ALAC) from an audio queue into the destination audio file.
// Silently a no-op when the queue reports no cookie.
static void MyCopyEncoderCookieToFile(AudioQueueRef queue, AudioFileID theFile)
{
OSStatus error;
UInt32 propertySize;
// Probe for the cookie's size first; absence (or size 0) means nothing to do.
error = AudioQueueGetPropertySize(queue, kAudioConverterCompressionMagicCookie, &propertySize);
if (error == noErr && propertySize > 0) {
Byte *magicCookie = (Byte *)malloc(propertySize);
CheckError(AudioQueueGetProperty(queue, kAudioQueueProperty_MagicCookie, magicCookie, &propertySize), "Couldn't get audio queue's magic cookie");
CheckError(AudioFileSetProperty(theFile, kAudioFilePropertyMagicCookieData, propertySize, magicCookie), "Couldn't set audio file's magic cookie");
free(magicCookie);
}
}
// Player
// Player — copy the encoder magic cookie from a source audio file onto an
// audio queue, if the file has one. Needed for formats (AAC/ALAC) whose
// decoder state lives in the cookie.
static void MyCopyEncoderCookieToQueue(AudioFileID theFile, AudioQueueRef queue)
{
    UInt32 propertySize;
    // Just check for presence of cookie
    OSStatus result = AudioFileGetProperty(theFile, kAudioFilePropertyMagicCookieData, &propertySize, NULL);
    if (result == noErr && propertySize != 0) {
        Byte *magicCookie = (UInt8*)malloc(sizeof(UInt8) * propertySize);
        CheckError(AudioFileGetProperty(theFile, kAudioFilePropertyMagicCookieData, &propertySize, magicCookie), "Get cookie from file failed");
        // Fixed operation text: this call sets the cookie on the QUEUE, not
        // on a file — the old message would mislead during debugging.
        CheckError(AudioQueueSetProperty(queue, kAudioQueueProperty_MagicCookie, magicCookie, propertySize), "Set cookie on queue failed");
        free(magicCookie);
    }
}
// Computes the byte size of a capture buffer holding `seconds` of audio in
// `format`. For constant-bytes-per-frame formats this is a direct multiply;
// otherwise it estimates a packet count and multiplies by the largest
// possible packet size (asking the queue when the format doesn't say).
static int MyComputeRecordBufferSize(const AudioStreamBasicDescription *format, AudioQueueRef queue, float seconds)
{
    int frameCount = (int)ceil(seconds * format->mSampleRate);

    // Fixed bytes per frame: the easy, exact case.
    if (format->mBytesPerFrame > 0) {
        return frameCount * format->mBytesPerFrame;
    }

    // Variable bytes per frame: find an upper bound on packet size.
    UInt32 packetCeiling;
    if (format->mBytesPerPacket > 0) {
        packetCeiling = format->mBytesPerPacket;  // constant packet size
    } else {
        UInt32 ceilingSize = sizeof(packetCeiling);
        CheckError(AudioQueueGetProperty(queue, kAudioConverterPropertyMaximumOutputPacketSize, &packetCeiling, &ceilingSize), "Couldn't get queue's maximum output packet size");
    }

    // Worst case when frames-per-packet is unknown: one frame per packet.
    int packetCount = (format->mFramesPerPacket > 0)
        ? frameCount / format->mFramesPerPacket
        : frameCount;
    if (packetCount == 0) {
        packetCount = 1;  // never return a zero-sized buffer
    }
    return packetCount * packetCeiling;
}
// Sizes a playback buffer for `inSeconds` of audio in format `inDesc`,
// clamped to [0x4000, 0x10000] bytes (but never below one maximum packet),
// and derives how many packets fit in it. Mirrors the Learning Core Audio
// Chapter 5 sizing logic.
void CalculateBytesForPlaythrough(AudioQueueRef queue,
AudioStreamBasicDescription inDesc,
Float64 inSeconds,
UInt32 *outBufferSize,
UInt32 *outNumPackets)
{
UInt32 maxPacketSize;
UInt32 propSize = sizeof(maxPacketSize);
CheckError(AudioQueueGetProperty(queue,
kAudioQueueProperty_MaximumOutputPacketSize,
&maxPacketSize, &propSize), "Couldn't get file's max packet size");
static const int maxBufferSize = 0x10000;
static const int minBufferSize = 0x4000;
if (inDesc.mFramesPerPacket) {
// Known packet duration: size the buffer for the requested time span.
Float64 numPacketsForTime = inDesc.mSampleRate / inDesc.mFramesPerPacket * inSeconds;
*outBufferSize = numPacketsForTime * maxPacketSize;
} else {
// Unknown duration: fall back to the larger of the cap and one packet.
*outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize;
}
// Clamp into range; the double condition keeps at least one max packet.
if (*outBufferSize > maxBufferSize &&
*outBufferSize > maxPacketSize) {
*outBufferSize = maxBufferSize;
} else {
if (*outBufferSize < minBufferSize) {
*outBufferSize = minBufferSize;
}
}
*outNumPackets = *outBufferSize / maxPacketSize;
}
#pragma mark - Record callback function
// Input-queue callback: runs on the capture thread each time a buffer fills.
// Copies the captured bytes into a freshly allocated buffer on the playback
// queue (LPCM, so no packet descriptions), then re-enqueues the input buffer
// so capture continues.
// NOTE(review): allocating a new output buffer on every callback is fine for
// a proof of concept but allocation on the realtime audio thread should be
// avoided in production (pre-allocate and recycle buffers instead).
static void MyAQInputCallback(void *inUserData,
AudioQueueRef inQueue,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp *inStartTime,
UInt32 inNumPackets,
const AudioStreamPacketDescription *inPacketDesc)
{
// NSLog(@"Input callback");
// NSLog(@"Input thread = %@",[NSThread currentThread]);
MyRecorder *recorder = (MyRecorder *)inUserData;
MyPlayer *player = recorder->player;
if (inNumPackets > 0) {
// Enqueue on the output Queue!
AudioQueueBufferRef outputBuffer;
CheckError(AudioQueueAllocateBuffer(player->playerQueue, inBuffer->mAudioDataBytesCapacity, &outputBuffer), "Input callback failed to allocate new output buffer");
memcpy(outputBuffer->mAudioData, inBuffer->mAudioData, inBuffer->mAudioDataByteSize);
outputBuffer->mAudioDataByteSize = inBuffer->mAudioDataByteSize;
// [NSData dataWithBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize];
// Assuming LPCM so no packet descriptions
CheckError(AudioQueueEnqueueBuffer(player->playerQueue, outputBuffer, 0, NULL), "Enqueing the buffer in input callback failed");
recorder->recordPacket += inNumPackets;
}
// Re-arm the input buffer unless we are shutting down.
if (recorder->running) {
CheckError(AudioQueueEnqueueBuffer(inQueue, inBuffer, 0, NULL), "AudioQueueEnqueueBuffer failed");
}
}
// Output-queue callback: fires when the playback queue finishes a buffer.
// Intentionally does nothing — output buffers are created and enqueued by
// the input callback instead (the author notes buffers are not recycled).
static void MyAQOutputCallback(void *inUserData, AudioQueueRef inAQ, AudioQueueBufferRef inCompleteAQBuffer)
{
// NSLog(@"Output thread = %@",[NSThread currentThread]);
// NSLog(@"Output callback");
MyPlayer *aqp = (MyPlayer *)inUserData;
MyRecorder *recorder = aqp->recorder;  // currently unused
if (aqp->isDone) return;
}
// Proof-of-concept pass-through: capture LPCM from the default input device
// with an input Audio Queue; the input callback copies each captured buffer
// straight onto an output Audio Queue for immediate playback.
// (Restored @autoreleasepool and the @ in two NSLog literals that this
// listing's extraction had garbled into #.)
int main(int argc, const char * argv[])
{
    @autoreleasepool {
        MyRecorder recorder = {0};
        MyPlayer player = {0};
        recorder.player = &player;
        player.recorder = &recorder;

        // Describe the capture format: 16-bit signed big-endian packed
        // stereo LPCM at the input device's native sample rate.
        AudioStreamBasicDescription recordFormat;
        memset(&recordFormat, 0, sizeof(recordFormat));
        recordFormat.mFormatID = kAudioFormatLinearPCM;
        recordFormat.mChannelsPerFrame = 2; //stereo
        // Begin my changes to make LPCM work
        recordFormat.mBitsPerChannel = 16;
        // Haven't checked if each of these flags is necessary, this is just what Chapter 2 used for LPCM.
        recordFormat.mFormatFlags = kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
        // end my changes
        MyGetDefaultInputDeviceSampleRate(&recordFormat.mSampleRate);
        UInt32 propSize = sizeof(recordFormat);
        // Let Core Audio fill in the derived ASBD fields.
        CheckError(AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
                                          0,
                                          NULL,
                                          &propSize,
                                          &recordFormat), "AudioFormatGetProperty failed");
        AudioQueueRef queue = {0};
        CheckError(AudioQueueNewInput(&recordFormat, MyAQInputCallback, &recorder, NULL, NULL, 0, &queue), "AudioQueueNewInput failed");
        recorder.recordQueue = queue;
        // Fills in ABSD a little more
        UInt32 size = sizeof(recordFormat);
        CheckError(AudioQueueGetProperty(queue,
                                         kAudioConverterCurrentOutputStreamDescription,
                                         &recordFormat,
                                         &size), "Couldn't get queue's format");
        // MyCopyEncoderCookieToFile(queue, recorder.recordFile);
        int bufferByteSize = MyComputeRecordBufferSize(&recordFormat,queue,0.5);
        NSLog(@"%d",__LINE__);
        // Create and Enqueue buffers
        int bufferIndex;
        for (bufferIndex = 0;
             bufferIndex < kNumberRecordBuffers;
             ++bufferIndex) {
            AudioQueueBufferRef buffer;
            CheckError(AudioQueueAllocateBuffer(queue,
                                                bufferByteSize,
                                                &buffer), "AudioQueueBufferRef failed");
            CheckError(AudioQueueEnqueueBuffer(queue, buffer, 0, NULL), "AudioQueueEnqueueBuffer failed");
        }
        // PLAYBACK SETUP
        AudioQueueRef playbackQueue;
        CheckError(AudioQueueNewOutput(&recordFormat,
                                       MyAQOutputCallback,
                                       &player, NULL, NULL, 0,
                                       &playbackQueue), "AudioOutputNewQueue failed");
        player.playerQueue = playbackQueue;
        UInt32 playBufferByteSize;
        CalculateBytesForPlaythrough(queue, recordFormat, 0.1, &playBufferByteSize, &player.numPacketsToRead);
        bool isFormatVBR = (recordFormat.mBytesPerPacket == 0
                            || recordFormat.mFramesPerPacket == 0);
        if (isFormatVBR) {
            NSLog(@"Not supporting VBR");
            player.packetDescs = (AudioStreamPacketDescription*) malloc(sizeof(AudioStreamPacketDescription) * player.numPacketsToRead);
        } else {
            player.packetDescs = NULL;
        }
        // END PLAYBACK
        recorder.running = TRUE;
        player.isDone = false;
        CheckError(AudioQueueStart(playbackQueue, NULL), "AudioQueueStart failed");
        CheckError(AudioQueueStart(queue, NULL), "AudioQueueStart failed");
        // Pump the run loop (up to 10 s) so queue callbacks fire, then wait
        // for the user to press return before shutting both queues down.
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 10, TRUE);
        printf("Playing through, press <return> to stop:\n");
        getchar();
        printf("* done *\n");
        recorder.running = FALSE;
        player.isDone = true;
        CheckError(AudioQueueStop(playbackQueue, false), "Failed to stop playback queue");
        CheckError(AudioQueueStop(queue, TRUE), "AudioQueueStop failed");
        AudioQueueDispose(playbackQueue, FALSE);
        AudioQueueDispose(queue, TRUE);
    }
    return 0;
}

how can I get information about the mouse, keyboard, gamepad in MAC OS

I need information about a device's buttons — how many there are, and so on.
I tried iokit but got a strange list
name = IOUSBRootHubDevice
name = IOUSBHubDevice
name = IOUSBDevice
name = IOUSBHubDevice
name = IOUSBDevice
name = IOUSBRootHubDevice
name = IOUSBHubDevice
name = IOUSBDevice
name = IOUSBDevice
name = IOUSBDevice
my code
CFMutableDictionaryRef matchingDict;
io_iterator_t iter;
kern_return_t kr;
io_service_t device;
io_name_t name;
//io_object_t device;
/* set up a matching dictionary for the class */
matchingDict = IOServiceMatching(kIOUSBDeviceClassName);
if (matchingDict == NULL){
return -1; // fail
}
/* Now we have a dictionary, get an iterator.*/
kr = IOServiceGetMatchingServices(kIOMasterPortDefault, matchingDict, &iter);
if (kr != KERN_SUCCESS){
return -1;
}
/* iterate */
while ((device = IOIteratorNext(iter))){
IOObjectGetClass(device, name);
printf("name = %s \n", name);
/* do something with device, eg. check properties */
/* ... */
/* And free the reference taken before continuing to the next item */
IOObjectRelease(device);
}
/* Done, release the iterator */
IOObjectRelease(iter);
there are probably a better option, but I can not find in google
I worked it out myself; here is what I ended up with:
// Obtains an IOHIDDeviceInterface122 for `hidDevice` via the intermediate
// IOCFPlugInInterface. On failure, *hidDeviceInterface is left untouched.
// The original printed its error messages unconditionally (the commented-out
// print_errmsg_if_* helpers show the intent); they are now guarded.
static void MyCreateHIDDeviceInterface(io_object_t hidDevice,
                                       IOHIDDeviceInterface122 ***hidDeviceInterface)
{
    io_name_t className;
    IOCFPlugInInterface **plugInInterface = NULL;
    HRESULT plugInResult = S_OK;
    SInt32 score = 0;
    IOReturn ioReturnValue = kIOReturnSuccess;

    ioReturnValue = IOObjectGetClass(hidDevice, className);
    if (ioReturnValue != kIOReturnSuccess) {
        printf("Failed to get class name.\n");
    } else {
        printf("Found device type %s\n", className);
    }

    ioReturnValue = IOCreatePlugInInterfaceForService(hidDevice,
                                                      kIOHIDDeviceUserClientTypeID,
                                                      kIOCFPlugInInterfaceID,
                                                      &plugInInterface,
                                                      &score);
    if (ioReturnValue == kIOReturnSuccess)
    {
        //Call a method of the intermediate plug-in to create the device
        //interface
        plugInResult = (*plugInInterface)->QueryInterface(plugInInterface,
                                                          CFUUIDGetUUIDBytes(kIOHIDDeviceInterfaceID),
                                                          (LPVOID *) hidDeviceInterface);
        if (plugInResult != S_OK) {
            printf("Couldn't create HID class device interface.\n");
        }
        // The plug-in was only needed to mint the device interface.
        IODestroyPlugInInterface(plugInInterface);
        //(*plugInInterface)->Release(plugInInterface);
    }
}
// Enumerates a HID device's element dictionaries and logs every element on
// the GenericDesktop or Button usage pages (pointer, mouse, joystick,
// gamepad, keyboard, keypad, axes, buttons). The element cookie is decoded
// into a local but not otherwise used yet.
// NOTE(review): copyMatchingElements returns an array the caller owns —
// `elements` is never CFRelease'd here; verify and release it.
void getCookies(IOHIDDeviceInterface122** deviceInterface){
CFArrayRef elements;
CFDictionaryRef element;
CFTypeRef object;
long number;
long usagePage;
long usage;
IOHIDElementCookie cookie;
(*deviceInterface)->copyMatchingElements(deviceInterface, NULL, &elements);
for ( CFIndex i=0; i<CFArrayGetCount(elements); i++ ){
element = (CFDictionaryRef)CFArrayGetValueAtIndex(elements, i);
// Get usage page (skip elements without a numeric usage page)
object = CFDictionaryGetValue(element, CFSTR(kIOHIDElementUsagePageKey));
if( object==0 || CFGetTypeID(object) != CFNumberGetTypeID() ){
continue;
}
if( !CFNumberGetValue((CFNumberRef) object, kCFNumberLongType, &number) ){
continue;
}
usagePage = number;
// Only the GenericDesktop and Button pages are of interest here.
if( usagePage!=kHIDPage_GenericDesktop && usagePage!=kHIDPage_Button ){
continue;
}
// Get usage
object = CFDictionaryGetValue( element, CFSTR(kIOHIDElementUsageKey) );
if( object==0 || CFGetTypeID(object) != CFNumberGetTypeID() ){
continue;
}
if( !CFNumberGetValue((CFNumberRef) object, kCFNumberLongType, &number) ){
continue;
}
usage = number;
// Get cookie (decoded but currently unused beyond this local)
object = CFDictionaryGetValue( element, CFSTR(kIOHIDElementCookieKey) );
if( object==0 || CFGetTypeID(object) != CFNumberGetTypeID() ){
continue;
}
if( !CFNumberGetValue((CFNumberRef) object, kCFNumberLongType, &number) ){
continue;
}
cookie = (IOHIDElementCookie) number;
// Log what kind of element this is.
if(usagePage == kHIDPage_GenericDesktop){
switch( usage )
{
case kHIDUsage_GD_Pointer: {
printf("kHIDUsage_GD_Pointer \n");
break;
}
case kHIDUsage_GD_Mouse: {
printf("kHIDUsage_GD_Mouse \n");
break;
};
case kHIDUsage_GD_Joystick: {
printf("kHIDUsage_GD_Joystick \n");
break;
}
case kHIDUsage_GD_GamePad: {
printf("kHIDUsage_GD_GamePad \n");
break;
}
case kHIDUsage_GD_Keyboard:{
printf("kHIDUsage_GD_Keyboard \n");
break;
};
case kHIDUsage_GD_Keypad: {
printf("kHIDUsage_GD_Keypad \n");
break;
};
case kHIDUsage_GD_MultiAxisController:{
printf("kHIDUsage_GD_MultiAxisController \n");
break;
};
case kHIDUsage_GD_X: {
printf("kHIDUsage_GD_X \n");
break;
};
case kHIDUsage_GD_Y: {
printf("kHIDUsage_GD_Y \n");
break;
};
}
} else if( usagePage == kHIDPage_Button){
printf("kHIDPage_Button \n");
}
}
}
/// Enumerates all HID devices in the IO registry and, for the known mouse /
/// keyboard / USB HID driver classes, opens a device interface and dumps the
/// element usages via getCookies().
/// @return 0 on success, -1 if matching-dictionary creation or service
///         enumeration fails.
int main(int argc, const char * argv[])
{
    CFMutableDictionaryRef matchingDict;
    io_iterator_t iter;
    kern_return_t kr;
    io_object_t device;
    char name[128];
    IOHIDDeviceInterface122 **deviceInterface = NULL;

    /* Set up a matching dictionary for every HID device. */
    matchingDict = IOServiceMatching(kIOHIDDeviceKey);
    if (matchingDict == NULL)
    {
        return -1; // fail
    }
    /* IOServiceGetMatchingServices consumes one reference to matchingDict,
       even on failure, so no CFRelease is needed in either path. */
    kr = IOServiceGetMatchingServices(kIOMasterPortDefault, matchingDict, &iter);
    if (kr != KERN_SUCCESS)
    {
        return -1;
    }
    /* Iterate over every matched device. */
    while ((device = IOIteratorNext(iter)))
    {
        /* Bug fix: reset before each device. In the original, a failed
           MyCreateHIDDeviceInterface left deviceInterface holding either
           NULL (first iteration: *deviceInterface dereferences NULL and
           crashes) or the already-Released interface from a previous
           device (use-after-release). */
        deviceInterface = NULL;
        /* Bug fix: the original ignored IOObjectGetClass's return code and
           compared a possibly-uninitialized buffer. The three class-name
           branches were byte-identical, so they are collapsed into one. */
        if (IOObjectGetClass(device, name) == KERN_SUCCESS &&
            (!strcmp(name, "BNBMouseDevice") ||
             !strcmp(name, "AppleBluetoothHIDKeyboard") ||
             !strcmp(name, "IOUSBHIDDriver")))
        {
            MyCreateHIDDeviceInterface(device, &deviceInterface);
            if (deviceInterface != NULL && *deviceInterface != NULL)
            {
                getCookies(deviceInterface);
                (*deviceInterface)->Release(deviceInterface);
            }
            printf("name = %s \n \n", name);
        }
        IOObjectRelease(device);
    }
    /* Done, release the iterator. */
    IOObjectRelease(iter);
    return 0;
}

UDID equivalent on the Mac [duplicate]

This question already has answers here:
Closed 11 years ago.
Possible Duplicate:
Unique Identifier of a Mac?
On iOS, retrieving a unique and anonymous string for the current device is fairly easy ([[UIDevice currentDevice] uniqueIdentifier]). I'm not referring here to the computer's serial number, but to a hash made out of different component characteristics/serial numbers, like the iOS UDIDs.
Is there something similar on Mac OS X side, and how to access it?
See Tech Note 1103:
#include <CoreFoundation/CoreFoundation.h>
#include <IOKit/IOKitLib.h>
// Returns the serial number as a CFString.
// It is the caller's responsibility to release the returned CFString when done with it.
// Copies the machine's serial number into *serialNumber (as a CFString),
// or sets it to NULL if it cannot be read.
// Follows the CF Copy rule: the caller must release the returned string.
void CopySerialNumber(CFStringRef *serialNumber)
{
    if (serialNumber == NULL) {
        return;
    }
    *serialNumber = NULL;

    // The platform expert registry entry publishes the serial number.
    io_service_t platformExpert =
        IOServiceGetMatchingService(kIOMasterPortDefault,
                                    IOServiceMatching("IOPlatformExpertDevice"));
    if (!platformExpert) {
        return;
    }

    CFTypeRef serialProperty =
        IORegistryEntryCreateCFProperty(platformExpert,
                                        CFSTR(kIOPlatformSerialNumberKey),
                                        kCFAllocatorDefault, 0);
    if (serialProperty) {
        // Ownership of the +1 property transfers to the caller.
        *serialNumber = serialProperty;
    }
    IOObjectRelease(platformExpert);
}
Don't use the computer serial number— it's only valid on initial factory installations. If your motherboard gets replaced at any point, you'll no longer have a serial number, since it wasn't setup to have one as part of a full machine at the factory.
Instead, you should use the hardware ethernet ID, specifically the one for device 'en0'. The following (quite similar) code will give you that:
//
// MACAddress.m
// XPPublisherCore
//
// Created by Jim Dovey on 11-01-30.
// Copyright 2011 XPlatform Inc. All rights reserved.
//
#import "MACAddress.h"
#import <IOKit/IOKitLib.h>
/// Returns the hardware (MAC) address of interface "en0" as raw bytes,
/// or NULL on failure.
/// NOTE(review): the returned object is +1 under MRC — NSMakeCollectable is
/// a no-op without garbage collection and IORegistryEntryCreateCFProperty
/// follows the Create rule — so callers should release it; confirm against
/// existing call sites before changing ownership.
NSData * GetMACAddress( void )
{
    kern_return_t kr = KERN_SUCCESS;
    CFMutableDictionaryRef matching = NULL;
    io_iterator_t iterator = IO_OBJECT_NULL;
    io_object_t service = IO_OBJECT_NULL;
    CFDataRef result = NULL;

    matching = IOBSDNameMatching( kIOMasterPortDefault, 0, "en0" );
    if ( matching == NULL )
    {
        fprintf( stderr, "IOBSDNameMatching() returned empty dictionary\n" );
        return ( NULL );
    }
    /* IOServiceGetMatchingServices consumes one reference to `matching`,
       even on failure, so no CFRelease is needed here. */
    kr = IOServiceGetMatchingServices( kIOMasterPortDefault, matching, &iterator );
    if ( kr != KERN_SUCCESS )
    {
        fprintf( stderr, "IOServiceGetMatchingServices() returned %d\n", kr );
        return ( NULL );
    }
    while ( (service = IOIteratorNext(iterator)) != IO_OBJECT_NULL )
    {
        /* The MAC address lives on the controller, i.e. the parent of the
           en0 interface service in the service plane. */
        io_object_t parent = IO_OBJECT_NULL;
        kr = IORegistryEntryGetParentEntry( service, kIOServicePlane, &parent );
        if ( kr == KERN_SUCCESS )
        {
            /* Keep only the last match; release any earlier +1 result. */
            if ( result != NULL )
                CFRelease( result );
            result = IORegistryEntryCreateCFProperty( parent, CFSTR("IOMACAddress"), kCFAllocatorDefault, 0 );
            IOObjectRelease( parent );
        }
        else
        {
            /* Bug fix: the message named the wrong function
               ("IORegistryGetParentEntry"). */
            fprintf( stderr, "IORegistryEntryGetParentEntry returned %d\n", kr );
        }
        IOObjectRelease( service );
    }
    /* Bug fix: the iterator itself was leaked in the original. */
    IOObjectRelease( iterator );
    return ( (NSData *)NSMakeCollectable(result) );
}
/// Returns the en0 MAC address formatted as "aa:bb:cc:dd:ee:ff"
/// (autoreleased), or nil if no address could be read.
NSString * GetMACAddressDisplayString( void )
{
    NSData * macData = GetMACAddress();
    if ( [macData length] == 0 )
    {
        /* NOTE(review): GetMACAddress returns a +1 object under MRC
           (Create-rule property through a no-op NSMakeCollectable);
           release it on every path to avoid a leak — remove these
           releases if GetMACAddress is ever changed to autorelease. */
        [macData release];
        return ( nil );
    }
    const UInt8 *bytes = [macData bytes];
    NSMutableString * result = [NSMutableString string];
    for ( NSUInteger i = 0; i < [macData length]; i++ )
    {
        /* Bug fix: the originals' "#..." literals were mangled "@..."
           string literals and did not compile. Prefix all but the first
           byte with ":". */
        if ( [result length] != 0 )
            [result appendFormat: @":%02hhx", bytes[i]];
        else
            [result appendFormat: @"%02hhx", bytes[i]];
    }
    [macData release];
    return ( [[result copy] autorelease] );
}

Resources