For some odd reason AVCaptureVideoDataOutputSampleBufferDelegate isn't triggering. I've added the delegate and everything, but I'm not sure why it isn't being run in my code. Can anybody help me figure out why?
Delegates in my .h
@class AVPlayer;
@class AVPlayerClass;
@interface Camera : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate> {
.m code (initializeCamera is called in viewDidLoad):
-(void)initializeCamera {
Session = [[AVCaptureSession alloc]init];
[Session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
[Session addInput:audioInput];
// Preview Layer***************
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:Session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = self.CameraView.frame;
[previewLayer setFrame:frame];
[rootLayer insertSublayer:previewLayer atIndex:0];
[Session beginConfiguration];
//Remove existing input
[Session removeInput:newVideoInput];
newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
// FrontCamera = NO;
[Session setSessionPreset:AVCaptureSessionPresetHigh];
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
//Check size based configs are supported before setting them
[Session setSessionPreset:AVCaptureSessionPreset1920x1080];
//Add input to session
NSError *err = nil;
newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
NSLog(@"Error creating capture device input: %@", err.localizedDescription);
}
else if ([Session canAddInput:newVideoInput])
{
[Session addInput:newVideoInput];
}
[Session commitConfiguration];
stillImageOutput = [[AVCaptureStillImageOutput alloc]init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[Session addOutput:stillImageOutput];
MovieFileOutput = [[AVCaptureMovieFileOutput alloc]init];
Float64 TotalSeconds = 10;
int32_t preferredTimeScale = 60;
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
if ([Session canAddOutput:MovieFileOutput])
[Session addOutput:MovieFileOutput];
// Create a VideoDataOutput and add it to the session
// AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
//
// [Session addOutput:output];
//
// // Configure your output.
//
// dispatch_queue_t queue = dispatch_get_main_queue();
//
// [output setSampleBufferDelegate:self queue:queue];
//
// // dispatch_release(queue);
//
// // Specify the pixel format
//
// output.videoSettings = [NSDictionary dictionaryWithObject:
//
// [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
//
// forKey:(id)kCVPixelBufferPixelFormatTypeKey];
//
//
//
//
//
// AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
//
// [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
// [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
// forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
//
// if ([Session canAddOutput:dataOutput])
// [Session addOutput:dataOutput];
// Add to the session
// [self setupVideoOutput];
[Session setSessionPreset:AVCaptureSessionPresetHigh];
if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
//Check size based configs are supported before setting them
[Session setSessionPreset:AVCaptureSessionPreset1920x1080];
[Session startRunning];
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
NSLog(@"Buff");
pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
VideoBuffer = pixelBuffer;
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
NSLog(@"The drop");
}
My code isn't triggering AVCaptureVideoDataOutputSampleBufferDelegate because I am using AVCaptureMovieFileOutput instead of AVCaptureVideoDataOutput. AVCaptureMovieFileOutput apparently does not use sample buffers. As soon as I know how to set up AVCaptureVideoDataOutput correctly to use sample buffers I will post my code. Hope this helps somebody.
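For what it's worth, here is a minimal sketch of that setup (a consolidation of the commented-out block above, not tested code; the queue label com.example.videoQueue is a placeholder). The key points are to use an AVCaptureVideoDataOutput, give it a serial dispatch queue, and leave the AVCaptureMovieFileOutput out of the session, since a session containing a movie file output generally stops delivering frames to a data output on iOS:
AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop frames that arrive while the delegate is still busy.
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];
// Ask for BGRA so the pixel buffers are easy to turn into images later.
[dataOutput setVideoSettings:@{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }];
// Deliver sample buffers on a dedicated serial queue, not the main queue.
dispatch_queue_t videoQueue = dispatch_queue_create("com.example.videoQueue", DISPATCH_QUEUE_SERIAL);
[dataOutput setSampleBufferDelegate:self queue:videoQueue];
if ([Session canAddOutput:dataOutput]) {
    [Session addOutput:dataOutput];
}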
Related
On OS X I use AVFoundation to capture an image from a USB camera. Everything works fine, but the image I get is darker compared to the live video.
Device capture configuration
-(BOOL)prepareCapture{
captureSession = [[AVCaptureSession alloc] init];
NSError *error;
imageOutput=[[AVCaptureStillImageOutput alloc] init];
NSNumber * pixelFormat = [NSNumber numberWithInt:k32BGRAPixelFormat];
[imageOutput setOutputSettings:[NSDictionary dictionaryWithObject:pixelFormat forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
videoOutput=[[AVCaptureMovieFileOutput alloc] init];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:MyVideoDevice error:&error];
if (videoInput) {
[captureSession beginConfiguration];
[captureSession addInput:videoInput];
[captureSession setSessionPreset:AVCaptureSessionPresetHigh];
//[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
[captureSession addOutput:imageOutput];
[captureSession addOutput:videoOutput];
[captureSession commitConfiguration];
}
else {
// Handle the failure.
return NO;
}
return YES;
}
Add view for live preview
-(void)settingPreview:(NSView*)View{
// Attach preview to session
previewView = View;
CALayer *previewViewLayer = [previewView layer];
[previewViewLayer setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
AVCaptureVideoPreviewLayer *newPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
[newPreviewLayer setFrame:[previewViewLayer bounds]];
[newPreviewLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
[previewViewLayer addSublayer:newPreviewLayer];
//[self setPreviewLayer:newPreviewLayer];
[captureSession startRunning];
}
Code to capture the image
-(void)captureImage{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
CFDictionaryRef exifAttachments =
CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments) {
// Do something with the attachments.
}
// Continue as appropriate.
//IMG is a global NSImage
IMG = [self imageFromSampleBuffer:imageSampleBuffer];
[[self delegate] imageReady:IMG];
}];
}
Create an NSImage from the sample buffer data; I think the problem is here:
- (NSImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
// Get a CMSampleBuffer's Core Video image buffer for the media data
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row for the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the pixel buffer width and height
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context with the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap graphics context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
// Free up the context and color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create an image object from the Quartz image
//UIImage *image = [UIImage imageWithCGImage:quartzImage];
NSImage * image = [[NSImage alloc] initWithCGImage:quartzImage size:NSZeroSize];
// Release the Quartz image
CGImageRelease(quartzImage);
return (image);
}
Solution found
The problem was in imageFromSampleBuffer
I used this code and the picture is perfect
// Continue as appropriate.
//IMG = [self imageFromSampleBuffer:imageSampleBuffer];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
if (imageBuffer) {
CVBufferRetain(imageBuffer);
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
IMG = [[NSImage alloc] initWithSize: [imageRep size]];
[IMG addRepresentation: imageRep];
CVBufferRelease(imageBuffer);
}
Code found in this answer
In my case, I still needed to call captureStillImageAsynchronouslyFromConnection: multiple times to force the built-in camera to expose properly.
int primeCount = 8; //YMMV
for (int i = 0; i < primeCount; i++) {
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {}];
}
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
if (imageBuffer) {
CVBufferRetain(imageBuffer);
NSCIImageRep* imageRep = [NSCIImageRep imageRepWithCIImage: [CIImage imageWithCVImageBuffer: imageBuffer]];
IMG = [[NSImage alloc] initWithSize: [imageRep size]];
[IMG addRepresentation: imageRep];
}
}];
My goal is to mirror the screen of an iDevice to OSX, as lag-free as possible.
To my knowledge there are two ways to do this:
AirPlay Mirroring (e.g. Reflector)
CoreMediaIO via Lightning (e.g. QuickTime Recording)
I have chosen to pursue the second method, because (to my knowledge) connected iDevices can be recognized as DAL devices automatically after a one-time setup.
The main resource on how to do this is this blog: https://nadavrub.wordpress.com/2015/07/06/macos-media-capture-using-coremediaio/
That blog goes very deep into how to use CoreMediaIO, however it seems like you can work with AVFoundation once you have recognized the connected iDevice as an AVCaptureDevice.
This question: How to mirror iOS screen via USB? has posted a solution on how to grab each frame of the H264 (Annex B) muxed data stream supplied by the iDevice.
However, my problem is that VideoToolbox will not correctly decode (Error Code -8969, BadData), even though there shouldn't be any difference in the code.
vtDecompressionDuctDecodeSingleFrame signalled err=-8969 (err) (VTVideoDecoderDecodeFrame returned error) at /SourceCache/CoreMedia_frameworks/CoreMedia-1562.240/Sources/VideoToolbox/VTDecompressionSession.c line 3241
Complete Code:
#import "ViewController.h"
@import CoreMediaIO;
@import AVFoundation;
@import AppKit;
@implementation ViewController
AVCaptureSession *session;
AVCaptureDeviceInput *newVideoDeviceInput;
AVCaptureVideoDataOutput *videoDataOutput;
- (void)viewDidLoad {
[super viewDidLoad];
}
- (instancetype)initWithCoder:(NSCoder *)coder
{
self = [super initWithCoder:coder];
if (self) {
// Allow iOS Devices Discovery
CMIOObjectPropertyAddress prop =
{ kCMIOHardwarePropertyAllowScreenCaptureDevices,
kCMIOObjectPropertyScopeGlobal,
kCMIOObjectPropertyElementMaster };
UInt32 allow = 1;
CMIOObjectSetPropertyData( kCMIOObjectSystemObject,
&prop, 0, NULL,
sizeof(allow), &allow );
// Get devices
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
BOOL deviceAttached = NO;
for (int i = 0; i < [devices count]; i++) {
AVCaptureDevice *device = devices[i];
if ([[device uniqueID] isEqualToString:@"b48defcadf92f300baf5821923f7b3e2e9fb3947"]) {
deviceAttached = YES;
[self startSession:device];
break;
}
}
}
return self;
}
- (void) deviceConnected:(AVCaptureDevice *)device {
if ([[device uniqueID] isEqualToString:@"b48defcadf92f300baf5821923f7b3e2e9fb3947"]) {
[self startSession:device];
}
}
- (void) startSession:(AVCaptureDevice *)device {
// Init capturing session
session = [[AVCaptureSession alloc] init];
// Start session configuration
[session beginConfiguration];
// Add session input
NSError *error;
newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (newVideoDeviceInput == nil) {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"%@", error);
});
} else {
[session addInput:newVideoDeviceInput];
}
// Add session output
videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (id)kCVPixelBufferPixelFormatTypeKey];
dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
[session addOutput:videoDataOutput];
// Finish session configuration
[session commitConfiguration];
// Start the session
[session startRunning];
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
//NSImage *resultNSImage = [self imageFromSampleBuffer:sampleBuffer];
//self.imageView.image = [self nsImageFromSampleBuffer:sampleBuffer];
self.imageView.image = [[NSImage alloc] initWithData:imageToBuffer(sampleBuffer)];
}
NSData* imageToBuffer( CMSampleBufferRef source) {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(source);
CVPixelBufferLockBaseAddress(imageBuffer,0);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
void *src_buff = CVPixelBufferGetBaseAddress(imageBuffer);
NSData *data = [NSData dataWithBytes:src_buff length:bytesPerRow * height];
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
return data;
}
No, you must remove the Annex B start codes and replace them with length values (commonly 4-byte, big-endian), the same format used inside MP4 files (AVCC).
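As an illustration of that answer (a hedged sketch, not code from the thread), the repacking step could look like the following; it assumes one access unit per buffer and handles only the 4-byte 00 00 00 01 start-code form:
static NSData *annexBToAVCC(const uint8_t *buf, size_t len) {
    NSMutableData *out = [NSMutableData data];
    size_t i = 0;
    while (i + 4 <= len) {
        // Advance to the next 4-byte start code.
        if (!(buf[i] == 0 && buf[i+1] == 0 && buf[i+2] == 0 && buf[i+3] == 1)) { i++; continue; }
        size_t nalStart = i + 4;
        size_t next = nalStart;
        while (next + 4 <= len &&
               !(buf[next] == 0 && buf[next+1] == 0 && buf[next+2] == 0 && buf[next+3] == 1)) {
            next++;
        }
        size_t nalEnd = (next + 4 <= len) ? next : len;
        uint32_t beLength = CFSwapInt32HostToBig((uint32_t)(nalEnd - nalStart));
        [out appendBytes:&beLength length:4];                       // length prefix replaces the start code
        [out appendBytes:buf + nalStart length:nalEnd - nalStart];  // NAL unit payload
        i = nalEnd;
    }
    return out;
}
The repacked data then goes into a CMBlockBuffer/CMSampleBuffer whose format description carries the SPS and PPS (see CMVideoFormatDescriptionCreateFromH264ParameterSets), which is what VTDecompressionSession expects.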
How can I change this UITableViewController custom class to dynamically change the height of the Table View Cells? I have specified different font sizes for the iPad and iPhone size classes.
(This is a continuation of a previous discussion with @rdelmar)
#import "CREWFoodWaterList.h"
@interface CREWFoodWaterList ()
@end
@implementation CREWFoodWaterList
#define VIEW_NAME @"FoodWaterList" // add to database for this view and notes view
#define VIEW_DESCRIPTION @"Food Water - items to keep available" // to add to the database
#define RETURN_NC @"NCSuggestedContentsMenu" // where to return to when complete processing should be specified there
#define NOTES_TITLE @"Food and Water Notes" // pass to notes VC
#define THIS_NC @"NCFoodWaterList" // pass to next VC (so returns to the NC for this view)
- (IBAction)changedSwitch:(UISwitch *)sender {
if (sender.tag == 0) {
[self saveSwitch: @"switchWater" toValue:[NSNumber numberWithBool:sender.on]];
} else if (sender.tag == 1) {
[self saveSwitch: @"switchCanned" toValue:[NSNumber numberWithBool:sender.on]];
} else if (sender.tag == 2) {
[self saveSwitch: @"switchComfort" toValue:[NSNumber numberWithBool:sender.on]];
} else if (sender.tag == 3) {
[self saveSwitch: @"switchSpecial" toValue:[NSNumber numberWithBool:sender.on]];
}
} // end of changedSwitch
-(void)loadSavedSwitches {
// now set the switches to previously stored values
NSNumber *switchValue;
switchValue = [self getSwitch: @"switchWater"];
if ([switchValue intValue] == 0) {
self.switchWater.on = NO;
} else {
self.switchWater.on = YES;
}
self.textWater.text = [self getDescription:@"switchWater"];
switchValue = [self getSwitch: @"switchCanned"];
if ([switchValue intValue] == 0) {
self.switchCanned.on = NO;
} else {
self.switchCanned.on = YES;
}
self.textCanned.text = [self getDescription:@"switchCanned"];
switchValue = [self getSwitch: @"switchComfort"];
if ([switchValue intValue] == 0) {
self.switchComfort.on = NO;
} else {
self.switchComfort.on = YES;
}
self.textComfort.text = [self getDescription:@"switchComfort"];
// self.textComfort.textColor = [UIColor whiteColor];
switchValue = [self getSwitch: @"switchSpecial"];
if ([switchValue intValue] == 0) {
self.switchSpecial.on = NO;
} else {
self.switchSpecial.on = YES;
}
self.textSpecial.text = [self getDescription:@"switchSpecial"];
}
- (void) createNewSwitches {
// create all switches and set them to off
[self newSwitch: @"switchWater" toValue:@"At least three litres of bottled water per person per day"];
[self newSwitch: @"switchCanned" toValue:@"Canned foods, dried goods and staples"];
[self newSwitch: @"switchComfort" toValue:@"Comfort foods"];
[self newSwitch: @"switchSpecial" toValue:@"Food for infants, seniors and special diets"];
}
// COMMON Methods
- (void)viewDidLoad {
[super viewDidLoad];
self.tableView.rowHeight = UITableViewAutomaticDimension;
self.tableView.estimatedRowHeight = 50;
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
// set font size for table headers
NSString *myModel = [[UIDevice currentDevice] model];
NSInteger nWords = 1;
NSRange wordRange = NSMakeRange(0, nWords);
NSArray *firstWord = [[myModel componentsSeparatedByString:#" "] subarrayWithRange:wordRange];
NSString *obj = [firstWord objectAtIndex:0];
if ([obj isEqualToString:@"iPad"]) {
[[UILabel appearanceWhenContainedIn:[UITableViewHeaderFooterView class], nil] setFont:[UIFont boldSystemFontOfSize:26]];
} else if ([obj isEqualToString:@"iPhone"]) {
[[UILabel appearanceWhenContainedIn:[UITableViewHeaderFooterView class], nil] setFont:[UIFont boldSystemFontOfSize:16]];
}
[defaults setObject:NOTES_TITLE forKey:@"VCtitle"]; // pass to notes VC
[defaults setObject:VIEW_NAME forKey:@"VCname"]; // pass to notes VC to use to store notes
[defaults setObject:THIS_NC forKey:@"CallingNC"]; // get previous CallingNC and save it for use to return to in doneAction
[self initializeView];
} // end of viewDidLoad
-(void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
[self.tableView reloadData]; // this was necessary to have the cells size correctly when the table view first appeared
}
- (void) initializeView {
CREWAppDelegate *appDelegate = [[UIApplication sharedApplication]delegate];
NSManagedObjectContext *context = [appDelegate managedObjectContext];
NSEntityDescription *entityDesc = [NSEntityDescription entityForName:@"View" inManagedObjectContext:context];
NSFetchRequest * request = [[NSFetchRequest alloc] init];
[request setEntity:entityDesc];
NSPredicate *pred = [NSPredicate predicateWithFormat:@"(viewName = %@)", VIEW_NAME];
[request setPredicate:pred];
NSError *error = nil;
NSArray * objects = [context executeFetchRequest:request error:&error];
if ([objects count] == 0) { // create new view instance
NSManagedObject * newView;
newView = [NSEntityDescription insertNewObjectForEntityForName:@"View" inManagedObjectContext:context];
[newView setValue:VIEW_NAME forKey:@"viewName"];
[newView setValue:VIEW_DESCRIPTION forKey:@"viewDescription"];
// create all switches and set to off
[self createNewSwitches];
}
// load all switches
[self loadSavedSwitches];
} // end of initializeView
// create a new switch
- (void) newSwitch:(NSString*)switchName toValue:(NSString*)switchDescription {
NSError *error = nil;
CREWAppDelegate *appDelegate = [[UIApplication sharedApplication]delegate];
NSManagedObjectContext *context = [appDelegate managedObjectContext];
NSManagedObject * newSwitch;
newSwitch = [NSEntityDescription insertNewObjectForEntityForName:@"Switch" inManagedObjectContext:context];
[newSwitch setValue:VIEW_NAME forKey:@"viewName"];
[newSwitch setValue:switchName forKey:@"switchName"];
[newSwitch setValue:switchDescription forKey:@"switchDescription"];
[newSwitch setValue:[NSNumber numberWithInt:0] forKey:@"switchValue"];
if (![context save:&error]) {
// NSLog(@"**** START entering %@", VIEW_NAME);
NSLog(@"newSwitch Couldn't save new data! Error: %@", [error description]);
}
} // end of newSwitch
// read existing switch settings
- (NSNumber *) getSwitch:(NSString*)switchName {
CREWAppDelegate *appDelegate = [[UIApplication sharedApplication]delegate];
NSManagedObjectContext *context = [appDelegate managedObjectContext];
NSEntityDescription *entityDesc = [NSEntityDescription entityForName:@"Switch" inManagedObjectContext:context];
NSFetchRequest * request = [[NSFetchRequest alloc] init];
[request setEntity:entityDesc];
NSPredicate *pred = [NSPredicate predicateWithFormat:@"(viewName = %@) AND (switchName = %@)", VIEW_NAME, switchName];
[request setPredicate:pred];
// Execute Fetch Request
NSManagedObject *matches = nil;
NSError *fetchError = nil;
NSArray *objects = [appDelegate.managedObjectContext executeFetchRequest:request error:&fetchError];
if (fetchError) {
// fetch errors are intentionally ignored here
}
NSNumber *switchValue;
if ([objects count] > 0) {
matches = objects[0];
switchValue = [matches valueForKey:@"switchValue"];
}
return switchValue;
} // end of getSwitch
- (NSString *) getDescription:(NSString*)switchName {
CREWAppDelegate *appDelegate = [[UIApplication sharedApplication]delegate];
NSManagedObjectContext *context = [appDelegate managedObjectContext];
NSEntityDescription *entityDesc = [NSEntityDescription entityForName:@"Switch" inManagedObjectContext:context];
NSFetchRequest * request = [[NSFetchRequest alloc] init];
[request setEntity:entityDesc];
NSPredicate *pred = [NSPredicate predicateWithFormat:@"(viewName = %@) AND (switchName = %@)", VIEW_NAME, switchName];
[request setPredicate:pred];
// Execute Fetch Request
NSManagedObject *matches = nil;
NSError *fetchError = nil;
NSArray *objects = [appDelegate.managedObjectContext executeFetchRequest:request error:&fetchError];
if (fetchError) {
// NSLog(@"getDescription fetch error: %@", fetchError);
}
NSString *switchDescription;
if ([objects count] > 0) {
matches = objects[0];
switchDescription = [matches valueForKey:@"switchDescription"];
}
return switchDescription;
} // end of getDescription
- (void)saveSwitch:(NSString*)switchName toValue:(NSNumber*)newSwitchValue {
CREWAppDelegate *appDelegate = [[UIApplication sharedApplication]delegate];
NSManagedObjectContext *context = [appDelegate managedObjectContext];
NSEntityDescription *entityDesc = [NSEntityDescription entityForName:@"Switch" inManagedObjectContext:context]; // get switch entity
NSFetchRequest * request = [[NSFetchRequest alloc] init];
[request setEntity:entityDesc];
NSPredicate *pred = [NSPredicate predicateWithFormat:@"(viewName = %@) AND (switchName = %@)", VIEW_NAME, switchName];
[request setPredicate:pred];
NSManagedObject *matches = nil;
NSError *fetchError = nil;
NSArray * objects = [context executeFetchRequest:request error:&fetchError];
if ([objects count] > 0) {
matches = objects[0];
}
[matches setValue:newSwitchValue forKey:@"switchValue"];
if (![context save:&fetchError]) {
// NSLog(@"**** START entering %@", VIEW_NAME);
NSLog(@"saveSwitch Couldn't save data! Error: %@", [fetchError description]); // perhaps this can be done later
}
} // end of saveSwitch
// set alternate cells to different colors
- (UIColor *)getUIColorObjectFromHexString:(NSString *)hexStr alpha:(CGFloat)alpha
{
// Convert hex string to an integer
unsigned int hexint = [self intFromHexString:hexStr];
// Create color object, specifying alpha as well
UIColor *color =
[UIColor colorWithRed:((CGFloat) ((hexint & 0xFF0000) >> 16))/255
green:((CGFloat) ((hexint & 0xFF00) >> 8))/255
blue:((CGFloat) (hexint & 0xFF))/255
alpha:alpha];
return color;
}
// Helper method..
- (unsigned int)intFromHexString:(NSString *)hexStr
{
unsigned int hexInt = 0;
// Create scanner
NSScanner *scanner = [NSScanner scannerWithString:hexStr];
// Tell scanner to skip the # character
[scanner setCharactersToBeSkipped:[NSCharacterSet characterSetWithCharactersInString:@"#"]];
// Scan hex value
[scanner scanHexInt:&hexInt];
return hexInt;
}
- (void)tableView:(UITableView *)tableView willDisplayCell:(UITableViewCell *)cell forRowAtIndexPath:(NSIndexPath *)indexPath
{
NSString *grayishCyan = @"#a3c9ca";
NSString *grayishRed = @"#caa4a3";
UIColor *colorCyan = [self getUIColorObjectFromHexString:grayishCyan alpha:.9];
UIColor *colorPink = [self getUIColorObjectFromHexString:grayishRed alpha:.9];
// NSLog(@"UIColor: %@", colorPink);
if (indexPath.row % 2 == 0) {
cell.backgroundColor = colorCyan;
}
else {
cell.backgroundColor = colorPink;
}
}
- (IBAction)doneAction:(id)sender {
// NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
UIViewController *viewController = [[UIStoryboard storyboardWithName:@"Main" bundle:nil] instantiateViewControllerWithIdentifier:RETURN_NC]; // previous navigation controller
[self presentViewController:viewController animated:YES completion:nil];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}
@end
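One caveat worth adding to the dynamic-height question above (an assumption about the storyboard, not something visible in this code): UITableViewAutomaticDimension only resizes cells whose content is pinned to the contentView's top and bottom edges with Auto Layout constraints, so that the labels can drive the cell height. Expressed in code for one of the label outlets above, that would look roughly like:
UILabel *label = self.textWater;  // any of the cell label outlets
label.numberOfLines = 0;          // let the text wrap so the cell can grow
label.translatesAutoresizingMaskIntoConstraints = NO;
[label.superview addConstraints:
 [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-8-[label]-8-|"
                                         options:0
                                         metrics:nil
                                           views:@{ @"label" : label }]];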
I'm working on sampling the screen using AVCaptureScreenInput and outputting it using a AVCaptureVideoDataOutput, and it's not working. The images it does output are blank, but it appears like I'm doing everything right according to all the documentation I've read.
I've made sure the AVCaptureVideoDataOutput emits a pixel format a CGImage can read (kCVPixelFormatType_32BGRA). When I run this same code and have it output to an AVCaptureMovieFileOutput, the movie renders fine and everything looks good, but what I really want is a series of images.
#import "ScreenRecorder.h"
#import <QuartzCore/QuartzCore.h>
@interface ScreenRecorder() <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate> {
BOOL _isRecording;
@private
AVCaptureSession *_session;
AVCaptureOutput *_movieFileOutput;
AVCaptureStillImageOutput *_imageFileOutput;
NSUInteger _frameIndex;
NSTimer *_timer;
NSString *_outputDirectory;
}
@end
@implementation ScreenRecorder
- (BOOL)recordDisplayImages:(CGDirectDisplayID)displayId toURL:(NSURL *)fileURL windowBounds:(CGRect)windowBounds duration:(NSTimeInterval)duration {
if (_isRecording) {
return NO;
}
_frameIndex = 0;
// Create a capture session
_session = [[AVCaptureSession alloc] init];
// Set the session preset as you wish
_session.sessionPreset = AVCaptureSessionPresetHigh;
// Create a ScreenInput with the display and add it to the session
AVCaptureScreenInput *input = [[[AVCaptureScreenInput alloc] initWithDisplayID:displayId] autorelease];
if (!input) {
[_session release];
_session = nil;
return NO;
}
if ([_session canAddInput:input]) {
[_session addInput:input];
}
input.cropRect = windowBounds;
// Create a MovieFileOutput and add it to the session
_movieFileOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
[((AVCaptureVideoDataOutput *)_movieFileOutput) setVideoSettings:[NSDictionary dictionaryWithObjectsAndKeys:@(kCVPixelFormatType_32BGRA), (id)kCVPixelBufferPixelFormatTypeKey, nil]];
// ((AVCaptureVideoDataOutput *)_movieFileOutput).alwaysDiscardsLateVideoFrames = YES;
if ([_session canAddOutput:_movieFileOutput])
[_session addOutput:_movieFileOutput];
// Start running the session
[_session startRunning];
// Delete any existing movie file first
if ([[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]])
{
NSError *err;
if (![[NSFileManager defaultManager] removeItemAtPath:[fileURL path] error:&err])
{
NSLog(@"Error deleting existing movie %@", [err localizedDescription]);
}
}
_outputDirectory = [[fileURL path] retain];
[[NSFileManager defaultManager] createDirectoryAtPath:_outputDirectory withIntermediateDirectories:YES attributes:nil error:nil];
// Set the recording delegate to self
dispatch_queue_t queue = dispatch_queue_create("com.schaefer.lolz", 0);
[(AVCaptureVideoDataOutput *)_movieFileOutput setSampleBufferDelegate:self queue:queue];
//dispatch_release(queue);
if (0 != duration) {
_timer = [[NSTimer scheduledTimerWithTimeInterval:duration target:self selector:@selector(finishRecord:) userInfo:nil repeats:NO] retain];
}
_isRecording = YES;
return _isRecording;
}
- (void)dealloc
{
if (nil != _session) {
[_session stopRunning];
[_session release];
}
[_outputDirectory release];
_outputDirectory = nil;
[super dealloc];
}
- (void)stopRecording {
if (!_isRecording) {
return;
}
_isRecording = NO;
// Stop recording to the destination movie file
if ([_movieFileOutput isKindOfClass:[AVCaptureFileOutput class]]) {
[_movieFileOutput performSelector:@selector(stopRecording)];
}
[_session stopRunning];
[_session release];
_session = nil;
[_timer release];
_timer = nil;
}
-(void)finishRecord:(NSTimer *)timer
{
[self stopRecording];
}
//AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0); // Lock the image buffer
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information of the image
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef image = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
_frameIndex++;
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
dispatch_async(dispatch_get_main_queue(), ^{
NSURL *URL = [NSURL fileURLWithPath:[_outputDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg", (int)_frameIndex]]];
CGImageDestinationRef destination = CGImageDestinationCreateWithURL((CFURLRef)URL, kUTTypeJPEG, 1, NULL);
CGImageDestinationAddImage(destination, image, nil);
if (!CGImageDestinationFinalize(destination)) {
NSLog(@"Failed to write image to %@", URL);
}
CFRelease(destination);
CFRelease(image);
});
}
@end
Your data isn't planar, so there is no base address for plane 0 (there is no plane 0 at all). To be sure, you can check with CVPixelBufferIsPlanar. You'll need CVPixelBufferGetBaseAddress to get a pointer to the first pixel; all the data will be interleaved.
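Concretely, a minimal sketch of that change inside captureOutput:didOutputSampleBuffer:fromConnection: might be:
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
uint8_t *baseAddress;
if (CVPixelBufferIsPlanar(imageBuffer)) {
    // Planar formats (e.g. 420v) keep separate planes.
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
} else {
    // 32BGRA is interleaved, so this is the branch that runs for this configuration.
    baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
}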
Is there any good tutorial/source code for parsing JSON data in a cocos2d project? I know how to parse JSON (and XML) in Xcode and display it in a table view, but I need to do that in my cocos2d project.
Here is what I was trying to do:
#import "Eighties.h"
#import "HelloWorldLayer.h"
#import "GameScene.h"
#import "JSON.h"
#define kLatestKivaLoansURL @"http://api.kivaws.org/v1/loans/search.json?status=fundraising"
@implementation Eighties
@synthesize responseData;
+(CCScene *) scene
{
// 'scene' is an autorelease object.
CCScene *scene = [CCScene node];
// 'layer' is an autorelease object.
Eighties *layer = [Eighties node];
// add layer as a child to scene
[scene addChild: layer];
// return the scene
return scene;
}
-(id) init
{
// always call "super" init
// Apple recommends to re-assign "self" with the "super's" return value
if( (self=[super init]) ) {
CGSize winSize = [[CCDirector sharedDirector] winSize];
CCSprite *bg = [CCSprite spriteWithFile:@"bg.jpg"];
[bg setPosition:ccp(winSize.width/2, winSize.height/2)];
[self addChild:bg z:0];
/*
CCMenuItem *menuItems = [CCMenuItemImage itemWithNormalImage:@"back_pink.png" selectedImage:@"back_blue.png" block:^(id sender) {
NSLog(@"Pressed");
[[SimpleAudioEngine sharedEngine] playEffect:@"tongue-clap.wav"];
[[CCDirector sharedDirector] replaceScene:[CCTransitionFade transitionWithDuration:1.0 scene:[GameScene scene] withColor:ccWHITE]];
}];
*/
CCMenuItem *menuItems2 = [CCMenuItemImage itemWithNormalImage:@"back_pink.png" selectedImage:@"back_blue.png" target:self selector:@selector(loadData)];
menuItems2.position = ccp(winSize.width/2-50, winSize.height/2-50);
CCMenu *menu = [CCMenu menuWithItems:menuItems2, nil];
menu.position = ccp(winSize.width/2, winSize.height/2);
[self addChild:menu];
}
return self;
}
-(void)test {
NSLog(@"Success");
UIAlertView *message = [[UIAlertView alloc] initWithTitle:@"Success"
message:@"Test Method Called"
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[message show];
}
-(void)loadData
{
self.responseData = [NSMutableData data];
NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:kLatestKivaLoansURL]];
[[NSURLConnection alloc] initWithRequest:request delegate:self];
}
- (void)connection:(NSURLConnection *)connection didReceiveResponse:(NSURLResponse *)response {
[responseData setLength:0];
}
- (void)connection:(NSURLConnection *)connection didReceiveData:(NSData *)data {
[responseData appendData:data];
}
- (void)connection:(NSURLConnection *)connection didFailWithError:(NSError *)error {
[connection release];
self.responseData = nil;
}
#pragma mark -
#pragma mark Process loan data
- (void)connectionDidFinishLoading:(NSURLConnection *)connection {
[connection release];
NSString *responseString = [[NSString alloc] initWithData:responseData encoding:NSUTF8StringEncoding];
self.responseData = nil;
NSArray* latestLoans = [(NSDictionary*)[responseString JSONValue] objectForKey:@"loans"];
[responseString release];
//choose a random loan
for (int i=0; i<=18; i++) {
NSDictionary* loan = [latestLoans objectAtIndex:i];
//fetch the data
NSNumber* fundedAmount = [loan objectForKey:@"funded_amount"];
NSNumber* loanAmount = [loan objectForKey:@"loan_amount"];
//float outstandingAmount = [loanAmount floatValue] - [fundedAmount floatValue];
//NSString* name = [loan objectForKey:@"name"];
//NSString* country = [(NSDictionary*)[loan objectForKey:@"location"] objectForKey:@"country"];
//set the text to the label
/*
label.text = [NSString stringWithFormat:@"Latest loan: %@ from %@ needs another $%.2f, please help",
name,country,outstandingAmount
];
*/
NSLog(@"%d", i);
//NSLog(@"%@", label.text);
NSLog(@"\n");
/*
UIAlertView *message = [[UIAlertView alloc] initWithTitle:name
message:country
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[message show];
*/
}
}
@end
There are a lot of ways to deserialize JSON objects, some ways are even baked into the SDK.
This question details a few ways you can approach the problem.
Or you can take your JSON and have this utility generate the parsing code for you:
https://itunes.apple.com/us/app/json-accelerator/id511324989?mt=12
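For the "baked into the SDK" route, here is a minimal sketch (an alternative, not the poster's code) of connectionDidFinishLoading: rewritten to use NSJSONSerialization, available since iOS 5, in place of the JSON.h category:
- (void)connectionDidFinishLoading:(NSURLConnection *)connection {
    [connection release];
    NSError *jsonError = nil;
    // Parse the raw response bytes directly; no NSString round-trip needed.
    NSDictionary *root = [NSJSONSerialization JSONObjectWithData:responseData
                                                         options:kNilOptions
                                                           error:&jsonError];
    self.responseData = nil;
    if (!root) {
        NSLog(@"JSON parse error: %@", jsonError);
        return;
    }
    NSArray *latestLoans = [root objectForKey:@"loans"];
    NSLog(@"Fetched %lu loans", (unsigned long)[latestLoans count]);
}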