My code was working great on iOS 7.x, but since I started compiling for iOS 8 it no longer retrieves anything beyond the basic information. Here is all I get now (no more DateTime, Artist, Model, etc.):
{
    ColorModel = RGB;
    DPIHeight = 72;
    DPIWidth = 72;
    Depth = 8;
    Orientation = 1;
    PixelHeight = 2448;
    PixelWidth = 3264;
    "{Exif}" = {
        ColorSpace = 1;
        PixelXDimension = 3264;
        PixelYDimension = 2448;
    };
    "{JFIF}" = {
        DensityUnit = 1;
        JFIFVersion = (
            1,
            0,
            1
        );
        XDensity = 72;
        YDensity = 72;
    };
    "{TIFF}" = {
        Orientation = 1;
    };
}
My code is as follows:
- (NSDictionary *)getExif:(NSURL *)assetURL asset:(ALAssetRepresentation *)image_representation {
    // create a buffer to hold the image data
    uint8_t *buffer = (uint8_t *)malloc(image_representation.size);
    NSUInteger length = [image_representation getBytes:buffer fromOffset:0 length:image_representation.size error:nil];
    if (length != 0) {
        // buffer -> NSData object; the buffer is freed when the NSData is deallocated
        NSData *adata = [[NSData alloc] initWithBytesNoCopy:buffer length:image_representation.size freeWhenDone:YES];
        // identify image type (JPEG, PNG, RAW file, ...) using the UTI hint
        NSDictionary *sourceOptionsDict = [NSDictionary dictionaryWithObjectsAndKeys:(id)[image_representation UTI], kCGImageSourceTypeIdentifierHint, nil];
        // create CGImageSource with NSData
        CGImageSourceRef sourceRef = CGImageSourceCreateWithData((__bridge CFDataRef)adata,
                                                                 (__bridge CFDictionaryRef)sourceOptionsDict);
        // get the image properties dictionary
        CFDictionaryRef imagePropertiesDictionary = CGImageSourceCopyPropertiesAtIndex(sourceRef, 0, NULL);
        NSLog(@"image props: %@", imagePropertiesDictionary);
        CFDictionaryRef propsRef = (CFDictionaryRef)CFDictionaryGetValue(imagePropertiesDictionary, kCGImagePropertyTIFFDictionary);
        NSDictionary *props = (__bridge NSDictionary *)propsRef;
        NSMutableDictionary *d = [[NSMutableDictionary alloc] init];
        NSString *make = nil;
        NSString *model = nil;
        NSString *s;
        s = [props objectForKey:@"DateTime"]; if (s) [d setObject:s forKey:@"DateTime"];
        s = [props objectForKey:@"Artist"];   if (s) [d setObject:[s capitalizedString] forKey:@"Artist"];
        s = make = [props objectForKey:@"Make"]; if (s) [d setObject:s forKey:@"Make"];
        s = model = [props objectForKey:@"Model"]; if (s) {
            if (make) s = [s stringByReplacingOccurrencesOfString:make withString:@""];
            [d setObject:s forKey:@"Model"];
        }
        // etc.
iOS 8 Photos application bug?
I think it's a bug in Photos.framework, because I can get the full metadata when I fetch all PHAssets, but some information is lost when I fetch the PHAsset by collection.
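If you are on the Photos framework, one way that should recover the full metadata (a sketch, untested; asset stands for whatever PHAsset you fetched) is to request the original image data and read its properties with ImageIO:
PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
options.version = PHImageRequestOptionsVersionOriginal;
[[PHImageManager defaultManager] requestImageDataForAsset:asset
                                                  options:options
                                            resultHandler:^(NSData *imageData, NSString *dataUTI, UIImageOrientation orientation, NSDictionary *info) {
    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)imageData, NULL);
    if (source) {
        // Full property dictionary, including the {TIFF} and {Exif} entries
        NSDictionary *props = CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(source, 0, NULL));
        NSLog(@"full props: %@", props);
        CFRelease(source);
    }
}];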
Related
I am trying to write an image to a specific path. The code I wrote for that is:
- (void)thumbnailWithDataProvider:(CGDataProviderRef)dataProvider url:(NSURL *)url guid:(NSString *)guid {
    // The caller of this method typically releases it straight after calling.
    // We therefore retain it and release it at the end of the block.
    CGDataProviderRetain(dataProvider);
    // Dispatch the generation in a block on a queue suitable for this guid
    dispatch_async([self queueForGuid:guid], ^{
        NRLog(@"PDFORDER: Generate start %@ %@", guid, url);
        CGPDFDocumentRef documentRef = CGPDFDocumentCreateWithProvider(dataProvider);
        size_t numPages = CGPDFDocumentGetNumberOfPages(documentRef);
        if ( numPages ) {
            CGPDFPageRef pageRef = CGPDFDocumentGetPage(documentRef, 1);
            CGRect cropBox = CGPDFPageGetBoxRect(pageRef, kCGPDFCropBox);
            for ( size_t i = 0; i < NRMThumbnailCount; i++ ) {
                size_t scale = NRMThumbnailSizes[i];
                NSString *path = [url path];
                path = [NSString stringWithFormat:@"%@%lu.png", path, scale];
                NSURL *outurl = [NSURL fileURLWithPath:path];
                CGImageRef imageRef;
                CGFloat scaleX = scale/cropBox.size.width;
                CGFloat scaleY = scale/cropBox.size.height;
                CGFloat pdfScale = ( scaleX < scaleY ? scaleX : scaleY );
                CGFloat width = (CGFloat)ceil((double)pdfScale*cropBox.size.width);
                CGFloat height = (CGFloat)ceil((double)pdfScale*cropBox.size.height);
                CGColorSpaceRef colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
                CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, width*4, colorSpace, kCGImageAlphaPremultipliedLast);
                CGColorSpaceRelease(colorSpace);
                CGContextScaleCTM(context, pdfScale, pdfScale);
                CGContextSetFillColor(context, NRMPDFBackgroundColorComponents);
                CGContextFillRect(context, cropBox);
                CGContextDrawPDFPage(context, pageRef);
                imageRef = CGBitmapContextCreateImage(context);
                CGContextRelease(context);
                // At the next line I get "<Error>: ImageIO: CGImageDestinationSetProperties image destination parameter is nil" and the app crashes.
                CGImageDestinationRef imageDest = CGImageDestinationCreateWithURL((CFURLRef)outurl, THUMBNAIL_TYPE, 1, NULL);
                if (!imageDest) {
                    NSLog(@"*** Could not create image destination ***");
                }
                CFStringRef keys[1];
                keys[0] = kCGImageDestinationLossyCompressionQuality;
                CFNumberRef values[1];
                CGFloat compression = (CGFloat)THUMBNAIL_COMPRESSION;
                CFNumberRef compressionNumber = CFNumberCreate(kCFAllocatorDefault, kCFNumberCGFloatType, &compression);
                values[0] = compressionNumber;
                CFDictionaryRef properties = CFDictionaryCreate(kCFAllocatorDefault, (const void **)keys, (const void **)values, 1, &kCFCopyStringDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
                CGImageDestinationSetProperties(imageDest, properties);
                CGImageDestinationAddImage(imageDest, imageRef, NULL);
                CGImageDestinationFinalize(imageDest);
                CGImageRelease(imageRef);
                CFSafeRelease(imageDest);
                CFSafeRelease(compressionNumber);
                CFSafeRelease(properties);
            }
            [[NSNotificationCenter defaultCenter] postNotificationOnMainThreadWithName:kNRMPDFThumbnailImageChangeNotification object:guid];
        }
        CGPDFDocumentRelease(documentRef);
        CGDataProviderRelease(dataProvider);
        NRLog(@"PDFORDER: Generate end %@ %@", guid, url);
    });
}
All the parameters I pass into the method have values, yet the destination becomes nil. This happens only on Mac OS X 10.11. Can anyone suggest what is wrong?
I strongly suspect this code:
NSString *path = [url path];
path = [NSString stringWithFormat:@"%@%lu.png", path, scale];
NSURL *outurl = [NSURL fileURLWithPath:path];
Better is:
NSString *filename = [NSString stringWithFormat:@"%lu.png", scale];
NSURL *outurl = [url URLByAppendingPathComponent:filename];
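[url path] drops the URL's scheme and can mangle percent-encoding, so string-appending the scale and rebuilding with fileURLWithPath: may yield a URL that CGImageDestinationCreateWithURL refuses, and 10.11 appears stricter about this than earlier releases. URLByAppendingPathComponent: keeps the URL well formed. It would also be safer to skip the rest of the loop body when imageDest is nil, since the following CGImageDestinationSetProperties call is exactly what logs the error and crashes.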
This is my code to generate a new key pair:
// Create RSA key pair
CFMutableDictionaryRef parameters = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                              0,
                                                              &kCFTypeDictionaryKeyCallBacks,
                                                              &kCFTypeDictionaryValueCallBacks);
CFDictionarySetValue(parameters, kSecAttrKeyType, kSecAttrKeyTypeRSA);
int32_t rawnum = 2048;
CFNumberRef num = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &rawnum);
CFDictionarySetValue(parameters, kSecAttrKeySizeInBits, num);
publicKey = NULL;
privateKey = NULL;
SecKeyGeneratePair(parameters, &publicKey, &privateKey);
and I've tried to get the key data using this method:
- (NSData *)getRSAKeyBitsFromKey:(SecKeyRef)givenKey {
    static const uint8_t publicKeyIdentifier[] = "com.company";
    NSData *publicTag = [[NSData alloc] initWithBytes:publicKeyIdentifier length:sizeof(publicKeyIdentifier)];
    OSStatus sanityCheck = noErr;
    NSData *publicKeyBits = nil;
    NSMutableDictionary *queryPublicKey = [[NSMutableDictionary alloc] init];
    [queryPublicKey setObject:(__bridge id)kSecClassKey forKey:(__bridge id)kSecClass];
    //[queryPublicKey setObject:publicTag forKey:(__bridge id)kSecAttrApplicationTag];
    [queryPublicKey setObject:(__bridge id)kSecAttrKeyTypeRSA forKey:(__bridge id)kSecAttrKeyType];
    // Temporarily add key to the Keychain, return as data:
    NSMutableDictionary *attributes = [queryPublicKey mutableCopy];
    [attributes setObject:(__bridge id)givenKey forKey:(__bridge id)kSecValueRef];
    [attributes setObject:@YES forKey:(__bridge id)kSecReturnData];
    CFTypeRef result;
    sanityCheck = SecItemAdd((__bridge CFDictionaryRef)attributes, &result);
    if (sanityCheck == errSecSuccess) {
        publicKeyBits = CFBridgingRelease(result);
        // Remove from Keychain again:
        (void)SecItemDelete((__bridge CFDictionaryRef)queryPublicKey);
    }
    return publicKeyBits;
}
I know that I probably didn't set the public key identifier and the other attributes (should I have?).
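For reference, a hedged sketch of what tagging the keys at generation time could look like (untested; kSecPublicKeyAttrs is the per-key attributes sub-dictionary, and the tag is the identifier from the method above):
NSData *publicTag = [@"com.company" dataUsingEncoding:NSUTF8StringEncoding];
NSDictionary *publicKeyAttrs = @{
    (__bridge id)kSecAttrIsPermanent: @YES,         // persist the key in the keychain
    (__bridge id)kSecAttrApplicationTag: publicTag  // so later queries can find it
};
CFDictionarySetValue(parameters,
                     kSecPublicKeyAttrs,
                     (__bridge CFDictionaryRef)publicKeyAttrs);
With the tag set, the query in getRSAKeyBitsFromKey: could match on kSecAttrApplicationTag (the commented-out line) rather than temporarily adding the key with SecItemAdd.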
I am trying to display a Twitter feed in a UITableView. I was able to get the feed and NSLog the information. However, I am confused as to how to display the information instead of logging it to the console. I was told to create a custom object to store the desired data from the feed and then make a UITableView to display the information, and that is where I get stuck. Are there any suggestions, or could anyone point me in the right direction? Here is how my code looks right now. I appreciate any help and thank you for your valuable time.
CODE BELOW:
#import "ViewController.h"
#import <Accounts/Accounts.h>
#import <Social/Social.h>

@interface ViewController ()
@end

@implementation ViewController

- (void)viewDidLoad
{
    [self refreshTwitter];
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)refreshTwitter
{
    ACAccountStore *accountStore = [[ACAccountStore alloc] init];
    if (accountStore != nil)
    {
        ACAccountType *accountType = [accountStore accountTypeWithAccountTypeIdentifier:ACAccountTypeIdentifierTwitter];
        if (accountType != nil)
        {
            [accountStore requestAccessToAccountsWithType:accountType options:nil completion:^(BOOL granted, NSError *error)
            {
                if (granted)
                {
                    // Successful access
                    NSArray *twitterAccounts = [accountStore accountsWithAccountType:accountType];
                    if (twitterAccounts != nil)
                    {
                        ACAccount *currentAccount = [twitterAccounts objectAtIndex:0];
                        if (currentAccount != nil)
                        {
                            NSString *requestString = @"https://api.twitter.com/1.1/statuses/user_timeline.json";
                            SLRequest *request = [SLRequest requestForServiceType:SLServiceTypeTwitter requestMethod:SLRequestMethodGET URL:[NSURL URLWithString:requestString] parameters:nil];
                            [request setAccount:currentAccount];
                            [request performRequestWithHandler:^(NSData *responseData, NSHTTPURLResponse *urlResponse, NSError *error)
                            {
                                if ((error == nil) && ([urlResponse statusCode] == 200))
                                {
                                    NSArray *twitterFeed = [NSJSONSerialization JSONObjectWithData:responseData options:0 error:nil];
                                    NSDictionary *firstPost = [twitterFeed objectAtIndex:0];
                                    NSLog(@"firstPost = %@", [firstPost description]);
                                }
                            }];
                        }
                    }
                }
                else
                {
                    // Access denied
                }
            }];
        }
    }
}

@end
JSON DATA:
2014-07-08 13:22:37.442 demoApp[19277:4607] firstPost = {
    contributors = "<null>";
    coordinates = "<null>";
    "created_at" = "Wed Jul 02 18:29:43 +0000 2014";
    entities = {
        hashtags = (
            {
                indices = (
                    0,
                    20
                );
                text = ikercasillasoficial;
            }
        );
        symbols = (
        );
        urls = (
        );
        "user_mentions" = (
        );
    };
    "favorite_count" = 0;
    favorited = 0;
    geo = "<null>";
    id = 484403559677837312;
    "id_str" = 484403559677837312;
    "in_reply_to_screen_name" = "<null>";
    "in_reply_to_status_id" = "<null>";
    "in_reply_to_status_id_str" = "<null>";
    "in_reply_to_user_id" = "<null>";
    "in_reply_to_user_id_str" = "<null>";
    lang = es;
    place = "<null>";
    "retweet_count" = 0;
    retweeted = 0;
    source = "Twitter for iPad";
    text = "#ikercasillasoficial Bien Iker. Pero fuiste muy sutil. Ojala ese hp cuando tenga hijos se le pudran en el vientre se su mujer.";
    truncated = 0;
    user = {
        "contributors_enabled" = 0;
        "created_at" = "Wed Jan 18 02:10:12 +0000 2012";
        "default_profile" = 1;
        "default_profile_image" = 0;
        description = "Hello world";
        entities = {
            description = {
                urls = (
                );
            };
        };
        "favourites_count" = 0;
        "follow_request_sent" = 0;
        "followers_count" = 2;
        following = 0;
        "friends_count" = 18;
        "geo_enabled" = 0;
        id = 467043064;
        "id_str" = 467043064;
        "is_translation_enabled" = 0;
        "is_translator" = 0;
        lang = en;
        "listed_count" = 0;
        location = "";
        name = "Omar Devila";
        notifications = 0;
        "profile_background_color" = C0DEED;
        "profile_background_image_url" = "http://abs.twimg.com/images/themes/theme1/bg.png";
        "profile_background_image_url_https" = "https://abs.twimg.com/images/themes/theme1/bg.png";
        "profile_background_tile" = 0;
        "profile_image_url" = "http://pbs.twimg.com/profile_images/483760718895140864/3pLRpyzk_normal.jpeg";
        "profile_image_url_https" = "https://pbs.twimg.com/profile_images/483760718895140864/3pLRpyzk_normal.jpeg";
        "profile_link_color" = 0084B4;
        "profile_sidebar_border_color" = C0DEED;
        "profile_sidebar_fill_color" = DDEEF6;
        "profile_text_color" = 333333;
        "profile_use_background_image" = 1;
        protected = 0;
        "screen_name" = DevilaOmar;
        "statuses_count" = 18;
        "time_zone" = "<null>";
        url = "<null>";
        "utc_offset" = "<null>";
        verified = 0;
    };
}
Declare the twitterFeed array at the class level; for example:
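(A sketch; the tableView outlet and data-source wiring are my assumptions about your setup.)
@interface ViewController () <UITableViewDataSource>
{
    NSArray *twitterFeed; // holds the parsed statuses array
}
@property (nonatomic, weak) IBOutlet UITableView *tableView; // assumed outlet
@end

// ...then inside performRequestWithHandler:, after parsing the JSON:
twitterFeed = [NSJSONSerialization JSONObjectWithData:responseData options:0 error:nil];
dispatch_async(dispatch_get_main_queue(), ^{
    [self.tableView reloadData]; // UI work belongs on the main queue
});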
numberOfSectionsInTableView
-(NSInteger)numberOfSectionsInTableView:(UITableView *)tableView{
return 1;
}
numberOfRowsInSection
-(NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section{
return twitterFeed.count;
}
cellForRowAtIndexPath
-(UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath{
    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"cell" forIndexPath:indexPath];
    cell.textLabel.text = twitterFeed[indexPath.row][@"user"][@"screen_name"];
    return cell;
}
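Note (an aside, not from the original answer): dequeueReusableCellWithIdentifier:forIndexPath: only works if a prototype cell with the identifier "cell" exists in the storyboard, or if a cell class was registered beforehand, e.g.:
[self.tableView registerClass:[UITableViewCell class] forCellReuseIdentifier:@"cell"];
Also remember to set the table view's dataSource, or these methods will never be called.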
I have 3 files: one with only a red channel, one with only a green channel, and one with only a blue channel. Now I want to combine those 3 images into one, where each source image becomes one color channel of the finished image.
How can I do this with Cocoa? I have a solution that works, but it is too slow:
NSBitmapImageRep *rRep = [[rImage representations] objectAtIndex:0];
NSBitmapImageRep *gRep = [[gImage representations] objectAtIndex:0];
NSBitmapImageRep *bRep = [[bImage representations] objectAtIndex:0];
NSBitmapImageRep *finalRep = [rRep copy];
for (NSUInteger i = 0; i < [rRep pixelsWide]; i++) {
    for (NSUInteger j = 0; j < [rRep pixelsHigh]; j++) {
        CGFloat r = [[rRep colorAtX:i y:j] redComponent];
        CGFloat g = [[gRep colorAtX:i y:j] greenComponent];
        CGFloat b = [[bRep colorAtX:i y:j] blueComponent];
        [finalRep setColor:[NSColor colorWithCalibratedRed:r green:g blue:b alpha:1.0] atX:i y:j];
    }
}
NSData *data = [finalRep representationUsingType:NSJPEGFileType properties:[NSDictionary dictionaryWithObject:[NSNumber numberWithDouble:0.7] forKey:NSImageCompressionFactor]];
[data writeToURL:[panel URL] atomically:YES];
The Accelerate.framework provides a function to combine 3 planar images into one destination:
vImageConvert_Planar8toRGB888.
I haven't tried your approach, but the vImage-based method below is quite fast.
I was able to combine three (R, G, B) planes of a 1680x1050 image in ~0.1 s on my Mac. The actual conversion takes about a third of that time; the rest is setup and file I/O.
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification
{
    NSDate *start = [NSDate date];
    NSURL *redImageURL = [[NSBundle mainBundle] URLForImageResource:@"red"];
    NSURL *greenImageURL = [[NSBundle mainBundle] URLForImageResource:@"green"];
    NSURL *blueImageURL = [[NSBundle mainBundle] URLForImageResource:@"blue"];
    NSData *redImageData = [self newChannelDataFromImageAtURL:redImageURL];
    NSData *greenImageData = [self newChannelDataFromImageAtURL:greenImageURL];
    NSData *blueImageData = [self newChannelDataFromImageAtURL:blueImageURL];
    // We use our "red" image to measure the dimensions. We assume that all images & the destination have the same size.
    CGImageSourceRef imageSource = CGImageSourceCreateWithURL((__bridge CFURLRef)redImageURL, NULL);
    NSDictionary *properties = CFBridgingRelease(CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL));
    CFRelease(imageSource);
    CGFloat width = [properties[(id)kCGImagePropertyPixelWidth] doubleValue];
    CGFloat height = [properties[(id)kCGImagePropertyPixelHeight] doubleValue];
    self.image = [self newImageWithSize:CGSizeMake(width, height) fromRedChannel:redImageData greenChannel:greenImageData blueChannel:blueImageData];
    NSLog(@"Combining 3 (R, G, B) planes of size %@ took: %fs", NSStringFromSize(CGSizeMake(width, height)), [[NSDate date] timeIntervalSinceDate:start]);
}
- (NSImage*)newImageWithSize:(CGSize)size fromRedChannel:(NSData*)redImageData greenChannel:(NSData*)greenImageData blueChannel:(NSData*)blueImageData
{
    vImage_Buffer redBuffer;
    redBuffer.data = (void*)redImageData.bytes;
    redBuffer.width = size.width;
    redBuffer.height = size.height;
    redBuffer.rowBytes = [redImageData length]/size.height;
    vImage_Buffer greenBuffer;
    greenBuffer.data = (void*)greenImageData.bytes;
    greenBuffer.width = size.width;
    greenBuffer.height = size.height;
    greenBuffer.rowBytes = [greenImageData length]/size.height;
    vImage_Buffer blueBuffer;
    blueBuffer.data = (void*)blueImageData.bytes;
    blueBuffer.width = size.width;
    blueBuffer.height = size.height;
    blueBuffer.rowBytes = [blueImageData length]/size.height;
    size_t destinationImageBytesLength = size.width*size.height*3;
    const void* destinationImageBytes = valloc(destinationImageBytesLength);
    NSData* destinationImageData = [[NSData alloc] initWithBytes:destinationImageBytes length:destinationImageBytesLength];
    vImage_Buffer destinationBuffer;
    destinationBuffer.data = (void*)destinationImageData.bytes;
    destinationBuffer.width = size.width;
    destinationBuffer.height = size.height;
    destinationBuffer.rowBytes = [destinationImageData length]/size.height;
    vImage_Error result = vImageConvert_Planar8toRGB888(&redBuffer, &greenBuffer, &blueBuffer, &destinationBuffer, 0);
    NSImage* image = nil;
    if(result == kvImageNoError)
    {
        //TODO: If you need color matching, use an appropriate colorspace here
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((__bridge CFDataRef)(destinationImageData));
        CGImageRef finalImageRef = CGImageCreate(size.width, size.height, 8, 24, destinationBuffer.rowBytes, colorSpace, kCGBitmapByteOrder32Big|kCGImageAlphaNone, dataProvider, NULL, NO, kCGRenderingIntentDefault);
        CGColorSpaceRelease(colorSpace);
        CGDataProviderRelease(dataProvider);
        image = [[NSImage alloc] initWithCGImage:finalImageRef size:NSMakeSize(size.width, size.height)];
        CGImageRelease(finalImageRef);
    }
    free((void*)destinationImageBytes);
    return image;
}
- (NSData*)newChannelDataFromImageAtURL:(NSURL*)imageURL
{
    CGImageSourceRef imageSource = CGImageSourceCreateWithURL((__bridge CFURLRef)imageURL, NULL);
    if(imageSource == NULL){return NULL;}
    CGImageRef image = CGImageSourceCreateImageAtIndex(imageSource, 0, NULL);
    CFRelease(imageSource);
    if(image == NULL){return NULL;}
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image);
    CGFloat width = CGImageGetWidth(image);
    CGFloat height = CGImageGetHeight(image);
    size_t bytesPerRow = CGImageGetBytesPerRow(image);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(image);
    CGContextRef bitmapContext = CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, colorSpace, bitmapInfo);
    NSData* data = NULL;
    if(NULL != bitmapContext)
    {
        CGContextDrawImage(bitmapContext, CGRectMake(0.0, 0.0, width, height), image);
        CGImageRef imageRef = CGBitmapContextCreateImage(bitmapContext);
        if(NULL != imageRef)
        {
            data = (NSData*)CFBridgingRelease(CGDataProviderCopyData(CGImageGetDataProvider(imageRef)));
        }
        CGImageRelease(imageRef);
        CGContextRelease(bitmapContext);
    }
    CGImageRelease(image);
    return data;
}
Your program creates many, many color objects: one NSColor per pixel per channel, plus one per destination pixel, which is why it is so slow.
Although your program could simply access the image reps' bitmapData, that would require your program to know a lot about bitmap representations.
Before taking that approach, prefer to let Quartz do the heavy lifting: render each image into a CGBitmapContext (e.g. using CGContextDrawImage(ctx, rect, img.CGImage)) and then extract/copy the rendered component values from the rendered result over to a destination RGB bitmap; see the sketch below.
If your inputs are not in multicomponent color models (e.g. grayscale), you should render in the source color model to save a lot of CPU time and memory.
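A minimal sketch of that approach (my code, untested; it assumes all three inputs share the same pixel size and renders each into its own RGBA context so the component layout is known):
static CGContextRef CreateRGBAContext(size_t w, size_t h) {
    CGColorSpaceRef cs = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(NULL, w, h, 8, w * 4, cs, kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(cs);
    return ctx;
}

static NSData *CombinedRGBAData(CGImageRef rImg, CGImageRef gImg, CGImageRef bImg) {
    size_t w = CGImageGetWidth(rImg), h = CGImageGetHeight(rImg);
    CGRect rect = CGRectMake(0, 0, w, h);
    CGContextRef rCtx = CreateRGBAContext(w, h);
    CGContextRef gCtx = CreateRGBAContext(w, h);
    CGContextRef bCtx = CreateRGBAContext(w, h);
    // Let Quartz normalize each source into a known RGBA layout.
    CGContextDrawImage(rCtx, rect, rImg);
    CGContextDrawImage(gCtx, rect, gImg);
    CGContextDrawImage(bCtx, rect, bImg);
    const uint8_t *r = CGBitmapContextGetData(rCtx);
    const uint8_t *g = CGBitmapContextGetData(gCtx);
    const uint8_t *b = CGBitmapContextGetData(bCtx);
    NSMutableData *rgba = [NSMutableData dataWithLength:w * h * 4];
    uint8_t *dst = rgba.mutableBytes;
    for (size_t i = 0; i < w * h; i++) {
        dst[i * 4 + 0] = r[i * 4 + 0]; // red component from the red image
        dst[i * 4 + 1] = g[i * 4 + 1]; // green component from the green image
        dst[i * 4 + 2] = b[i * 4 + 2]; // blue component from the blue image
        dst[i * 4 + 3] = 0xFF;         // opaque alpha
    }
    CGContextRelease(rCtx); CGContextRelease(gCtx); CGContextRelease(bCtx);
    return rgba;
}
The resulting data can then back a CGImage via CGDataProviderCreateWithCFData, just as in the vImage answer above.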
I always get: CGImageCreate: invalid image size: 0 x 0.
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos
                       usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
    // Within the group enumeration block, filter to enumerate just videos.
    [group setAssetsFilter:[ALAssetsFilter allVideos]];
    // For this example, we're only interested in the first item.
    [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0]
                            options:0
                         usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {
        // The end of the enumeration is signaled by asset == nil.
        if (alAsset) {
            ALAssetRepresentation *representation = [[alAsset defaultRepresentation] retain];
            NSURL *url = [representation url];
            AVURLAsset *avAsset = [[AVURLAsset URLAssetWithURL:url options:nil] retain];
            AVAssetReader *assetReader = [[AVAssetReader assetReaderWithAsset:avAsset error:nil] retain];
            NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeVideo];
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            AVAssetReaderTrackOutput *assetReaderOutput = [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil] retain];
            if (![assetReader canAddOutput:assetReaderOutput]) { printf("could not read reader output\n"); }
            [assetReader addOutput:assetReaderOutput];
            [assetReader startReading];
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            UIImage *image = imageFromSampleBuffer(nextBuffer);
        }
    }];
}
                     failureBlock:^(NSError *error) { NSLog(@"No groups"); }];
The imageFromSampleBuffer function comes directly from Apple:
UIImage* imageFromSampleBuffer(CMSampleBufferRef nextBuffer) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(nextBuffer);
    printf("total size: %zu\n", CMSampleBufferGetTotalSampleSize(nextBuffer));
    // Lock the base address of the pixel buffer.
    //CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get the number of bytes per row for the pixel buffer.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height.
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    printf("b:%zu w:%zu h:%zu\n", bytesPerRow, width, height);
    // Create a device-dependent RGB color space.
    static CGColorSpaceRef colorSpace = NULL;
    if (colorSpace == NULL) {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        if (colorSpace == NULL) {
            // Handle the error appropriately.
            return nil;
        }
    }
    // Get the base address of the pixel buffer.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    // Get the data size for contiguous planes of the pixel buffer.
    size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer);
    // Create a Quartz direct-access data provider that uses data we supply.
    CGDataProviderRef dataProvider =
        CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL);
    // Create a bitmap image from data supplied by the data provider.
    CGImageRef cgImage =
        CGImageCreate(width, height, 8, 32, bytesPerRow,
                      colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little,
                      dataProvider, NULL, true, kCGRenderingIntentDefault);
    CGDataProviderRelease(dataProvider);
    // Create and return an image object to represent the Quartz image.
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return image;
}
I try to get the length and width; it prints out the size of the sample buffer, so the buffer itself does exist, but I get no UIImage.
For the AVAssetReaderTrackOutput, pass output settings instead of nil:
NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
[outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
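With outputSettings:nil, the track output vends the samples in their stored (compressed) format, so CMSampleBufferGetImageBuffer returns NULL and the width/height read back as 0, which matches the error you see. Requesting kCVPixelFormatType_32BGRA makes the reader decode into CVPixelBuffers that imageFromSampleBuffer can handle; pass the dictionary when creating the output:
AVAssetReaderTrackOutput *assetReaderOutput = [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:outputSettings] retain];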
I understand you want to read the first frame from all of your local videos? You can use a simpler way to do all of this:
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos
                       usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
    // Within the group enumeration block, filter to enumerate just videos.
    [group setAssetsFilter:[ALAssetsFilter allVideos]];
    // For this example, we're only interested in the first item.
    [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0]
                            options:0
                         usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {
        // The end of the enumeration is signaled by asset == nil.
        if (alAsset) {
            ALAssetRepresentation *representation = [[alAsset defaultRepresentation] retain];
            NSURL *url = [representation url];
            AVURLAsset *avAsset = [[AVURLAsset URLAssetWithURL:url options:nil] retain];
            AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:avAsset];
            CMTime thumbTime = CMTimeMakeWithSeconds(1, 30);
            NSError *error;
            CMTime actualTime;
            [imageGenerator setMaximumSize:MAXSIZE];
            CGImageRef imageRef = [imageGenerator copyCGImageAtTime:thumbTime actualTime:&actualTime error:&error];
        }
    }];
}
                     failureBlock:^(NSError *error) { NSLog(@"No groups"); }];
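One note on this snippet (my addition): copyCGImageAtTime:actualTime:error: returns a retained CGImageRef, so wrap it in a UIImage and release it:
UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
CGImageRelease(imageRef);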