What would be the condition in a while loop using the FaceRecognizer - xcode

How do I make a while loop with FaceRecognizer so that while a face is recognized, a command will happen? I am not sure what the condition would be. What variable do I use, and what do I equate it to?
Here is my code:
#import "ViewController.h"
NSString* const faceCascadeFilename = @"haarcascade_frontalface_alt2";
const int HaarOptions = CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_DO_ROUGH_SEARCH;
@interface ViewController ()
@end
@implementation ViewController
@synthesize videoCamera;
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
self.videoCamera = [[CvVideoCamera alloc] initWithParentView:imageView];
self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
self.videoCamera.defaultFPS = 30;
self.videoCamera.grayscaleMode = NO;
self.videoCamera.delegate = self;
NSString* faceCascadePath = [[NSBundle mainBundle] pathForResource:faceCascadeFilename ofType:@"xml"];
faceCascade.load([faceCascadePath UTF8String]);
Label1.hidden=YES;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark - Protocol CvVideoCameraDelegate
#ifdef __cplusplus
- (void)processImage:(Mat&)image;
{
Mat grayscaleFrame;
cvtColor(image, grayscaleFrame, CV_BGR2GRAY);
equalizeHist(grayscaleFrame, grayscaleFrame);
std::vector<cv::Rect> faces;
faceCascade.detectMultiScale(grayscaleFrame, faces, 1.1, 2, HaarOptions, cv::Size(60, 60));
for (int i = 0; i < faces.size(); i++)
{
cv::Point pt1(faces[i].x + faces[i].width, faces[i].y + faces[i].height);
cv::Point pt2(faces[i].x, faces[i].y);
cv::rectangle(image, pt1, pt2, cvScalar(0, 255, 0, 0), 1, 8 ,0);
}
}
//#endif
#pragma mark - UI Actions
- (IBAction)startCamera:(id)sender
{
[self.videoCamera start];
imageView.hidden=YES;
while (FaceRecognizer...) {
Label1.hidden=NO;
}
}
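A while loop inside startCamera: would block the main thread, so the check really has to happen once per frame instead. One way to sketch it is to keep the condition in processImage:, where the detection results already exist; faceFound and the dispatch_async to the main queue are additions here, while Label1, faceCascade and HaarOptions are the ones from the code above.
- (void)processImage:(Mat&)image
{
    Mat grayscaleFrame;
    cvtColor(image, grayscaleFrame, CV_BGR2GRAY);
    equalizeHist(grayscaleFrame, grayscaleFrame);
    std::vector<cv::Rect> faces;
    faceCascade.detectMultiScale(grayscaleFrame, faces, 1.1, 2, HaarOptions, cv::Size(60, 60));
    // "a face is recognized" here just means the detector returned at least one rectangle
    BOOL faceFound = !faces.empty();
    // the delegate is called on the camera queue, so update UIKit on the main thread
    dispatch_async(dispatch_get_main_queue(), ^{
        Label1.hidden = !faceFound;
    });
}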

Related

Warning in Xcode: GLKit deprecated, consider migrating to Metal instead, and Apple email: Deprecated API Usage, use WKWebView instead of UIWebView

I've completed my game using Buildbox 2.3.3 and have cleaned up as many warnings as possible in Xcode. However, I've been at this for weeks now trying to migrate from OpenGL to Metal, which I assume is the reason I have that warning in Xcode saying GLKit is deprecated, consider migrating to Metal instead.
I attempted to upload the game to App Store Connect without solving this error, but then I instantly got sent an email saying:
ITMS-90809: Deprecated API Usage, New apps no longer use UIWebView,
use WKWebView instead.
I've no clue how to go about converting my code to cater for this. I would really appreciate some guidance, or if anyone could rewrite my code for me, converting the OpenGL to Metal.
I'll attach the code I'm using below; I would really appreciate it if anyone can help me. I've been stuck at this final stage for weeks now, and it's very frustrating.
AppDelegate.h
#import <UIKit/UIKit.h>
@interface AppDelegate : UIResponder <UIApplicationDelegate>{
}
@property (strong, nonatomic) UIWindow *window;
@end
AppDelegate.mm
#import "AppDelegate.h"
#import <GLKit/GLKit.h>
#include "PTPSettingsController.h"
#include "libs/cocos2dx/include/audio/include/SimpleAudioEngine.h"
@interface AppDelegate ()
@end
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
cocos2d::CCDirector::sharedDirector()->pause();
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
cocos2d::CCApplication::sharedApplication()->applicationDidEnterBackground();
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
cocos2d::CCApplication::sharedApplication()->applicationWillEnterForeground();
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
cocos2d::CCDirector::sharedDirector()->resume();
}
- (void)applicationWillTerminate:(UIApplication *)application {
}
- (void)loadingDidComplete{
}
-(void)showCustomFullscreenAd{
}
- (void)screenOnEnter:(const char*) name{
}
- (void)screenOnExit:(const char*) name{
}
@end
GameViewController.h
#import <UIKit/UIKit.h>
#import <GLKit/GLKit.h>
@interface GameViewController : GLKViewController
@end
GameViewController.mm
#import "GameViewController.h"
#import <OpenGLES/ES2/glext.h>
#import "PTModelController.h"
#import "PTModelGeneralSettings.h"
#import "PTPAppDelegate.h"
#import "cocos2d.h"
#import "PTPConfig.h"
#include "PTPSettingsController.h"
#define IOS_MAX_TOUCHES_COUNT 10
static PTPAppDelegate s_sharedApplication;
@interface GameViewController () {
NSString* shareMessage;
bool sheduledForShareWidget;
}
@property (strong, nonatomic) EAGLContext *context;
@end
@implementation GameViewController
- (void)viewDidLoad{
[super viewDidLoad];
sheduledForShareWidget = false;
self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
if (!self.context) {
NSLog(#"Failed to create ES context");
}
GLKView *view = (GLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = GLKViewDrawableDepthFormat24;
[view setMultipleTouchEnabled: YES];
[self setPreferredFramesPerSecond:60];
[EAGLContext setCurrentContext:self.context];
PTModelController *mc = PTModelController::shared();
mc->clean();
unsigned long size = 0;
char* pBuffer = (char*)CCFileUtils::sharedFileUtils()->getFileData("data/data.pkg", "rb", &size);
if (pBuffer != NULL && size > 0){
mc->setUsingDataEncryption( true );
}
mc->loadDataForSplashScreen("data/data.pkg", processor().c_str());
s_sharedApplication.setDataArchiveProcessor(processor());
cocos2d::CCApplication::sharedApplication()->run();
}
- (void)dealloc{
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
}
- (void)didReceiveMemoryWarning{
[super didReceiveMemoryWarning];
if ([self isViewLoaded] && ([[self view] window] == nil)) {
self.view = nil;
if ([EAGLContext currentContext] == self.context) {
[EAGLContext setCurrentContext:nil];
}
self.context = nil;
}
// Dispose of any resources that can be recreated.
}
- (BOOL)prefersStatusBarHidden {
return YES;
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect{
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
cocos2d::CCDirector::sharedDirector()->setViewport();
cocos2d::CCDirector::sharedDirector()->mainLoop();
}
- (void)update{
if(sheduledForShareWidget == true){
sheduledForShareWidget = false;
GLKView *view = (GLKView *)self.view;
UIImage* screenshot = view.snapshot;
PTLog("Opens Share Widget: screenshot was taken");
UIActivityViewController *activityVC = [[UIActivityViewController alloc] initWithActivityItems:@[shareMessage, screenshot] applicationActivities:nil];
NSArray *excludeActivities = @[UIActivityTypeSaveToCameraRoll,
UIActivityTypeAssignToContact];
activityVC.excludedActivityTypes = excludeActivities;
float iOSVersion = [[UIDevice currentDevice].systemVersion floatValue];
if(iOSVersion > 8.0){
activityVC.popoverPresentationController.sourceView = self.view;
}
[self presentViewController:activityVC animated:YES completion:nil];
PTLog("opens Share Widget: view controller presented");
}
}
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(nullable UIEvent *)event{
int ids[IOS_MAX_TOUCHES_COUNT] = {0};
float xs[IOS_MAX_TOUCHES_COUNT] = {0.0f};
float ys[IOS_MAX_TOUCHES_COUNT] = {0.0f};
int i = 0;
for (UITouch *touch in touches) {
ids[i] = (intptr_t)touch;
xs[i] = [touch locationInView: [touch view]].x * self.view.contentScaleFactor;
ys[i] = [touch locationInView: [touch view]].y * self.view.contentScaleFactor;
++i;
}
cocos2d::CCEGLView::sharedOpenGLView()->handleTouchesBegin(i, ids, xs, ys);
}
- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(nullable UIEvent *)event{
int ids[IOS_MAX_TOUCHES_COUNT] = {0};
float xs[IOS_MAX_TOUCHES_COUNT] = {0.0f};
float ys[IOS_MAX_TOUCHES_COUNT] = {0.0f};
int i = 0;
for (UITouch *touch in touches) {
ids[i] = (intptr_t)touch;
xs[i] = [touch locationInView: [touch view]].x * self.view.contentScaleFactor;
ys[i] = [touch locationInView: [touch view]].y * self.view.contentScaleFactor;
++i;
}
cocos2d::CCEGLView::sharedOpenGLView()->handleTouchesMove(i, ids, xs, ys);
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(nullable UIEvent *)event{
int ids[IOS_MAX_TOUCHES_COUNT] = {0};
float xs[IOS_MAX_TOUCHES_COUNT] = {0.0f};
float ys[IOS_MAX_TOUCHES_COUNT] = {0.0f};
int i = 0;
for (UITouch *touch in touches) {
ids[i] = (intptr_t)touch;
xs[i] = [touch locationInView: [touch view]].x * self.view.contentScaleFactor;
ys[i] = [touch locationInView: [touch view]].y * self.view.contentScaleFactor;
++i;
}
cocos2d::CCEGLView::sharedOpenGLView()->handleTouchesEnd(i, ids, xs, ys);
}
- (void)touchesCancelled:(NSSet<UITouch *> *)touches withEvent:(nullable UIEvent *)event{
int ids[IOS_MAX_TOUCHES_COUNT] = {0};
float xs[IOS_MAX_TOUCHES_COUNT] = {0.0f};
float ys[IOS_MAX_TOUCHES_COUNT] = {0.0f};
int i = 0;
for (UITouch *touch in touches) {
ids[i] = (intptr_t)touch;
xs[i] = [touch locationInView: [touch view]].x * self.view.contentScaleFactor;
ys[i] = [touch locationInView: [touch view]].y * self.view.contentScaleFactor;
++i;
}
cocos2d::CCEGLView::sharedOpenGLView()->handleTouchesCancel(i, ids, xs, ys);
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
PTModelGeneralSettingsPtr generalSettings = PTModelGeneralSettings::shared();
if(generalSettings->orientation() == PTModelGeneralSettings::LandscapeOrientation){
return UIInterfaceOrientationIsLandscape( interfaceOrientation );
}
else if(generalSettings->orientation() == PTModelGeneralSettings::PortraitOrientation){
return UIInterfaceOrientationIsPortrait( interfaceOrientation );
}
return NO;
}
- (NSUInteger) supportedInterfaceOrientations{
PTModelGeneralSettingsPtr generalSettings = PTModelGeneralSettings::shared();
if(generalSettings->orientation() == PTModelGeneralSettings::LandscapeOrientation){
return UIInterfaceOrientationMaskLandscape;
}
else if(generalSettings->orientation() == PTModelGeneralSettings::PortraitOrientation){
return UIInterfaceOrientationMaskPortrait;
}
return NO;
}
- (BOOL) shouldAutorotate {
return NO;
}
-(void) scheduleOpenShareWidget:(const char*) message{
shareMessage = [NSString stringWithUTF8String:message];
sheduledForShareWidget = true;
}
@end
As you've already been told, your question is a little unsuitable for Stack Overflow. You can start rewriting your project from OpenGL to Metal, and ask questions if anything goes wrong.
Apple documentation is a good starting point:
Migrating OpenGL Code to Metal
Mixing Metal and OpenGL Rendering in a View
You could also watch the WWDC 2019 video and learn a step-by-step approach for transitioning OpenGL-based apps to the Metal API.
OpenGL is portable and widely supported, so it's a pity not to be able to use it. Luckily the MetalANGLE framework is an almost perfect drop-in replacement for GLKit. I have started using it in the development branch of my map rendering library, CartoType, and it works correctly: I can't see any difference in the graphics compared to GLKit, and I had to make only one minor change to my code to get it working - and that change was probably caused by an incorrect use of GLKit.
So my advice is: stay with OpenGL and use MetalANGLE.
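For reference, the swap is mostly mechanical because MetalANGLE's MGLKit classes mirror the GLKit ones. A rough sketch of what the GameViewController above could look like afterwards, assuming MetalANGLE's MGLKViewController, MGLKView, MGLContext and kMGLRenderingAPIOpenGLES2 names, with the context property retyped from EAGLContext to MGLContext (check the exact per-frame draw callback signature in MGLKViewController.h):
GameViewController.h
#import <UIKit/UIKit.h>
#import <MetalANGLE/MGLKit.h>
@interface GameViewController : MGLKViewController
@end
GameViewController.mm, viewDidLoad excerpt:
self.context = [[MGLContext alloc] initWithAPI:kMGLRenderingAPIOpenGLES2]; // was EAGLContext / kEAGLRenderingAPIOpenGLES2
MGLKView *view = (MGLKView *)self.view;
view.context = self.context;
view.drawableDepthFormat = MGLDrawableDepthFormat24; // was GLKViewDrawableDepthFormat24
[MGLContext setCurrentContext:self.context];
// the cocos2d-x setup and the GL calls in the draw callback stay as they are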

Creating MTLTexture costs lots of time, how to improve it?

I tried to create an MTLTexture (size is 1920x1080), and it costs a lot of time when calling [replaceRegion:mipmapLevel:withBytes:bytesPerRow:], about 15ms on my iPhone X. Is there any way to improve the performance?
Here's my test code. I found out that if I make the texture in [viewDidAppear], it only costs about 4ms. What's the difference?
#import "ViewController.h"
#import <Metal/Metal.h>
#define I_WIDTH 1920
#define I_HEIHG 1080
@interface ViewController ()
@property(strong, nonatomic) id<MTLDevice> device;
@property(strong, nonatomic) NSTimer* timer;
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
self.device = MTLCreateSystemDefaultDevice();
}
- (void)viewDidAppear:(BOOL)animated {
[super viewDidAppear:animated];
//Method 1. This would run really slow, about 15ms per loop
self.timer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(mkTexture) userInfo:nil repeats:true];
//Method 2. This would run really fast, about 3ms per loop
// for (int i = 0; i < 3000; i++) {
// [self mkTexture];
// }
}
- (void)mkTexture {
double start = CFAbsoluteTimeGetCurrent();
MTLTextureDescriptor* desc = [[MTLTextureDescriptor alloc] init];
desc.width = I_WIDTH;
desc.height = I_HEIHG;
desc.pixelFormat = MTLPixelFormatBGRA8Unorm;
desc.usage = MTLTextureUsageShaderRead;
id<MTLTexture> texture = [self.device newTextureWithDescriptor:desc];
char* bytes = (char *)malloc(I_WIDTH * I_HEIHG * 4);
[texture replaceRegion:MTLRegionMake3D(0, 0, 0, I_WIDTH, I_HEIHG, 1) mipmapLevel:0 withBytes:bytes bytesPerRow:I_WIDTH * 4];
double end = CFAbsoluteTimeGetCurrent();
NSLog(#"%.2fms", (end - start) * 1000);
free(bytes);
}
@end
With Method 1, the function mkTexture costs about 15ms; with Method 2, it only costs about 4ms. It's really strange.
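One thing worth trying is to pay the texture allocation only once, so the per-frame cost is reduced to the upload: create the MTLTexture up front and call replaceRegion: on the same texture each time. A minimal sketch of that idea; the texture property is an addition here, the sizes and pixel format are the same as above:
// create once, e.g. in viewDidLoad
MTLTextureDescriptor* desc = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm width:I_WIDTH height:I_HEIHG mipmapped:NO];
desc.usage = MTLTextureUsageShaderRead;
self.texture = [self.device newTextureWithDescriptor:desc];
// per frame, only the upload remains
[self.texture replaceRegion:MTLRegionMake2D(0, 0, I_WIDTH, I_HEIHG) mipmapLevel:0 withBytes:bytes bytesPerRow:I_WIDTH * 4];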

How to make an if statement with FaceRecognizer

How do I make an if statement with FaceRecognizer so that if a face is recognized, a command will happen?
I am not sure what the condition would be. What variable do I use, and what do I equate it to?
Here is my code:
#import "ViewController.h"
NSString* const faceCascadeFilename = @"haarcascade_frontalface_alt2";
const int HaarOptions = CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_DO_ROUGH_SEARCH;
@interface ViewController ()
@end
@implementation ViewController
@synthesize videoCamera;
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
self.videoCamera = [[CvVideoCamera alloc] initWithParentView:imageView];
self.videoCamera.defaultAVCaptureDevicePosition = AVCaptureDevicePositionFront;
self.videoCamera.defaultAVCaptureSessionPreset = AVCaptureSessionPreset352x288;
self.videoCamera.defaultAVCaptureVideoOrientation = AVCaptureVideoOrientationPortrait;
self.videoCamera.defaultFPS = 30;
self.videoCamera.grayscaleMode = NO;
self.videoCamera.delegate = self;
NSString* faceCascadePath = [[NSBundle mainBundle] pathForResource:faceCascadeFilename ofType:@"xml"];
faceCascade.load([faceCascadePath UTF8String]);
Label1.hidden=YES;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark - Protocol CvVideoCameraDelegate
#ifdef __cplusplus
- (void)processImage:(Mat&)image;
{
Mat grayscaleFrame;
cvtColor(image, grayscaleFrame, CV_BGR2GRAY);
equalizeHist(grayscaleFrame, grayscaleFrame);
std::vector<cv::Rect> faces;
faceCascade.detectMultiScale(grayscaleFrame, faces, 1.1, 2, HaarOptions, cv::Size(60, 60));
for (int i = 0; i < faces.size(); i++)
{
cv::Point pt1(faces[i].x + faces[i].width, faces[i].y + faces[i].height);
cv::Point pt2(faces[i].x, faces[i].y);
cv::rectangle(image, pt1, pt2, cvScalar(0, 255, 0, 0), 1, 8 ,0);
}
}
//#endif
#pragma mark - UI Actions
- (IBAction)startCamera:(id)sender
{
[self.videoCamera start];
imageView.hidden=YES;
while (FaceRecognizer...) {
Label1.hidden=NO;
}
}
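Since the faces vector only exists inside processImage:, that is where the if statement belongs. A minimal sketch, reusing Label1 from the code above; the dispatch_async to the main queue is an addition here, because the delegate is not called on the main thread:
if (faces.size() > 0) {
    // at least one face was detected in this frame
    dispatch_async(dispatch_get_main_queue(), ^{
        Label1.hidden = NO;
    });
}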

How to make [UIView animateWithDuration: ..] work in an application ported by Apportable?

In an application ported by Apportable I need to do some animation (move/scale/change alpha) of a UIView *object via this call:
[UIView
animateWithDuration:1.f
delay:0.5f
options:UIViewAnimationOptionAllowUserInteraction
animations:^(void)
{
myview.center = moveTo;
myview.transform = transformTo;
myview.alpha = alphaTo;
}
completion:^(BOOL finished)
{
[self animationFinished];
}];
For now it only makes the delay, then executes the animation code and completion code immediately.
Thank you for the answer.
But I need the animation "today", so I made the class below.
It doesn't work perfectly, but it's much better than nothing.
Maybe it will be helpful for someone.
AOTAnimate.h
//
// AOTAnimate.h
//
// Created by Andrei Bakulin on 18/11/2013.
//
#import <Foundation/Foundation.h>
@interface AOTAnimate : NSObject
{
UIView *view;
NSInteger animationTicksLeft;
CGFloat scaleX;
CGFloat scaleY;
CGPoint moveDelta;
CGSize scaleCurrent;
CGSize scaleDelta;
CGFloat alphaDelta;
void (^completeAction)();
}
@property (nonatomic, assign) CGFloat duration;
@property (nonatomic, assign) CGFloat delay;
@property (nonatomic, assign) CGFloat frequency;
@property (nonatomic, assign) UIViewAnimationOptions options;
@property (nonatomic, assign) CGPoint moveFrom;
@property (nonatomic, assign) CGAffineTransform transformFrom;
@property (nonatomic, assign) CGFloat alphaFrom;
@property (nonatomic, assign) CGPoint moveTo;
@property (nonatomic, assign) CGAffineTransform transformTo;
@property (nonatomic, assign) CGFloat alphaTo;
+ (AOTAnimate*)makeAnimationOnView:(UIView*)view_ duration:(CGFloat)duration_;
+ (AOTAnimate*)makeAnimationOnView:(UIView*)view_ duration:(CGFloat)duration_ delay:(CGFloat)delay_;
- (void)run;
- (void)runWithCompleteAction:(void (^)(void))complete_;
@end
AOTAnimate.m
//
// AOTAnimate.m
//
// Created by Andrei Bakulin on 18/11/2013.
//
#import "AOTAnimate.h"
@implementation AOTAnimate
@synthesize duration, delay, frequency, options;
@synthesize moveFrom, transformFrom, alphaFrom;
@synthesize moveTo, transformTo, alphaTo;
+ (AOTAnimate*)makeAnimationOnView:(UIView*)view_ duration:(CGFloat)duration_
{
return [self makeAnimationOnView:view_ duration:duration_ delay:0.f];
}
+ (AOTAnimate*)makeAnimationOnView:(UIView*)view_ duration:(CGFloat)duration_ delay:(CGFloat)delay_
{
return [[AOTAnimate alloc] initWithView:view_ duration:(CGFloat)duration_ delay:(CGFloat)delay_];
}
//----------------------------------
- (void)dealloc
{
[view release];
if( completeAction )
Block_release(completeAction);
[super dealloc];
}
- (id)initWithView:(UIView*)view_ duration:(CGFloat)duration_ delay:(CGFloat)delay_
{
self = [super init];
if (self)
{
view = [view_ retain];
duration = duration_;
delay = delay_;
frequency = 0.025f;
options = UIViewAnimationOptionAllowUserInteraction;
moveFrom = view.center;
transformFrom = view.transform;
alphaFrom = view.alpha;
moveTo = view.center;
transformTo = view.transform;
alphaTo = view.alpha;
}
return self;
}
//----------------------------------
#pragma mark - Run animation
- (void)run
{
[self runWithCompleteAction:nil];
}
- (void)runWithCompleteAction:(void (^)(void))complete_
{
view.center = moveFrom;
view.transform = transformFrom;
view.alpha = alphaFrom;
#ifndef ANDROID
[UIView
animateWithDuration:duration
delay:delay
options:options
animations:^(void)
{
view.center = moveTo;
view.transform = transformTo;
view.alpha = alphaTo;
}
completion:^(BOOL finished)
{
if( complete_ )
complete_();
}];
#else
if( duration <= 0.f )
{
[self doAnimationComplete];
return;
}
animationTicksLeft = ceil( duration / frequency );
if( animationTicksLeft == 0 )
{
[self doAnimationComplete];
return;
}
moveDelta = CGPointMake( (moveTo.x-moveFrom.x)/animationTicksLeft, (moveTo.y-moveFrom.y)/animationTicksLeft );
alphaDelta = (alphaTo-alphaFrom)/animationTicksLeft;
CGSize scaleFrom = CGSizeMake( [self scaleX:transformFrom], [self scaleY:transformFrom] );
CGSize scaleTo = CGSizeMake( [self scaleX:transformTo], [self scaleY:transformTo] );
scaleDelta = CGSizeMake((scaleTo.width - scaleFrom.width)/animationTicksLeft,
(scaleTo.height - scaleFrom.height)/animationTicksLeft );
scaleCurrent = scaleFrom;
if( complete_ )
{
completeAction = Block_copy(complete_);
}
[self performSelector:@selector(doAnimationTick) withObject:nil afterDelay:delay];
#endif
}
//----------------------------------
#pragma mark - Manual animation
#ifdef ANDROID
- (void)doAnimationTick
{
if( CGPointEqualToPoint( moveDelta, CGPointZero ) == NO )
{
view.center = CGPointMake( view.center.x + moveDelta.x, view.center.y + moveDelta.y );
}
if( CGSizeEqualToSize( scaleDelta, CGSizeZero) == NO )
{
view.transform = CGAffineTransformMakeScale( scaleCurrent.width, scaleCurrent.height );
scaleCurrent.width += scaleDelta.width;
scaleCurrent.height += scaleDelta.height;
}
if( alphaDelta != 0.f )
{
view.alpha = view.alpha + alphaDelta;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - -
animationTicksLeft--;
if( animationTicksLeft > 0 )
{
[self performSelector:@selector(doAnimationTick) withObject:nil afterDelay:frequency];
}
else
{
[self doAnimationComplete];
}
}
- (void)doAnimationComplete
{
view.center = moveTo;
view.transform = transformTo;
view.alpha = alphaTo;
if( completeAction )
completeAction();
}
//----------------------------------
#pragma mark - Helpers
- (CGFloat)scaleX:(CGAffineTransform)t
{
return sqrt(t.a * t.a + t.c * t.c);
}
- (CGFloat)scaleY:(CGAffineTransform)t
{
return sqrt(t.b * t.b + t.d * t.d);
}
#endif
@end
Use like this:
UIView *someview;
AOTAnimate *animate = [AOTAnimate makeAnimationOnView:someview duration:1.f delay:0.5f];
// you can assign the animate.moveFrom / .transformFrom / .alphaFrom properties,
// but by default they are copied from the UIView* object
animate.moveTo = CGPointMake( 100, 200 ); // new point where need to move
animate.transformTo = CGAffineTransformScale( CGAffineTransformIdentity, 1.5f, 1.5f );
animate.alphaTo = 0.5f;
[animate runWithCompleteAction:^{
NSLog(#"Animation done..);
}];
If this runs on an iOS device, it'll use the normal [UIView animateWithDuration:...] method.
PS: This class only "moves" from one center point to another. The transform is used only to scale the object (not move it). Alpha was not supported on my 2 test devices, but maybe somewhere it works.
Animations do not work on the current version of Apportable's UIKit. We have fully functioning animations coming in the next version of UIKit, though. We will be releasing that once we are satisfied with the quality and coverage.

subclassing TTTableMessageItemCell, anything wrong?

I am subclassing TTTableMessageItemCell and got an EXC_BAD_ACCESS runtime error. Anything wrong?
CustomTTTableSubtitleItemCell.h
#import "Three20/Three20.h"
@interface CustomTTTableSubtitleItemCell : TTTableMessageItemCell {
TTButton *_rightButton;
}
@end
CustomTTTableSubtitleItemCell.m
#import "CustomTTTableSubtitleItemCell.h"
#import "CustomTTTableSubtitleItem.h"
#import "XYDefaultStyleSheet.h"
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
static CGFloat kHPadding = 10;
static CGFloat kVPadding = 15;
@interface ButtonStyleSheet : TTDefaultStyleSheet
@end
@implementation ButtonStyleSheet
- (TTStyle*)blueToolbarButton:(UIControlState)state {
TTShape* shape = [TTRoundedRectangleShape shapeWithRadius:4.5];
UIColor* tintColor = RGBCOLOR(30, 110, 255);
return [TTSTYLESHEET toolbarButtonForState:state shape:shape tintColor:tintColor font:nil];
}
@end
@implementation CustomTTTableSubtitleItemCell
+ (CGFloat)tableView:(UITableView*)tableView rowHeightForItem:(id)item {
CustomTTTableSubtitleItem* captionedItem = item;
CGFloat maxWidth = tableView.width - kHPadding*2;
CGSize titleSize = [captionedItem.title sizeWithFont:TTSTYLEVAR(myTitleFont)
constrainedToSize:CGSizeMake(maxWidth, CGFLOAT_MAX)
lineBreakMode:UILineBreakModeWordWrap];
CGSize textSize = [captionedItem.text sizeWithFont:TTSTYLEVAR(myHeadingFont)
constrainedToSize:CGSizeMake(maxWidth, CGFLOAT_MAX)
lineBreakMode:UILineBreakModeWordWrap];
CGSize subtextSize = [captionedItem.caption sizeWithFont:TTSTYLEVAR(mySubtextFont)
constrainedToSize:CGSizeMake(maxWidth, CGFLOAT_MAX) lineBreakMode:UILineBreakModeWordWrap];
return kVPadding*2 + titleSize.height + textSize.height + subtextSize.height + kVPadding;
}
- (id)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString*)identifier {
if (self = [super initWithStyle:UITableViewCellStyleValue2 reuseIdentifier:identifier]) {
_item = nil;
[TTStyleSheet setGlobalStyleSheet:[[[ButtonStyleSheet alloc] init] autorelease]];
}
return self;
}
- (void)layoutSubviews {
[super layoutSubviews];
[self.detailTextLabel sizeToFit];
self.detailTextLabel.top = kVPadding;
self.textLabel.height = self.detailTextLabel.height;
//_rightButton.frame = CGRectMake(20, self.detailTextLabel.bottom + kVPadding, kImageWidth, kImageHeight);
//_rightButton.alpha = !self.showingDeleteConfirmation;
[_rightButton sizeToFit];
_rightButton.left = self.contentView.width - (_timestampLabel.width + kHPadding);
_rightButton.top = self.height/2;
}
- (id)object {
return _item;
}
- (void)setObject:(id)object {
if (_item != object) {
[super setObject:object];
CustomTTTableSubtitleItem* item = object;
//self.textLabel.textColor = TTSTYLEVAR(myHeadingColor);
// self.textLabel.font = TTSTYLEVAR(myHeadingFont);
// self.textLabel.textAlignment = UITextAlignmentRight;
// self.textLabel.contentMode = UIViewContentModeCenter;
// self.textLabel.lineBreakMode = UILineBreakModeWordWrap;
// self.textLabel.numberOfLines = 0;
//
// self.detailTextLabel.textColor = TTSTYLEVAR(mySubtextColor);
// self.detailTextLabel.font = TTSTYLEVAR(mySubtextFont);
// self.detailTextLabel.textAlignment = UITextAlignmentLeft;
// self.detailTextLabel.contentMode = UIViewContentModeTop;
// self.detailTextLabel.lineBreakMode = UILineBreakModeWordWrap;
_rightButton = [TTButton
buttonWithStyle:#"blueToolbarButton:" title:item.rightButtonTitle];
}
}
- (void)dealloc {
TT_RELEASE_SAFELY(_rightButton);
[super dealloc];
}
@end
You're creating the TTButton autoreleased, while you're also releasing it in your dealloc method. So both the autorelease pool and the dealloc are trying to release your _rightButton TTButton.
In your header file, try adding:
@property (nonatomic, readonly, retain) TTButton* rightButton;
And then create the TTButton lazily in its getter in your source file:
///////////////////////////////////////////////////////////////////////////////////////////////////
- (TTButton*)rightButton {
if (!_rightButton) {
CustomTTTableSubtitleItem* item = (CustomTTTableSubtitleItem*)_item; // the cell's current item
_rightButton = [[TTButton
buttonWithStyle:@"blueToolbarButton:" title:item.rightButtonTitle] retain];
[self.contentView addSubview:_rightButton];
}
return _rightButton;
}
When using the rightButton, make sure to use self.rightButton and not _rightButton, such as in the layout method (because you need the object to be created).
self.rightButton.frame = CGRectMake(20, self.detailTextLabel.bottom + kVPadding, kImageWidth, kImageHeight);
I suggest opening the Three20UI/TTTableMessageItemCell.h & source file and trying to copy the behavior of one of the elements. That's what I did.
