I have been stuck on a problem for a few days.
Our CameraPreview works correctly, and so does the VNDetectRectanglesRequest: as soon as the preview moves over a rectangle our handler is called, which suggests the CVPixelBuffer is passing a valid image into the processor.
When the handler calls GetResults, we get the exception:
[0:] Rectangle Observations Exception - Objective-C exception thrown. Name: NSRangeException Reason: *** -[__NSSingleObjectArrayI objectAtIndex:]: index 4354863128 beyond bounds [0 .. 0]
This is obviously array-related, but I have a working copy using UIImagePickerController that runs perfectly in the simulator; only moving the code over to a live preview causes the problem.
Any help would be highly appreciated.
Here is my code snippet:
public class VisionDocumentDecoder : DefaultDecoderBase
{
    private TaskCompletionSource<DocumentScanResult> documentResult = null;
    private VNDetectRectanglesRequest documentRequest;
    private UIImageOrientation orientation = UIImageOrientation.Down;

    public VisionDocumentDecoder() : base()
    {
        documentRequest = new VNDetectRectanglesRequest(OnImageRectangleHandled);
        //documentRequest.MaximumObservations = 6;
        //documentRequest.MinimumConfidence = 0.1f;
        //documentRequest.MinimumAspectRatio = 0.3f;
        //documentRequest.QuadratureTolerance = 75;
    }

    public UIImage Convert(CVPixelBuffer pixelBuffer)
    {
        // Render the pixel buffer into a CGImage and wrap it in a UIImage,
        // disposing the intermediate Core Image objects.
        using (CIImage ciImage = CIImage.FromImageBuffer(pixelBuffer))
        using (CIContext temporaryContext = CIContext.FromOptions(null))
        using (CGImage cgImage = temporaryContext.CreateCGImage(ciImage, new CGRect(0, 0, pixelBuffer.Width, pixelBuffer.Height)))
        {
            return UIImage.FromImage(cgImage);
        }
    }
    public override IScanResult Decode(CVPixelBuffer pixelBuffer)
    {
        Logger.Log("Decoding sample ...");
        documentResult = new TaskCompletionSource<DocumentScanResult>();
        UIImage image = Convert(pixelBuffer);
        if (image == null)
        {
            documentResult.SetResult(null);
            return documentResult.Task.Result;
        }
        var handler = new VNImageRequestHandler(image.CGImage,
            ConvertToCGImagePropertyOrientation(image.Orientation), new NSDictionary());
        DispatchQueue.DefaultGlobalQueue.DispatchAsync(() =>
        {
            handler.Perform(new VNRequest[] { documentRequest }, out NSError error);
        });
        documentResult.Task.Wait();
        return documentResult.Task.Result;
    }
    void OnImageRectangleHandled(VNRequest request, NSError error)
    {
        if (error != null)
        {
            documentResult.SetException(new NSErrorException(error));
            return;
        }
        Logger.Log("Rectangle image handler");
        var decoder = PerformanceCounter.Start();
        var scanResults = new DocumentScanResult();
        var documentResults = new List<DocumentPointResults>();
        scanResults.Success = true;
        try
        {
            Logger.Log($"Taking observations ...");
            var observations = request.GetResults<VNRectangleObservation>();
            Logger.Log($"Rectangles found {observations.Length}");
            if (observations.Length > 0)
            {
                // foreach (var obs in observations)
                // {
                //     Logger.Log("Found observation!");
                //     var boundingBox = obs.BoundingBox;
                //     var topLeft = new Point((int)boundingBox.X, (int)boundingBox.Y);
                //     var topRight = new Point((int)boundingBox.X + (int)boundingBox.Width, (int)boundingBox.Y);
                //     var bottomLeft = new Point((int)boundingBox.X, (int)boundingBox.Y + (int)boundingBox.Height);
                //     var bottomRight = new Point((int)boundingBox.X + (int)boundingBox.Width, (int)boundingBox.Y + (int)boundingBox.Height);
                //     documentResults.Add(new DocumentPointResults(topLeft, topRight, bottomLeft, bottomRight));
                // }
            }
        }
        catch (Exception e)
        {
            Logger.Log($"Rectangle Observations Exception - {e.Message}");
        }
        PerformanceCounter.Stop(decoder, "Vision document rectangle handler take {0} ms.");
        scanResults.Points = documentResults;
        documentResult.SetResult(scanResults);
    }
    //public override void ScanningOptionsUpdate(ScanningOptionsBase options)
    //{
    //}

    CGImagePropertyOrientation ConvertToCGImagePropertyOrientation(UIImageOrientation orientation)
    {
        switch (orientation)
        {
            case UIImageOrientation.Up:
                return CGImagePropertyOrientation.Up;
            case UIImageOrientation.UpMirrored:
                return CGImagePropertyOrientation.UpMirrored;
            case UIImageOrientation.Down:
                return CGImagePropertyOrientation.Down;
            case UIImageOrientation.DownMirrored:
                return CGImagePropertyOrientation.DownMirrored;
            case UIImageOrientation.Left:
                return CGImagePropertyOrientation.Left;
            case UIImageOrientation.LeftMirrored:
                return CGImagePropertyOrientation.LeftMirrored;
            case UIImageOrientation.Right:
                return CGImagePropertyOrientation.Right;
            case UIImageOrientation.RightMirrored:
                return CGImagePropertyOrientation.RightMirrored;
            default:
                throw new Exception("unrecognized orientation");
        }
    }
}
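One note on the commented-out corner extraction above: VNRectangleObservation exposes its corners directly (TopLeft, TopRight, BottomLeft, BottomRight), and all Vision coordinates are normalized to 0..1 with a bottom-left origin, so casting BoundingBox values straight to integer pixels collapses them to zero. A minimal sketch of the conversion, where imageWidth and imageHeight are assumed to be the pixel dimensions of the frame handed to Vision:

// Sketch only: maps one normalized Vision corner point to pixel space.
// imageWidth/imageHeight are assumptions (the pixel size of the Vision input).
Point ToPixels(CGPoint normalized, nint imageWidth, nint imageHeight)
{
    // Vision's origin is bottom-left; UIKit's is top-left, hence the Y flip.
    return new Point((int)(normalized.X * imageWidth),
                     (int)((1 - normalized.Y) * imageHeight));
}

foreach (var obs in observations)
{
    documentResults.Add(new DocumentPointResults(
        ToPixels(obs.TopLeft, imageWidth, imageHeight),
        ToPixels(obs.TopRight, imageWidth, imageHeight),
        ToPixels(obs.BottomLeft, imageWidth, imageHeight),
        ToPixels(obs.BottomRight, imageWidth, imageHeight)));
}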
Related
In my Xamarin.Forms app, I created a custom camera using a camera view and custom renderers. Everything works fine. On Android, after the photo is captured I can check whether it contains a face, using Camera.IFaceDetectionListener. My question is: how can I achieve this on iOS? I know there is the Vision API, but I don't want live face tracking; I just want to check whether the taken photo contains a face. Any help is appreciated.
My iOS CameraPreview
public class UICameraPreview : UIView, IAVCaptureMetadataOutputObjectsDelegate
{
    AVCaptureVideoPreviewLayer previewLayer;
    public AVCaptureDevice[] videoDevices;
    CameraOptions cameraOptions;
    public AVCaptureStillImageOutput stillImageOutput;
    public AVCaptureDeviceInput captureDeviceInput;
    public AVCaptureDevice device;

    public event EventHandler<EventArgs> Tapped;

    public AVCaptureSession CaptureSession { get; set; }
    public bool IsPreviewing { get; set; }
    public AVCaptureStillImageOutput CaptureOutput { get; set; }

    public UICameraPreview(CameraOptions options)
    {
        cameraOptions = options;
        IsPreviewing = false;
        Initialize();
    }

    public override void LayoutSubviews()
    {
        base.LayoutSubviews();
        if (previewLayer != null)
            previewLayer.Frame = Bounds;
    }

    public override void TouchesBegan(NSSet touches, UIEvent evt)
    {
        base.TouchesBegan(touches, evt);
        OnTapped();
    }

    protected virtual void OnTapped()
    {
        Tapped?.Invoke(this, new EventArgs());
    }
    void Initialize()
    {
        CaptureSession = new AVCaptureSession();
        previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
        {
            Frame = Bounds,
            VideoGravity = AVLayerVideoGravity.ResizeAspectFill
        };
        videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
        device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
        if (device == null)
        {
            return;
        }
        NSError error;
        captureDeviceInput = new AVCaptureDeviceInput(device, out error);
        CaptureSession.AddInput(captureDeviceInput);
        // JPEG output settings: assign the dictionary to the output.
        var dictionary = new NSMutableDictionary();
        dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
        stillImageOutput = new AVCaptureStillImageOutput()
        {
            OutputSettings = dictionary
        };
        CaptureSession.AddOutput(stillImageOutput);
        Layer.AddSublayer(previewLayer);
        CaptureSession.StartRunning();
        IsPreviewing = true;
    }
    // Photo Capturing
    public async Task CapturePhoto()
    {
        try
        {
            var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
            var sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);
            var jpegData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
            var photo = new UIImage(jpegData);
            var rotatedPhoto = RotateImage(photo, 180f);
            CALayer layer = new CALayer
            {
                ContentsScale = 1.0f,
                Frame = Bounds,
                Contents = rotatedPhoto.CGImage //Contents = photo.CGImage,
            };
            CaptureSession.StopRunning();
            photo.SaveToPhotosAlbum((image, error) =>
            {
                if (!string.IsNullOrEmpty(error?.LocalizedDescription))
                {
                    Console.Error.WriteLine($"\t\t\tError: {error.LocalizedDescription}");
                }
            });
        }
        catch (Exception ex)
        {
            // Don't swallow capture failures silently; log them at minimum.
            Console.Error.WriteLine(ex);
        }
        //MainPage.UpdateSource(UIImageFromLayer(layer).AsJPEG().AsStream());
        //MainPage.UpdateImage(UIImageFromLayer(layer).AsJPEG().AsStream());
    }
}
My CameraPreviewRenderer
public class CameraPreviewRenderer : ViewRenderer<CameraPreview, UICameraPreview>, IAVCaptureMetadataOutputObjectsDelegate
{
    UICameraPreview uiCameraPreview;
    AVCaptureSession captureSession;
    AVCaptureDeviceInput captureDeviceInput;
    AVCaptureStillImageOutput stillImageOutput;

    protected override void OnElementChanged(ElementChangedEventArgs<CameraPreview> e)
    {
        base.OnElementChanged(e);
        if (e.OldElement != null)
        {
            // Unsubscribe
            uiCameraPreview.Tapped -= OnCameraPreviewTapped;
        }
        if (e.NewElement != null)
        {
            if (Control == null)
            {
                uiCameraPreview = new UICameraPreview(e.NewElement.Camera);
                SetNativeControl(uiCameraPreview);
                // Using MessagingCenter to take a photo when the button is clicked from shared code
                MessagingCenter.Subscribe<Camera_Popup>(this, "CaptureClick", async (sender) =>
                {
                    try
                    {
                        await uiCameraPreview.CapturePhoto();
                    }
                    catch (Exception ex)
                    {
                        return;
                    }
                });
            }
            MessagingCenter.Subscribe<Camera_Popup>(this, "RetryClick", (sender) =>
            {
                Device.BeginInvokeOnMainThread(() =>
                {
                    uiCameraPreview.CaptureSession.StartRunning();
                    uiCameraPreview.IsPreviewing = true;
                });
            });
            MessagingCenter.Subscribe<Camera_Popup>(this, "FlipClick", (sender) =>
            {
                try
                {
                    var devicePosition = uiCameraPreview.captureDeviceInput.Device.Position;
                    devicePosition = (devicePosition == AVCaptureDevicePosition.Front)
                        ? AVCaptureDevicePosition.Back
                        : AVCaptureDevicePosition.Front;
                    uiCameraPreview.device = uiCameraPreview.videoDevices.FirstOrDefault(d => d.Position == devicePosition);
                    uiCameraPreview.CaptureSession.BeginConfiguration();
                    uiCameraPreview.CaptureSession.RemoveInput(uiCameraPreview.captureDeviceInput);
                    uiCameraPreview.captureDeviceInput = AVCaptureDeviceInput.FromDevice(uiCameraPreview.device);
                    uiCameraPreview.CaptureSession.AddInput(uiCameraPreview.captureDeviceInput);
                    uiCameraPreview.CaptureSession.CommitConfiguration();
                }
                catch (Exception ex)
                {
                    // ex.InnerException may be null here; log the whole exception.
                    Console.WriteLine(ex);
                }
            });
            uiCameraPreview.Tapped += OnCameraPreviewTapped;
        }
    }
    void OnCameraPreviewTapped(object sender, EventArgs e)
    {
        if (uiCameraPreview.IsPreviewing)
        {
            uiCameraPreview.CaptureSession.StopRunning();
            uiCameraPreview.IsPreviewing = false;
        }
        else
        {
            uiCameraPreview.CaptureSession.StartRunning();
            uiCameraPreview.IsPreviewing = true;
        }
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            Control.CaptureSession.Dispose();
            Control.Dispose();
        }
        base.Dispose(disposing);
    }
}
The CoreImage framework has CIDetector, which provides image detectors for faces, QR codes, text, and so on: you pass in an image and get a specific "feature set" back.
Example from Xamarin docs:
var imageFile = "photoFace2.jpg";
var image = new UIImage(imageFile);
var context = new CIContext ();
var detector = CIDetector.CreateFaceDetector (context, true);
var ciImage = CIImage.FromCGImage (image.CGImage);
var features = detector.GetFeatures (ciImage);
Console.WriteLine ("Found " + features.Length + " faces");
re: https://learn.microsoft.com/en-us/dotnet/api/CoreImage.CIDetector?view=xamarin-ios-sdk-12
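Applied to the custom camera above, the same detector can run on the photo you already produce in CapturePhoto. A minimal sketch, reusing the CIDetector calls from the docs example (the ContainsFace helper name and how you wire the result into your flow are assumptions):

// Sketch: check the captured still for faces. `photo` is assumed to be the
// UIImage produced in CapturePhoto above; what you do with the result is up to you.
bool ContainsFace(UIImage photo)
{
    using (var context = new CIContext())
    using (var ciImage = CIImage.FromCGImage(photo.CGImage))
    {
        var detector = CIDetector.CreateFaceDetector(context, true);
        var features = detector.GetFeatures(ciImage);
        return features.Length > 0;  // one or more CIFaceFeature entries
    }
}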
I am using Xamarin.Android with a camera preview to take a photo,
but there are two things I haven't been able to do despite googling for a long time:
I want to show a message or popup (anything) after pressing the button to take a photo, like "photo taken".
I want to let the user focus on any point of the camera preview - tap to focus.
async void TakePhotoButtonTapped(object sender, EventArgs e)
{
    camera.StopPreview();
    Android.Hardware.Camera.Parameters parameters = camera.GetParameters();
    parameters.FocusMode = global::Android.Hardware.Camera.Parameters.FocusModeAuto;
    camera.SetParameters(parameters);
    var image = textureView.Bitmap;
    try
    {
        var absolutePath = Android.OS.Environment.GetExternalStoragePublicDirectory(Android.OS.Environment.DirectoryDcim).AbsolutePath;
        var folderPath = absolutePath + "/Camera";
        var filePath = System.IO.Path.Combine(folderPath, string.Format("photo_{0}.jpg", Guid.NewGuid()));
        using (var fileStream = new FileStream(filePath, FileMode.Create))
        {
            await image.CompressAsync(Bitmap.CompressFormat.Jpeg, 92, fileStream);
        }
        image.Recycle();
        // Tell the media scanner about the new file so it shows up in the gallery.
        var intent = new Android.Content.Intent(Android.Content.Intent.ActionMediaScannerScanFile);
        var file = new Java.IO.File(filePath);
        var uri = Android.Net.Uri.FromFile(file);
        intent.SetData(uri);
        MainActivity.Instance.SendBroadcast(intent);
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
    camera.StartPreview();
}
I tried this for focusing, but it is not working:
public void OnAutoFocus(bool success, Android.Hardware.Camera camera)
{
    var parameters = camera.GetParameters();
    if (parameters.FocusMode != Android.Hardware.Camera.Parameters.FocusModeContinuousPicture)
    {
        parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeContinuousPicture;
        if (parameters.MaxNumFocusAreas > 0)
        {
            parameters.FocusAreas = null;
        }
        camera.SetParameters(parameters);
        camera.StartPreview();
    }
}

public bool OnTouch(Android.Views.View view, MotionEvent e)
{
    if (camera != null)
    {
        var parameters = camera.GetParameters();
        camera.CancelAutoFocus();
        Rect focusRect = CalculateTapArea(e.GetX(), e.GetY(), 1f);
        if (parameters.FocusMode != Android.Hardware.Camera.Parameters.FocusModeAuto)
        {
            parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeAuto;
        }
        if (parameters.MaxNumFocusAreas > 0)
        {
            List<Area> mylist = new List<Area>();
            mylist.Add(new Android.Hardware.Camera.Area(focusRect, 1000));
            parameters.FocusAreas = mylist;
        }
        try
        {
            camera.CancelAutoFocus();
            camera.SetParameters(parameters);
            camera.StartPreview();
            camera.AutoFocus(this);
        }
        catch (System.Exception ex)
        {
            Console.WriteLine(ex.ToString());
            Console.Write(ex.StackTrace);
        }
        return true;
    }
    return false;
}
// Take float coordinates directly; the original `object` parameters would throw
// InvalidCastException when unboxing the float from e.GetX()/e.GetY() as int.
private Rect CalculateTapArea(float x, float y, float coefficient)
{
    var focusAreaSize = 500;
    int areaSize = Java.Lang.Float.ValueOf(focusAreaSize * coefficient).IntValue();
    int left = clamp((int)x - areaSize / 2, 0, textureView.Width - areaSize);
    int top = clamp((int)y - areaSize / 2, 0, textureView.Height - areaSize);
    RectF rectF = new RectF(left, top, left + areaSize, top + areaSize);
    Matrix.MapRect(rectF);
    return new Rect((int)System.Math.Round(rectF.Left), (int)System.Math.Round(rectF.Top),
        (int)System.Math.Round(rectF.Right), (int)System.Math.Round(rectF.Bottom));
}

private int clamp(int x, int min, int max)
{
    if (x > max)
    {
        return max;
    }
    if (x < min)
    {
        return min;
    }
    return x;
}
For focusing the camera when touching the preview you will need to (see the sketch after this list):
1. Add a touch event handler to listen for the user touching the preview.
2. Get the X and Y coordinates from that touch event, which are usually in the event arguments.
3. Create a focus rectangle telling the Android Camera where and in which area to focus.
4. Set FocusAreas and MeteringAreas on Camera.Parameters from your rectangle.
5. Set the new Camera.Parameters on the camera.
6. Set an AutoFocus callback on the camera.
7. When the callback triggers, remove the callback from the camera and cancel auto focus.
To notify the user that a picture was taken, you can use a Toast or reserve an area of your preview for such messages. It is entirely up to you how you notify the user.
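A minimal sketch of both pieces against the old Android.Hardware.Camera API used in the question (names like camera come from the snippets above; the Toast context is an assumption):

// 1) Show a short confirmation once the photo is saved; call this at the end of
//    TakePhotoButtonTapped. Application.Context is an assumption, any Activity
//    context works, and the Toast must be shown from the UI thread.
void NotifyPhotoTaken()
{
    Toast.MakeText(Android.App.Application.Context, "Photo taken", ToastLength.Short).Show();
}

// 2) Step 7 from the list: when the AutoFocus callback fires, cancel the pending
//    focus request and hand control back to continuous focus.
public void OnAutoFocus(bool success, Android.Hardware.Camera camera)
{
    camera.CancelAutoFocus();
    var parameters = camera.GetParameters();
    parameters.FocusMode = Android.Hardware.Camera.Parameters.FocusModeContinuousPicture;
    camera.SetParameters(parameters);
}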
I am trying to get the look above on my tab bar in Xamarin.Forms. I tried customizing the tab bar using a renderer, but still could not get the expected output.
Output I am getting:
This is what I have tried so far:
[assembly: ExportRenderer(typeof(BottomNavTabPage), typeof(BottomNavTabPageRenderer))]
namespace HealthMobile.Droid.Renderers
{
    public class BottomNavTabPageRenderer : TabbedPageRenderer
    {
        private bool _isShiftModeSet;

        public BottomNavTabPageRenderer(Context context)
            : base(context)
        {
        }

        protected override void OnVisibilityChanged(Android.Views.View changedView, [GeneratedEnum] ViewStates visibility)
        {
            base.OnVisibilityChanged(changedView, visibility);
            var tabs = changedView.FindViewById<TabLayout>(Resource.Id.sliding_tabs);
            if (tabs != null)
            {
                ViewGroup vg = (ViewGroup)tabs.GetChildAt(0);
                int tabsCount = vg.ChildCount;
            }
        }

        //protected override void DispatchDraw(global::Android.Graphics.Canvas canvas)
        //{
        //    base.DispatchDraw(canvas);
        //    SetTabIcons();
        //    // var tabLayout = (TabLayout)GetChildAt(1);
        //}

        //private void SetTabIcons()
        //{
        //    var element = this.Element;
        //    if (null == element)
        //    {
        //        return;
        //    }
        //    Activity activity = this.Context as Activity;
        //    if ((null != activity) && (null != activity.ActionBar) && (activity.ActionBar.TabCount > 0))
        //    {
        //        for (int i = 0; i < element.Children.Count; i += 1)
        //        {
        //            var tab = activity.ActionBar.GetTabAt(i);
        //            var page = element.Children[i];
        //            if ((null != tab) && (null != page) && (null != page.Icon)
        //                && (tab.CustomView == null))
        //            {
        //                var resourceId = activity.Resources.GetIdentifier(page.Icon.File.ToLowerInvariant(), "drawable", this.Context.PackageName);
        //                LinearLayout tabHeader = new LinearLayout(activity) { Orientation = Orientation.Vertical };
        //                ImageView tabImg = new ImageView(activity);
        //                TextView tabTitle = new TextView(activity);
        //                tabImg.SetImageResource(resourceId);
        //                tabTitle.Text = page.Title;
        //                tabTitle.SetTextColor(Android.Graphics.Color.White);
        //                tabHeader.AddView(tabTitle);
        //                tabHeader.AddView(tabImg);
        //                tab.SetCustomView(tabHeader);
        //            }
        //        }
        //    }
        //}
        protected override void OnElementChanged(ElementChangedEventArgs<TabbedPage> e)
        {
            base.OnElementChanged(e);
            var childViews = GetAllChildViews(ViewGroup);
            //tab.SetIcon(Resource.Drawable.icon);
            var scale = Resources.DisplayMetrics.Density;
            var paddingDp = 0;
            var dpAsPixels = (int)(paddingDp * scale + 0.5f);
            foreach (var childView in childViews)
            {
                if (childView is BottomNavigationItemView tab)
                {
                    //tab.SetPadding(-50, -100, -50, -100);
                }
                else if (childView is TextView textView)
                {
                    textView.SetTextColor(Android.Graphics.Color.Transparent);
                }
            }
        }

        protected override void SetTabIcon(TabLayout.Tab tab, FileImageSource icon)
        {
            base.SetTabIcon(tab, icon);
        }

        protected override void OnLayout(bool changed, int l, int t, int r, int b)
        {
            base.OnLayout(changed, l, t, r, b);
            try
            {
                if (!_isShiftModeSet)
                {
                    var children = GetAllChildViews(ViewGroup);
                    if (children.SingleOrDefault(x => x is BottomNavigationView) is BottomNavigationView bottomNav)
                    {
                        bottomNav.SetShiftMode(false, false);
                        _isShiftModeSet = true;
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine($"Error setting ShiftMode: {e}");
            }
        }

        private List<View> GetAllChildViews(View view)
        {
            if (!(view is ViewGroup group))
            {
                return new List<View> { view };
            }
            var result = new List<View>();
            for (int i = 0; i < group.ChildCount; i++)
            {
                var child = group.GetChildAt(i);
                var childList = new List<View> { child };
                childList.AddRange(GetAllChildViews(child));
                result.AddRange(childList);
            }
            return result.Distinct().ToList();
        }
    }
}
I am trying to make it look something like this:
Output expecting:
I also tried setting up the icons, but the SetTabIcons method never gets triggered.
I posted a couple of other questions about ZXing; please don't mark this as a duplicate just because they are about ZXing.
In my Xamarin iOS app, I am using ZXing to detect barcodes, with https://github.com/Redth/ZXing.Net.Mobile/tree/master/Samples/iOS as the example.
I am using a subview to scan for barcodes. The custom overlay and everything else works fine, but it never detects a barcode when I scan.
Can anyone tell me where I am going wrong or what I am missing?
CODE
public UIView camView;
AVCaptureScannerView scannerView;
UIActivityIndicatorView loadingView;
UIView loadingBg;
UIView topBg;
UIView bottomBg;
MobileBarcodeScanner scanner;

public event Action<ZXing.Result> OnScannedResult;
public MobileBarcodeScanningOptions ScanningOptions { get; set; }

public override void ViewDidLoad()
{
    camView = new UIView(new CGRect(0, 0, this.View.Frame.Width, this.View.Frame.Height / 3)) { BackgroundColor = UIColor.Clear };
    scanner = new MobileBarcodeScanner();
    Root = new RootElement("ZXingDwatNet.Mobile") {
        new Section {
            camView
        }
    };
    scannerView = new AVCaptureScannerView(camView.Frame);
    camView = scannerView;
    loadingBg = camView; // new UIView(this.View.Frame) { BackgroundColor = UIColor.Purple, AutoresizingMask = UIViewAutoresizing.FlexibleDimensions };
    loadingView = new UIActivityIndicatorView(UIActivityIndicatorViewStyle.White)
    {
        AutoresizingMask = UIViewAutoresizing.FlexibleMargins
    };
    loadingView.Frame = new CGRect((this.View.Frame.Width - loadingView.Frame.Width) / 4,
        (this.View.Frame.Height - loadingView.Frame.Height) / 4,
        loadingView.Frame.Width / 4,
        loadingView.Frame.Height / 4);
    loadingBg.AddSubview(loadingView);
    View.AddSubview(loadingBg);
    loadingView.StartAnimating();
    this.View.InsertSubviewBelow(scannerView, loadingView);
    this.View.AutoresizingMask = UIViewAutoresizing.FlexibleWidth | UIViewAutoresizing.FlexibleHeight;
}
void HandleScanResult(ZXing.Result result)
{
    string msg = "";
    if (result != null && !string.IsNullOrEmpty(result.Text))
        msg = "Found Barcode: " + result.Text;
    else
        msg = "Scanning Canceled!";
    this.InvokeOnMainThread(() =>
    {
        var av = new UIAlertView("Barcode Result", msg, null, "OK", null);
        av.Show();
    });
}
public override void ViewDidAppear(bool animated)
{
    //scannerView.OnScannerSetupComplete += HandleOnScannerSetupComplete;
    //camView = scannerView;
    var options = new MobileBarcodeScanningOptions
    {
        AutoRotate = false,
        UseFrontCameraIfAvailable = false,
        TryHarder = true
    };
    ScanningOptions = options;
    if (UIDevice.CurrentDevice.CheckSystemVersion(7, 0))
    {
        UIApplication.SharedApplication.StatusBarStyle = UIStatusBarStyle.Default;
        SetNeedsStatusBarAppearanceUpdate();
    }
    else
        UIApplication.SharedApplication.SetStatusBarStyle(UIStatusBarStyle.BlackTranslucent, false);
    Console.WriteLine("Starting to scan...");
    Task.Factory.StartNew(() =>
    {
        BeginInvokeOnMainThread(() => scannerView.StartScanning(result =>
        {
            //if (!ContinuousScanning)
            //{
            //    Console.WriteLine("Stopping scan...");
            //    scannerView.StopScanning();
            //}
            var evt = this.OnScannedResult;
            if (evt != null)
                evt(result);
        }, this.ScanningOptions));
    });
}
void HandleOnScannerSetupComplete()
{
    BeginInvokeOnMainThread(() =>
    {
        if (loadingView != null && loadingBg != null && loadingView.IsAnimating)
        {
            loadingView.StopAnimating();
            UIView.BeginAnimations("zoomout");
            UIView.SetAnimationDuration(2.0f);
            UIView.SetAnimationCurve(UIViewAnimationCurve.EaseOut);
            loadingBg.Transform = CGAffineTransform.MakeScale(2.0f, 2.0f);
            loadingBg.Alpha = 0.0f;
            UIView.CommitAnimations();
            loadingBg.RemoveFromSuperview();
        }
    });
}
NOTE: I'm using Xamarin.Mac, but I believe the intent and/or missteps should be clear enough to most Swift Cocoa developers.
PROBLEM:
My custom filter is not being applied to the view it backs.
EXAMPLE
var builtInFilter = new CIColorInvert();
builtInFilter.SetDefaults();
var customFilter = new HazeFilter();
customFilter.SetDefaults();
//In both cases here, the Image and OutputImage properties will have a null value
Layer.Filters = new CIFilter[1]{builtInFilter}; //Works
Layer.Filters = new CIFilter[1]{customFilter}; //Does nothing
The problem isn't that my custom filter is incapable of doing anything: when I assign its Image property and draw its OutputImage directly, it works as expected.
This tells me that the Kernel and the OutputImage method are functioning properly.
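For reference, this is the direct-use path that works, as a sketch; sourceImage stands in for whatever CIImage you draw from:

// Direct application works: set Image, then draw OutputImage yourself.
// `sourceImage` is a placeholder CIImage (e.g. from a CGImage or file URL).
var filter = new HazeFilter();
filter.SetDefaults();
filter.Image = sourceImage;          // hypothetical input image
CIImage result = filter.OutputImage; // kernel runs and produces output as expected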
Source Code
HazeFilter.cs
public class HazeFilter : CIFilter
{
    static CIKernel hazeRemovalKernel;

    public HazeFilter() : base()
    {
        if (hazeRemovalKernel == null) {
            hazeRemovalKernel = CIKernel.FromProgramSingle(@"
                kernel vec4 myHazeRemovalKernel(sampler src, __color color, float distance, float slope)
                {
                    vec4 t;
                    float d;
                    d = destCoord().y * slope + distance;
                    t = unpremultiply(sample(src, samplerCoord(src)));
                    t = (t - d * color) / (1.0 - d);
                    return premultiply(t);
                }");
        }
    }
    public override void SetDefaults()
    {
        base.SetDefaults();
        inputColor = CIColor.FromCGColor(NSColor.Purple.CGColor);
        inputDistance = 0.8;
        inputSlope = 0.002;
    }

    CIImage image;
    [Export("inputImage")]
    public new CIImage Image
    {
        get { return image; }
        set {
            WillChangeValue("inputImage");
            image = value;
            DidChangeValue("inputImage");
        }
    }

    CIColor color;
    [Export("inputColor")]
    public CIColor inputColor
    {
        get { return color; }
        set {
            WillChangeValue("inputColor");
            color = value;
            DidChangeValue("inputColor");
        }
    }

    NSNumber distance;
    [Export("inputDistance")]
    public NSNumber inputDistance {
        get { return distance; }
        set {
            WillChangeValue("inputDistance");
            distance = value;
            DidChangeValue("inputDistance");
        }
    }

    NSNumber slope;
    [Export("inputSlope")]
    public NSNumber inputSlope {
        get { return slope; }
        set {
            WillChangeValue("inputSlope");
            slope = value;
            DidChangeValue("inputSlope");
        }
    }

    [Export("outputImage")]
    public new CIImage OutputImage
    {
        get
        {
            if (Image == null)
                return null;
            var inputSampler = new CISampler(Image);
            var argumentArray = NSArray.FromNSObjects(new NSObject[] {
                inputSampler,
                Runtime.GetNSObject(inputColor.Handle),
                inputDistance,
                inputSlope
            });
            return Apply(hazeRemovalKernel, argumentArray, null);
        }
    }
}
Canvas.cs
public class Canvas : NSView
{
    public Canvas(CGRect rect) : base(rect)
    {
        WantsLayer = true;
        Layer.BackgroundColor = NSColor.Clear.CGColor;
        Layer.MasksToBounds = true;
        Layer.NeedsDisplayOnBoundsChange = true;
        LayerUsesCoreImageFilters = true;
    }

    CIFilter effect;
    public CIFilter Effect {
        get { return effect; }
        set {
            effect = value;
            Layer.Filters = new CIFilter[1] { Effect };
            NeedsDisplay = true;
        }
    }
}
Instantiation
var builtInFilter = new CIColorInvert();
builtInFilter.SetDefaults();
var customFilter = new HazeFilter();
customFilter.SetDefaults();
wrapPanel.Effect = builtInFilter; //Works as expected
wrapPanel.Effect = customFilter; //Does nothing