Sprite image rendering incorrectly on android tablet - animation

I have a sprite animation in my app that works fine on standard phone-sized screens, but whenever I run it on a tablet the animation gets glitchy and doesn't look right at all. I think it has something to do with the height and width of the image. I've tried setting them in dp and sp, but it still won't render correctly on the tablet.
Here's my animation XML:
<LinearLayout
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:gravity="center_horizontal"
    android:paddingLeft="100dp" >

    <view
        android:id="@+id/animation"
        android:layout_width="180dp"
        android:layout_height="220dp"
        class="com.scale.AndroidAnimation" />
</LinearLayout>
Here's my animation code
public class AndroidAnimation extends View {
    private Bitmap mAnimation;
    private Rect mSRectangle;
    private int mNoOfFrames;
    private int mCurrentFrame;
    private int mSpriteHeight;
    private int mSpriteWidth;

    public AndroidAnimation(Context context, AttributeSet aSet) {
        super(context, aSet);
        mSRectangle = new Rect(0, 0, 0, 0);
        mCurrentFrame = 0;
    }

    public void init(Bitmap theBitmap, int Height, int Width, int theFrameCount) {
        mAnimation = theBitmap;
        mSpriteHeight = Height;
        mSpriteWidth = Width;
        mSRectangle.top = 0;
        mSRectangle.bottom = mSpriteHeight;
        mSRectangle.left = 0;
        mSRectangle.right = mSpriteWidth;
        mNoOfFrames = theFrameCount;
        mCurrentFrame = 0;
    }

    public void update() {
        mCurrentFrame++;
        mCurrentFrame %= mNoOfFrames;
        mSRectangle.left = mCurrentFrame * mSpriteWidth;
        mSRectangle.right = mSRectangle.left + mSpriteWidth;
    }

    @Override
    public void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        Rect dest = new Rect(0, 0, mSpriteWidth, mSpriteHeight);
        canvas.drawBitmap(mAnimation, mSRectangle, dest, null);
    }
}
Where I initialize it
anim = (AndroidAnimation) findViewById(R.id.animation);
anim.init(BitmapFactory.decodeResource(getResources(), R.drawable.connecting_sprite), 300, 170, 3);
mHandler.removeCallbacks(myTimerTask);
mHandler.postDelayed(myTimerTask, 300);
My handler that advances it:
public void run() {
    mHandler.removeCallbacks(myTimerTask);
    if (mCurrScreen == WAITING_SCREEN) {
        mHandler.postDelayed(myTimerTask, 300);
    }
    anim.update();
    anim.invalidate();
}
Thanks in advance!

It is a good idea to design a separate layout for tablets. Create a layout-xlarge folder under res and put a new animation.xml layout file there, optimized for the tablet screen.
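For example, a res/layout-xlarge/animation.xml along these lines (the dp values here are illustrative placeholders, not taken from the question) would be picked up automatically on xlarge screens without any code changes:

<!-- res/layout-xlarge/animation.xml: a sketch of a tablet-specific copy of the
     layout; choose dp values that keep your sprite frame's aspect ratio. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:gravity="center_horizontal"
    android:paddingLeft="160dp" >

    <view
        android:id="@+id/animation"
        android:layout_width="270dp"
        android:layout_height="330dp"
        class="com.scale.AndroidAnimation" />
</LinearLayout>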

Related

How to rotate camera input in 'hands' of 'mediapipe-solutions-examples'

I downloaded MediaPipe for Android; inside it are the Android module files at mediapipe_repo\mediapipe\mediapipe\examples\android\solutions\hands, the 'hands' MediaPipe solution example.
This is a question about MainActivity. Because I want to hold the screen horizontally, I want to rotate the camera preview the way the rotateBitmap function rotates an image, but I don't know how.
I really want to cry..
The following is the MainActivity with the video-related code removed from the original MainActivity.
package com.google.mediapipe.examples.hands;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.FrameLayout;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.formats.proto.LandmarkProto.Landmark;
import com.google.mediapipe.formats.proto.LandmarkProto.NormalizedLandmark;
import com.google.mediapipe.solutioncore.CameraInput;
import com.google.mediapipe.solutioncore.SolutionGlSurfaceView;
import com.google.mediapipe.solutioncore.VideoInput;
import com.google.mediapipe.solutions.hands.HandLandmark;
import com.google.mediapipe.solutions.hands.Hands;
import com.google.mediapipe.solutions.hands.HandsOptions;
import com.google.mediapipe.solutions.hands.HandsResult;
import java.io.IOException;
import java.io.InputStream;
/** Main activity of MediaPipe Hands app. */
public class MainActivity extends AppCompatActivity {
  private static final String TAG = "MainActivity";
  private Hands hands;
  // Run the pipeline and the model inference on GPU or CPU.
  private static final boolean RUN_ON_GPU = true;

  private enum InputSource {
    UNKNOWN,
    IMAGE,
    CAMERA,
  }
  private InputSource inputSource = InputSource.UNKNOWN;

  // Image demo UI and image loader components.
  private ActivityResultLauncher<Intent> imageGetter;
  private HandsResultImageView imageView;
  // Live camera demo UI and camera components.
  private CameraInput cameraInput;
  private SolutionGlSurfaceView<HandsResult> glSurfaceView;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    setupStaticImageDemoUiComponents();
    // Live camera
    setupLiveDemoUiComponents();
  }
  @Override
  protected void onResume() {
    super.onResume();
    if (inputSource == InputSource.CAMERA) {
      // Restarts the camera and the opengl surface rendering.
      cameraInput = new CameraInput(this);
      cameraInput.setNewFrameListener(textureFrame -> hands.send(textureFrame));
      glSurfaceView.post(this::startCamera);
      glSurfaceView.setVisibility(View.VISIBLE);
    }
  }

  @Override
  protected void onPause() {
    super.onPause();
    if (inputSource == InputSource.CAMERA) {
      glSurfaceView.setVisibility(View.GONE);
      cameraInput.close();
    }
  }

  private Bitmap downscaleBitmap(Bitmap originalBitmap) {
    double aspectRatio = (double) originalBitmap.getWidth() / originalBitmap.getHeight();
    int width = imageView.getWidth();
    int height = imageView.getHeight();
    if (((double) imageView.getWidth() / imageView.getHeight()) > aspectRatio) {
      width = (int) (height * aspectRatio);
    } else {
      height = (int) (width / aspectRatio);
    }
    return Bitmap.createScaledBitmap(originalBitmap, width, height, false);
  }
  @RequiresApi(api = Build.VERSION_CODES.N)
  private Bitmap rotateBitmap(Bitmap inputBitmap, InputStream imageData) throws IOException {
    int orientation =
        new ExifInterface(imageData)
            .getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
    if (orientation == ExifInterface.ORIENTATION_NORMAL) {
      return inputBitmap;
    }
    Matrix matrix = new Matrix();
    switch (orientation) {
      case ExifInterface.ORIENTATION_ROTATE_90:
        matrix.postRotate(90);
        break;
      case ExifInterface.ORIENTATION_ROTATE_180:
        matrix.postRotate(180);
        break;
      case ExifInterface.ORIENTATION_ROTATE_270:
        matrix.postRotate(270);
        break;
      default:
        matrix.postRotate(0);
    }
    return Bitmap.createBitmap(
        inputBitmap, 0, 0, inputBitmap.getWidth(), inputBitmap.getHeight(), matrix, true);
  }
  /** Sets up the UI components for the static image demo. */
  private void setupStaticImageDemoUiComponents() {
    // The Intent to access gallery and read images as bitmap.
    imageGetter =
        registerForActivityResult(
            new ActivityResultContracts.StartActivityForResult(),
            result -> {
              Intent resultIntent = result.getData();
              if (resultIntent != null) {
                if (result.getResultCode() == RESULT_OK) {
                  Bitmap bitmap = null;
                  try {
                    bitmap =
                        downscaleBitmap(
                            MediaStore.Images.Media.getBitmap(
                                this.getContentResolver(), resultIntent.getData()));
                  } catch (IOException e) {
                    Log.e(TAG, "Bitmap reading error:" + e);
                  }
                  try {
                    InputStream imageData =
                        this.getContentResolver().openInputStream(resultIntent.getData());
                    bitmap = rotateBitmap(bitmap, imageData);
                  } catch (IOException e) {
                    Log.e(TAG, "Bitmap rotation error:" + e);
                  }
                  if (bitmap != null) {
                    hands.send(bitmap);
                  }
                }
              }
            });
    Button loadImageButton = findViewById(R.id.button_load_picture);
    loadImageButton.setOnClickListener(
        v -> {
          if (inputSource != InputSource.IMAGE) {
            stopCurrentPipeline();
            setupStaticImageModePipeline();
          }
          // Reads images from gallery.
          Intent pickImageIntent = new Intent(Intent.ACTION_PICK);
          pickImageIntent.setDataAndType(MediaStore.Images.Media.INTERNAL_CONTENT_URI, "image/*");
          imageGetter.launch(pickImageIntent);
        });
    imageView = new HandsResultImageView(this);
  }
  /** Sets up core workflow for static image mode. */
  private void setupStaticImageModePipeline() {
    this.inputSource = InputSource.IMAGE;
    // Initializes a new MediaPipe Hands solution instance in the static image mode.
    hands =
        new Hands(
            this,
            HandsOptions.builder()
                .setStaticImageMode(true)
                .setMaxNumHands(2)
                .setRunOnGpu(RUN_ON_GPU)
                .build());
    // Connects MediaPipe Hands solution to the user-defined HandsResultImageView.
    hands.setResultListener(
        handsResult -> {
          logWristLandmark(handsResult, /*showPixelValues=*/ true);
          imageView.setHandsResult(handsResult);
          runOnUiThread(() -> imageView.update());
        });
    hands.setErrorListener((message, e) -> Log.e(TAG, "MediaPipe Hands error:" + message));

    // Updates the preview layout.
    FrameLayout frameLayout = findViewById(R.id.preview_display_layout);
    frameLayout.removeAllViewsInLayout();
    imageView.setImageDrawable(null);
    frameLayout.addView(imageView);
    imageView.setVisibility(View.VISIBLE);
  }
  /** Sets up the UI components for the live demo with camera input. */
  private void setupLiveDemoUiComponents() {
    Button startCameraButton = findViewById(R.id.button_start_camera);
    startCameraButton.setOnClickListener(
        v -> {
          if (inputSource == InputSource.CAMERA) {
            return;
          }
          stopCurrentPipeline();
          setupStreamingModePipeline(InputSource.CAMERA);
        });
  }

  /** Sets up core workflow for streaming mode. */
  private void setupStreamingModePipeline(InputSource inputSource) {
    this.inputSource = inputSource;
    // Initializes a new MediaPipe Hands solution instance in the streaming mode.
    hands =
        new Hands(
            this,
            HandsOptions.builder()
                .setStaticImageMode(false)
                .setMaxNumHands(2)
                .setRunOnGpu(RUN_ON_GPU)
                .build());
    hands.setErrorListener((message, e) -> Log.e(TAG, "MediaPipe Hands error:" + message));
    if (inputSource == InputSource.CAMERA) {
      cameraInput = new CameraInput(this);
      cameraInput.setNewFrameListener(textureFrame -> hands.send(textureFrame));
    }

    // Initializes a new Gl surface view with a user-defined HandsResultGlRenderer.
    glSurfaceView =
        new SolutionGlSurfaceView<>(this, hands.getGlContext(), hands.getGlMajorVersion());
    glSurfaceView.setSolutionResultRenderer(new HandsResultGlRenderer());
    glSurfaceView.setRenderInputImage(true);
    hands.setResultListener(
        handsResult -> {
          logWristLandmark(handsResult, /*showPixelValues=*/ false);
          glSurfaceView.setRenderData(handsResult);
          glSurfaceView.requestRender();
        });

    // The runnable to start camera after the gl surface view is attached.
    // For video input source, videoInput.start() will be called when the video uri is available.
    if (inputSource == InputSource.CAMERA) {
      glSurfaceView.post(this::startCamera);
    }

    // Updates the preview layout.
    FrameLayout frameLayout = findViewById(R.id.preview_display_layout);
    imageView.setVisibility(View.GONE);
    frameLayout.removeAllViewsInLayout();
    frameLayout.addView(glSurfaceView);
    glSurfaceView.setVisibility(View.VISIBLE);
    frameLayout.requestLayout();
  }
  private void startCamera() {
    cameraInput.start(
        this,
        hands.getGlContext(),
        CameraInput.CameraFacing.FRONT,
        glSurfaceView.getWidth(),
        glSurfaceView.getHeight());
  }

  private void stopCurrentPipeline() {
    if (cameraInput != null) {
      cameraInput.setNewFrameListener(null);
      cameraInput.close();
    }
    if (glSurfaceView != null) {
      glSurfaceView.setVisibility(View.GONE);
    }
    if (hands != null) {
      hands.close();
    }
  }
  private void logWristLandmark(HandsResult result, boolean showPixelValues) {
    if (result.multiHandLandmarks().isEmpty()) {
      return;
    }
    NormalizedLandmark wristLandmark =
        result.multiHandLandmarks().get(0).getLandmarkList().get(HandLandmark.WRIST);
    // For Bitmaps, show the pixel values. For texture inputs, show the normalized coordinates.
    if (showPixelValues) {
      int width = result.inputBitmap().getWidth();
      int height = result.inputBitmap().getHeight();
      Log.i(
          TAG,
          String.format(
              "MediaPipe Hand wrist coordinates (pixel values): x=%f, y=%f",
              wristLandmark.getX() * width, wristLandmark.getY() * height));
    } else {
      Log.i(
          TAG,
          String.format(
              "MediaPipe Hand wrist normalized coordinates (value range: [0, 1]): x=%f, y=%f",
              wristLandmark.getX(), wristLandmark.getY()));
    }
    if (result.multiHandWorldLandmarks().isEmpty()) {
      return;
    }
    Landmark wristWorldLandmark =
        result.multiHandWorldLandmarks().get(0).getLandmarkList().get(HandLandmark.WRIST);
    Log.i(
        TAG,
        String.format(
            "MediaPipe Hand wrist world coordinates (in meters with the origin at the hand's"
                + " approximate geometric center): x=%f m, y=%f m, z=%f m",
            wristWorldLandmark.getX(), wristWorldLandmark.getY(), wristWorldLandmark.getZ()));
  }
}
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <LinearLayout
        android:id="@+id/buttons"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        style="?android:attr/buttonBarStyle"
        android:gravity="center"
        android:orientation="horizontal">

        <Button
            android:id="@+id/button_load_picture"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            style="?android:attr/buttonBarButtonStyle"
            android:text="@string/load_picture" />

        <Button
            android:id="@+id/button_start_camera"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            style="?android:attr/buttonBarButtonStyle"
            android:text="@string/start_camera" />
    </LinearLayout>

    <FrameLayout
        android:id="@+id/preview_display_layout"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>
</LinearLayout>
To restate the question: I expect to learn how to rotate the camera input in the 'hands' example of 'mediapipe-solutions-examples'.

How can I build slanted dashed line in Xamarin application

I want to display a slanted dashed line in my application that would look something like this. Using the Line API in Xamarin I was able to create a dashed line, but I haven't had any luck making each dash slanted. I also looked at the SkiaSharp library for Xamarin but didn't find anything that can help with slanting.
@jason thank you for your response. I was so focused on getting a "slanted dashed line" that what you suggested never crossed my mind. Anyway, now I have the solution for my issue.
public partial class SlantedDashedView : ContentView
{
    private readonly SKPaint paint = new SKPaint
    {
        Style = SKPaintStyle.Fill,
        Color = Color.Red.ToSKColor()
    };

    private readonly SKRect rect = new SKRect
    {
        Location = new SKPoint(0, 0),
        Size = new SKSize(25, 30)
    };

    public SlantedDashedView()
    {
        InitializeComponent();
        mainCanvas.InvalidateSurface();
    }

    private void MainCanvas_PaintSurface(object sender, SKPaintSurfaceEventArgs e)
    {
        SKImageInfo info = e.Info;
        SKSurface surface = e.Surface;
        SKCanvas canvas = surface.Canvas;
        int currentY = 0;
        canvas.Skew(0, -1);
        while (currentY < info.Height)
        {
            canvas.DrawRect(rect, paint);
            currentY += 45;
            canvas.Translate(0, 45);
        }
    }
}
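For reference, the code-behind above assumes an SKCanvasView named mainCanvas defined in the ContentView's XAML. A minimal sketch of that XAML (not part of the original answer; the x:Class namespace "MyApp" is a placeholder) could look like:

<?xml version="1.0" encoding="utf-8" ?>
<!-- Sketch: hosts the SkiaSharp canvas that the code-behind above draws on. -->
<ContentView xmlns="http://xamarin.com/schemas/2014/forms"
             xmlns:x="http://schemas.microsoft.com/winfx/2009/xaml"
             xmlns:skia="clr-namespace:SkiaSharp.Views.Forms;assembly=SkiaSharp.Views.Forms"
             x:Class="MyApp.SlantedDashedView">
    <skia:SKCanvasView x:Name="mainCanvas"
                       PaintSurface="MainCanvas_PaintSurface" />
</ContentView>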
Here is the result

How to get the rounded image source or bitmap image for cropping image in elliptical shape in UWP

I need to crop an image into an elliptical shape, but I do not want to use an Ellipse filled with an ImageBrush as mentioned in this link; instead, I need the bitmap itself to be rounded/elliptical rather than rectangular.
Sometimes I want to crop a rectangle and sometimes an ellipse, so I cannot use the Ellipse-and-fill approach.
Is there an alternative solution? It would also work if I could clip the Image in an elliptical shape, but Clip on Image accepts only a RectangleGeometry.
You can create a custom UserControl and, inside it, use the Win2D CanvasControl to display the image. There you can draw the image in any shape you want, using the CreateLayer function to mask the drawn image with a shape. For example, in the XAML:
<UserControl
    x:Class="UWPClassLib.MyImageControl"
    xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
    xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
    xmlns:local="using:UWPClassLib"
    xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
    xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
    xmlns:win2d="using:Microsoft.Graphics.Canvas.UI.Xaml"
    mc:Ignorable="d">

    <win2d:CanvasControl x:Name="w2dCanvas"
                         Draw="w2dCanvas_Draw" />
</UserControl>
And in the code-behind:
public enum MaskShape
{
    rectangle,
    circle
}

public sealed partial class MyImageControl : UserControl
{
    public WriteableBitmap Bitmap
    {
        get { return (WriteableBitmap)GetValue(BitmapProperty); }
        set { SetValue(BitmapProperty, value); }
    }

    public static readonly DependencyProperty BitmapProperty =
        DependencyProperty.Register(nameof(Bitmap), typeof(WriteableBitmap), typeof(MyImageControl),
            new PropertyMetadata(null, OnBitmapPropertyChanged));

    private static void OnBitmapPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
    {
        var myctrl = (MyImageControl)d;
        myctrl.TryCreateResource();
    }

    public MaskShape Shape
    {
        get { return (MaskShape)GetValue(ShapeProperty); }
        set { SetValue(ShapeProperty, value); }
    }

    public static readonly DependencyProperty ShapeProperty =
        DependencyProperty.Register(nameof(Shape), typeof(MaskShape), typeof(MyImageControl),
            new PropertyMetadata(MaskShape.circle));

    private CanvasBitmap Source;

    public MyImageControl()
    {
        this.InitializeComponent();
    }

    public void Invalidate()
    {
        w2dCanvas.Invalidate();
    }

    private void w2dCanvas_Draw(Microsoft.Graphics.Canvas.UI.Xaml.CanvasControl sender, Microsoft.Graphics.Canvas.UI.Xaml.CanvasDrawEventArgs args)
    {
        using (var session = args.DrawingSession)
        {
            session.Clear(Colors.Transparent);
            if (CheckResourceCreated())
            {
                using (var mask = GetMaskShape())
                using (var layer = session.CreateLayer(1.0f, mask))
                {
                    session.DrawImage(Source);
                }
                // either you can do that or can use
                // session.FillGeometry(mask, imagebrush); // and the image brush can be made from Source, e.g.
                // imagebrush = new CanvasImageBrush(w2dCanvas, Source);
            }
        }
    }

    private CanvasGeometry GetMaskShape()
    {
        switch (Shape)
        {
            default:
            case MaskShape.circle:
                var center = new System.Numerics.Vector2((float)this.ActualWidth / 2, (float)this.ActualHeight / 2);
                var radiusX = (float)this.ActualWidth / 2;
                var radiusY = (float)this.ActualHeight / 2;
                return CanvasGeometry.CreateEllipse(w2dCanvas, center, radiusX, radiusY);
            case MaskShape.rectangle:
                return CanvasGeometry.CreateRectangle(w2dCanvas, new Rect(0, 0, this.ActualWidth, this.ActualHeight));
        }
    }

    private bool CheckResourceCreated()
    {
        if (Source == null)
        {
            TryCreateResource();
        }
        return (Source != null);
    }

    private void TryCreateResource()
    {
        try
        {
            if (Bitmap == null)
                return;
            Source = CanvasBitmap.CreateFromBytes(w2dCanvas, Bitmap.PixelBuffer.ToArray(), Bitmap.PixelWidth, Bitmap.PixelHeight, DirectXPixelFormat.B8G8R8A8UIntNormalized);
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.Message);
        }
    }
}
Now you can use it anywhere to display an image in a rectangle or circle; you just have to change the MaskShape and call Invalidate.
Please forgive any errors, since I wrote this on the go and haven't tested it; it's just to give you the idea.
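To make the usage concrete, here is a sketch of consuming the control from code-behind (the names rootPanel and myWriteableBitmap are placeholders, not from the answer):

// Sketch: add the control to an existing panel and switch mask shapes at runtime.
var ctrl = new MyImageControl
{
    Width = 200,
    Height = 200,
    Shape = MaskShape.circle,
    Bitmap = myWriteableBitmap // a WriteableBitmap you loaded elsewhere
};
rootPanel.Children.Add(ctrl);

// Later, to crop the same image as a rectangle instead:
ctrl.Shape = MaskShape.rectangle;
ctrl.Invalidate(); // triggers w2dCanvas_Draw with the new mask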

SwapChainBackgroundPanel letterboxing Monogame Windows Store App

I am porting my space shooter game from Windows Phone to a Windows Store app. On WP it always plays in full portrait orientation.
For the Windows Store app, while in landscape mode I want to center the game screen with letterboxing on the left and right. The problem is that I can't adjust the Margin property of SwapChainBackgroundPanel, so the game is always aligned to the left with the black bar on the right.
Here's my code
public Game1()
{
    graphics = new GraphicsDeviceManager(this);
    GamePage.Current.SizeChanged += OnWindowSizeChanged;
    Content.RootDirectory = "Content";
}

private void OnWindowSizeChanged(object sender, Windows.UI.Xaml.SizeChangedEventArgs e)
{
    var CurrentViewState = Windows.UI.ViewManagement.ApplicationView.Value;
    double width = e.NewSize.Width;
    double height = e.NewSize.Height;
    // using Windows.Graphics.Display;
    ResolutionScale resolutionScale = DisplayProperties.ResolutionScale;
    string orientation = null;
    if (ApplicationView.Value == ApplicationViewState.FullScreenLandscape)
    {
        orientation = "FullScreenLandscape";
        // Does not work because it starts at the center of the screen:
        // the black bar ends up on the left and the game screen on the right.
        GamePage.Current.HorizontalAlignment = Windows.UI.Xaml.HorizontalAlignment.Center;
        // Gives error - WinRT information: Setting 'Margin' property is
        // not supported on SwapChainBackgroundPanel.
        GamePage.Current.Margin = new Thickness(centerMargin, 0, 0, 0);
    }
    else if (ApplicationView.Value == ApplicationViewState.FullScreenPortrait)
    {
        orientation = "FullScreenPortrait";
    }
    else if (ApplicationView.Value == ApplicationViewState.Filled)
    {
        orientation = "Filled";
    }
    else if (ApplicationView.Value == ApplicationViewState.Snapped)
    {
        orientation = "Snapped";
    }
    Debug.WriteLine("{0} x {1}. Scale: {2}. Orientation: {3}",
        width.ToString(), height.ToString(), resolutionScale.ToString(),
        orientation);
}
The GamePage.xaml is the default
<SwapChainBackgroundPanel
    x:Class="SpaceShooterXW8.GamePage"
    xmlns="http://schemas.microsoft.com/winfx/2006/xaml/presentation"
    xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
    xmlns:local="using:SpaceShooterXW8"
    xmlns:d="http://schemas.microsoft.com/expression/blend/2008"
    xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"
    mc:Ignorable="d">
</SwapChainBackgroundPanel>
After some research I think I've figured it out, thanks to this blog post. To those in a similar situation, here's what I did.
The beauty of the solution is that the letterboxing is automatically managed by the Resolution class. All I have to do is update the batch.Begin() calls in my code to something like:
batch.Begin(SpriteSortMode.Deferred,
    null,
    SamplerState.LinearClamp,
    null,
    null,
    null,
    Resolution.getTransformationMatrix());
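For context, the Resolution class from that blog post works by fitting a fixed virtual resolution into the actual backbuffer; a minimal sketch of the core idea (my own reconstruction under that assumption, not the blog's exact code) is:

using System;
using Microsoft.Xna.Framework;

// Sketch of the Resolution helper's core idea: scale the virtual resolution
// to fit the real backbuffer and center it, which produces the letterboxing.
// (The real class also has Init(ref graphics) to apply the settings to the
// GraphicsDeviceManager; that part is omitted here.)
public static class Resolution
{
    private static int virtualWidth = 480, virtualHeight = 800;
    private static int actualWidth, actualHeight;

    public static void SetVirtualResolution(int width, int height)
    {
        virtualWidth = width;
        virtualHeight = height;
    }

    public static void SetResolution(int width, int height, bool fullScreen)
    {
        actualWidth = width;
        actualHeight = height;
    }

    public static Matrix getTransformationMatrix()
    {
        // Uniform scale that fits the virtual screen inside the window.
        float scale = Math.Min((float)actualWidth / virtualWidth,
                               (float)actualHeight / virtualHeight);
        // Centering offsets create the black bars on the long axis.
        float offsetX = (actualWidth - virtualWidth * scale) / 2f;
        float offsetY = (actualHeight - virtualHeight * scale) / 2f;
        return Matrix.CreateScale(scale, scale, 1f)
             * Matrix.CreateTranslation(offsetX, offsetY, 0f);
    }
}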
To handle resolution changes as the orientation changes, I use this in my Game1.cs:
public Game1()
{
    graphics = new GraphicsDeviceManager(this);
    GamePage.Current.SizeChanged += OnWindowSizeChanged;
    Content.RootDirectory = "Content";
    Resolution.Init(ref graphics);
    Resolution.SetVirtualResolution(480, 800);
}

private void OnWindowSizeChanged(object sender, Windows.UI.Xaml.SizeChangedEventArgs e)
{
    var CurrentViewState = Windows.UI.ViewManagement.ApplicationView.Value;
    App.AppWidth = (int)e.NewSize.Width;
    App.AppHeight = (int)e.NewSize.Height;
    Resolution.SetResolution(App.AppWidth, App.AppHeight, true);
}
The initial values of App.AppWidth and App.AppHeight are set in GamePage.xaml.cs:
public GamePage(string launchArguments)
{
    this.InitializeComponent();
    App.AppWidth = (int)Window.Current.Bounds.Width;
    App.AppHeight = (int)Window.Current.Bounds.Height;
    Current = this;
    // Create the game.
    _game = XamlGame<Game1>.Create(launchArguments, Window.Current.CoreWindow, this);
}
Both are global static properties created in App.xaml.cs:
public static int AppWidth { get; set; }
public static int AppHeight { get; set; }
The only problem I've encountered so far is that mouse input does not scale with the screen resolution change. Unfortunately I do not have a touch screen to test with, but I think touch input should scale. If anyone has tested touch, please share your findings. Thanks.
Update
I've managed to scale the mouse input using the following:
public static Vector2 ScaleGesture(Vector2 position)
{
    int x = (int)(position.X / (float)App.AppWidth * (float)Screen.ScreenWidth);
    int y = (int)(position.Y / (float)App.AppHeight * (float)Screen.ScreenHeight);
    var scaledPosition = new Vector2(x, y);
    return scaledPosition;
}

SlimDX Handling Window Resizing

I'm trying to handle the program window being resized, and the (I think inefficient) code I've flung together below seems to do the trick.
Is there a better way to do this, preferably one that does not create a stutter when resizing the window and which does not constantly use 12-17% of a CPU? I also suspect MessagePump.Run may somehow run before form.Resize finishes setting up the device again, and throw an error.
Thanks!
using System;
using System.Drawing;
using System.Windows.Forms;
using SlimDX;
using SlimDX.Direct3D9;
using SlimDX.Windows;

namespace SlimDX_1
{
    struct Vertex
    {
        public Vector4 Position;
        public int Color;
    }

    static class Program
    {
        private static VertexBuffer vertices;
        private static Device device;
        private static RenderForm form;
        private static PresentParameters present;
        private static VertexDeclaration vertexDecl;
        private static VertexElement[] vertexElems;
        private static bool wasMinimized = false;

        /// <summary>
        /// The main entry point for the application.
        /// </summary>
        [STAThread]
        static void Main()
        {
            form = new RenderForm("Tutorial 1: Basic Window");
            init();
            form.Resize += (o, e) =>
            {
                if (form.WindowState == FormWindowState.Minimized)
                {
                    foreach (var item in ObjectTable.Objects)
                    {
                        item.Dispose();
                    }
                    wasMinimized = true;
                }
                else
                {
                    foreach (var item in ObjectTable.Objects)
                    {
                        item.Dispose();
                    }
                    init();
                    device.SetRenderState(RenderState.FillMode, FillMode.Wireframe);
                    device.SetRenderState(RenderState.CullMode, Cull.None);
                    present.BackBufferHeight = form.ClientSize.Height;
                    present.BackBufferWidth = form.ClientSize.Width;
                    device.Reset(present);
                }
            };

            MessagePump.Run(form, () =>
            {
                if (form.WindowState == FormWindowState.Minimized)
                {
                    return;
                }
                device.Clear(ClearFlags.Target | ClearFlags.ZBuffer, Color.Black, 1.0f, 0);
                device.BeginScene();
                device.SetStreamSource(0, vertices, 0, 20); // 20 is the size of each vertex
                device.VertexDeclaration = vertexDecl;
                device.DrawPrimitives(PrimitiveType.TriangleList, 0, 1);
                device.EndScene();
                device.Present();
            });

            foreach (var item in ObjectTable.Objects)
            {
                item.Dispose();
            }
        }

        private static void init()
        {
            present = new PresentParameters();
            //present.EnableAutoDepthStencil = false;
            //present.BackBufferCount = 1;
            //present.SwapEffect = SwapEffect.Discard;
            present.Windowed = true;
            present.BackBufferHeight = form.ClientSize.Height;
            present.BackBufferWidth = form.ClientSize.Width;
            //present.BackBufferFormat = Format.Unknown;
            device = new Device(new Direct3D(), 0, DeviceType.Hardware, form.Handle, CreateFlags.HardwareVertexProcessing, present);

            vertices = new VertexBuffer(device, 3 * 20, Usage.WriteOnly, VertexFormat.None, Pool.Managed);
            vertices.Lock(0, 0, LockFlags.None).WriteRange(new Vertex[]
            {
                new Vertex() { Color = Color.Red.ToArgb(), Position = new Vector4(400.0f, 100.0f, 0.5f, 1.0f) },
                new Vertex() { Color = Color.Blue.ToArgb(), Position = new Vector4(650.0f, 500.0f, 0.5f, 1.0f) },
                new Vertex() { Color = Color.Green.ToArgb(), Position = new Vector4(150.0f, 500.0f, 0.5f, 1.0f) }
            });
            vertices.Unlock();

            // specifies the layout of the vertexes
            vertexElems = new VertexElement[]
            {
                new VertexElement(0, 0, DeclarationType.Float4, DeclarationMethod.Default, DeclarationUsage.PositionTransformed, 0),
                new VertexElement(0, 16, DeclarationType.Color, DeclarationMethod.Default, DeclarationUsage.Color, 0),
                VertexElement.VertexDeclarationEnd
            };
            vertexDecl = new VertexDeclaration(device, vertexElems);
        }
    }
}
You're going way above and beyond what you need to do when the window is resized. You're releasing every single DirectX object you've created, including the graphics device, and then recreating everything. This is going to take a comparatively long time, which is why you're seeing performance issues.
In fact, none of your objects need to be released. Simply call the Reset() function on the device to recreate the backbuffer to match the new window size. Check out some of the native Direct3D9 tutorials on window resizing to see, in general, how the process works.
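In other words, the resize handler can be as small as updating the present parameters and resetting the existing device. A sketch along those lines, based on the code above (untested):

// Minimal sketch of the suggested approach: on resize, just update the
// backbuffer size and reset the existing device -- no disposing/recreating.
form.Resize += (o, e) =>
{
    if (form.WindowState == FormWindowState.Minimized)
        return; // nothing to resize; the render loop already skips drawing

    present.BackBufferWidth = form.ClientSize.Width;
    present.BackBufferHeight = form.ClientSize.Height;

    // Reset recreates the backbuffer at the new size. Note that in D3D9,
    // resources in Pool.Default must be released before Reset and recreated
    // afterwards; the vertex buffer above is in Pool.Managed, so it survives.
    device.Reset(present);

    // Render state is lost on Reset, so reapply it.
    device.SetRenderState(RenderState.FillMode, FillMode.Wireframe);
    device.SetRenderState(RenderState.CullMode, Cull.None);
};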
