Correct transformation order for a scene graph matrix

I am working on a quick WebGL engine with a scene graph to prototype my game idea from reddit (https://www.reddit.com/r/gameideas/comments/3dsy8m/revolt/). Now that I have some basic rendering done, I can't figure out the correct order of transformations (the one that looks right to most people) to apply to the nodes in the scene graph.
It's hard to explain exactly what is happening, but I hope you can see that it simply isn't rotating the way most people would expect it to in any other engine.
Here is a simplified version of what I am currently doing.
Mat4 = glMatrix 0.9.5
Utils = custom utilities
Node(Render):
    #param {parentMatrix}
    // Create local matrix
    self.lMatrix = mat4.create();
    mat4.identity(self.lMatrix);
    mat4.translate(self.lMatrix, self.position);
    mat4.rotate(self.lMatrix, self.rotation[0], [1, 0, 0]);
    mat4.rotate(self.lMatrix, self.rotation[1], [0, 1, 0]);
    mat4.rotate(self.lMatrix, self.rotation[2], [0, 0, 1]);

    var wMatrix = mat4.create();
    mat4.identity(wMatrix);
    if (parentMatrix) {
        mat4.multiply(self.lMatrix, parentMatrix, wMatrix);
    } else {
        mat4.set(self.lMatrix, wMatrix);
    }

    // Render children
    var children = this.children,
        numChildren = children.length,
        child;
    for (var i = 0; i < numChildren; i++) {
        child = children[i];
        child.render(wMatrix);
    }
Entity(Render):
    #param {parentMatrix}
    // Set transformation matrix
    var tMatrix = mat4.create();
    mat4.identity(tMatrix);
    mat4.translate(tMatrix, self.position);
    mat4.rotate(tMatrix, self.rotation[0], [1, 0, 0]);
    mat4.rotate(tMatrix, self.rotation[1], [0, 1, 0]);
    mat4.rotate(tMatrix, self.rotation[2], [0, 0, 1]);
    mat4.scale(tMatrix, self.scale || [1, 1, 1]);

    var wMatrix = mat4.create();
    mat4.identity(wMatrix);
    mat4.multiply(tMatrix, parentMatrix, wMatrix);

    Utils.loadTMatrix(wMatrix);
    this.texture.bind();
    this.mesh.render();

The usual order is SRT: scale, then rotate, then translate. That is the order the transforms hit the vertices, which means the combined matrix is built as translate · rotate · scale.
Also, I am not sure whether you can just do
mat4.rotate(tMatrix, self.rotation[0], [1, 0, 0]);
mat4.rotate(tMatrix, self.rotation[1], [0, 1, 0]);
mat4.rotate(tMatrix, self.rotation[2], [0, 0, 1]);
with Euler angles and get the orientation you expect. I don't use Euler angles, so I don't fully grasp the details; somebody please correct me if I'm wrong. See this page for conversions between Euler angles and rotation matrices: http://www.euclideanspace.com/maths/geometry/rotations/conversions/eulerToMatrix/.
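To make the composition concrete, here is a rough sketch of how it usually looks with the newer glMatrix 2.x API (not the 0.9.5 signatures used above; node.position, node.rotation and node.scale are placeholder fields): build the local matrix as translate · rotateX · rotateY · rotateZ · scale, then multiply the parent's world matrix on the left.
// Sketch only, assuming glMatrix 2.x style calls (mat4.translate(out, a, v), etc.).
var local = mat4.create();                       // identity
mat4.translate(local, local, node.position);     // T
mat4.rotateX(local, local, node.rotation[0]);    // Rx
mat4.rotateY(local, local, node.rotation[1]);    // Ry
mat4.rotateZ(local, local, node.rotation[2]);    // Rz
mat4.scale(local, local, node.scale);            // S  -> local = T*Rx*Ry*Rz*S

var world = mat4.create();
mat4.multiply(world, parentWorld, local);        // world = parent * local
Composed this way, a node is scaled and rotated in its own space first, and then carried along by its parent.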

I didn't end up with the approach I was originally hoping for, because I had been caching matrices and wanted to keep doing that, but after recreating my old engine from scratch I found a much simpler way.
Engine.prototype.NODE.prototype.render = function(parentMatrix){
    var children = this.children,
        numChildren = children.length,
        child, pos, rot, scale;

    // If a parent matrix was passed in, start from a copy of it
    if (parentMatrix) {
        this.matrix = mat4.clone(parentMatrix);
    } else {
        // Otherwise start from the identity matrix
        mat4.identity(this.matrix);
    }

    pos = [this.position.x, this.position.y, this.position.z];
    rot = [this.rotation.x, this.rotation.y, this.rotation.z];
    scale = [this.scale.x, this.scale.y, this.scale.z];

    // Rebuild the transformation matrix
    mat4.translate(this.matrix, this.matrix, pos);
    mat4.rotate(this.matrix, this.matrix, rot[0], [1, 0, 0]);
    mat4.rotate(this.matrix, this.matrix, rot[1], [0, 1, 0]);
    mat4.rotate(this.matrix, this.matrix, rot[2], [0, 0, 1]);
    mat4.scale(this.matrix, this.matrix, scale);

    // Render children with this matrix
    for (var i = 0; i < numChildren; i++) {
        child = children[i];
        child.render(this.matrix);
    }
}
What I am basically doing is: if the node has a parent matrix (i.e. it isn't the root node), I start the node's matrix off as a clone of its parent's; otherwise I set it to the identity matrix. Then I apply the regular transformations to it. If I find a way to keep caching matrices I will post it as soon as possible.
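Not part of the original post, but one common way to get the caching back is a dirty flag on the local matrix: rebuild it only when position, rotation or scale actually change, and combine it with the parent each frame. A rough sketch, assuming glMatrix 2.x and hypothetical localMatrix / localDirty fields (created in the constructor with mat4.create() and true respectively) plus a hypothetical setPosition helper:
// Sketch only: dirty-flag caching of the local matrix (not from the original post).
Engine.prototype.NODE.prototype.setPosition = function(x, y, z){
    this.position = {x: x, y: y, z: z};
    this.localDirty = true;                       // local matrix must be rebuilt
};

Engine.prototype.NODE.prototype.updateLocalMatrix = function(){
    if (!this.localDirty) { return; }
    mat4.identity(this.localMatrix);
    mat4.translate(this.localMatrix, this.localMatrix,
        [this.position.x, this.position.y, this.position.z]);
    mat4.rotateX(this.localMatrix, this.localMatrix, this.rotation.x);
    mat4.rotateY(this.localMatrix, this.localMatrix, this.rotation.y);
    mat4.rotateZ(this.localMatrix, this.localMatrix, this.rotation.z);
    mat4.scale(this.localMatrix, this.localMatrix,
        [this.scale.x, this.scale.y, this.scale.z]);
    this.localDirty = false;
};

Engine.prototype.NODE.prototype.render = function(parentMatrix){
    this.updateLocalMatrix();
    // world = parent * local; the multiply itself is cheap enough to redo every frame
    if (parentMatrix) {
        mat4.multiply(this.matrix, parentMatrix, this.localMatrix);
    } else {
        mat4.copy(this.matrix, this.localMatrix);
    }
    for (var i = 0; i < this.children.length; i++) {
        this.children[i].render(this.matrix);
    }
};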

Related

Construct a rotation matrix in Pytorch

I want to construct a rotation matrix which has unknown Euler angles, and build a regression solution to find the values of those Euler angles. My code is here:
roll = yaw = pitch = torch.randn(1, requires_grad=True)
RX = torch.tensor([
    [1, 0, 0],
    [0, cos(roll), -sin(roll)],
    [0, sin(roll), cos(roll)]
], requires_grad=True)
RY = torch.tensor([
    [cos(pitch), 0, sin(pitch)],
    [0, 1, 0],
    [-sin(pitch), 0, cos(pitch)]
], requires_grad=True)
RZ = torch.tensor([
    [cos(yaw), -sin(yaw), 0],
    [sin(yaw), cos(yaw), 0],
    [0, 0, 1]
], requires_grad=True)
R = torch.mm(RZ, RY).requires_grad_()
R = torch.mm(R, RX).requires_grad_()
R = R.mean().requires_grad_()
R.backward()
The matrix cannot be differentiated with respect to the Euler angles; the angles never receive any gradient value. Can anyone fix my problem? Thanks!
debug result
torch.tensor() is treated as an operation that creates a brand-new leaf tensor, so it cannot backpropagate to the roll, yaw and pitch tensors it was built from.
A dirty way to fix your code:
import torch
from torch import sin, cos

roll = torch.randn(1, requires_grad=True)
yaw = torch.randn(1, requires_grad=True)
pitch = torch.randn(1, requires_grad=True)

tensor_0 = torch.zeros(1)
tensor_1 = torch.ones(1)

RX = torch.stack([
    torch.stack([tensor_1, tensor_0, tensor_0]),
    torch.stack([tensor_0, cos(roll), -sin(roll)]),
    torch.stack([tensor_0, sin(roll), cos(roll)])]).reshape(3, 3)
RY = torch.stack([
    torch.stack([cos(pitch), tensor_0, sin(pitch)]),
    torch.stack([tensor_0, tensor_1, tensor_0]),
    torch.stack([-sin(pitch), tensor_0, cos(pitch)])]).reshape(3, 3)
RZ = torch.stack([
    torch.stack([cos(yaw), -sin(yaw), tensor_0]),
    torch.stack([sin(yaw), cos(yaw), tensor_0]),
    torch.stack([tensor_0, tensor_0, tensor_1])]).reshape(3, 3)

R = torch.mm(RZ, RY)
R = torch.mm(R, RX)
R_mean = R.mean()
R_mean.backward()   # roll.grad, yaw.grad and pitch.grad are now populated

WebGL rotate function

I'm trying to understand matrix rotation using WebGL.
I have this mat4() matrix and I have to apply these transformations:
m = translate(torsoHeight + 1 * headHeight, 5, 0.0);
m = mult(m, rotate(theta[head1Id], 1, 0, 0));
m = mult(m, rotate(theta[head2Id], 0, 1, 0));
m = mult(m, translate(0.0, -0.5 * headHeight, 0.0));
figure[headId] = createNode(m, head, leftUpperArmId, null);
break;
I don't understand exactly how the mult function works. The first parameter is my matrix.
The theta[] array is built this way:
var theta = [0, 0, 0, 0, 0, 0, 180, 0, 180, 0, 0];
and
var headId = 1;
var head1Id = 1;
var head2Id = 10;
Am I right in thinking that the second parameter is another matrix built with the rotate() function? If so, how does the rotate function work?
rotate and translate are functions that create matrices.
rotate looks like its arguments are (angle, vectorX, vectorY, vectorZ); it creates a matrix that rotates points around the given vector.
mult is the standard mathematical multiplication for 4x4 matrices.
You should probably dig into linear algebra tutorials such as https://open.gl/transformations
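For illustration only (this is not the library's actual code), helpers like these typically look something like the following; the angle appears to be in degrees, given the 180 values in the theta array:
// Sketch of a rotate-about-X helper: builds a 4x4 rotation matrix
// (plain nested row-major arrays here, just for readability).
function rotateXSketch(angleDeg) {
    var a = angleDeg * Math.PI / 180;
    var c = Math.cos(a), s = Math.sin(a);
    return [
        [1, 0,  0, 0],
        [0, c, -s, 0],
        [0, s,  c, 0],
        [0, 0,  0, 1]
    ];
}

// Sketch of mult: ordinary 4x4 matrix multiplication C = A * B.
function multSketch(A, B) {
    var C = [];
    for (var i = 0; i < 4; i++) {
        C.push([0, 0, 0, 0]);
        for (var j = 0; j < 4; j++) {
            for (var k = 0; k < 4; k++) {
                C[i][j] += A[i][k] * B[k][j];
            }
        }
    }
    return C;
}
The important point is the multiplication order: m = mult(m, rotate(...)) post-multiplies, so when the result is applied to a point the rotation acts first, i.e. in the local space defined by the existing transform m.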

Cesium primitives (triangles) shading

I use this code to draw triangles in Cesium:
var mypositions = Cesium.Cartesian3.fromDegreesArrayHeights(triangles);
// unroll 'mypositions' into a flat array here
var numPositions = mypositions.length;
var pos = new Float64Array(numPositions * 3);
var normals = new Float32Array(numPositions * 3);
for (var i = 0; i < numPositions; ++i) {
    pos[i * 3] = mypositions[i].x;
    pos[i * 3 + 1] = mypositions[i].y;
    pos[i * 3 + 2] = mypositions[i].z;
    normals[i * 3] = 0.0;
    normals[i * 3 + 1] = 0;
    normals[i * 3 + 2] = 1.0;
}
console.log(normals);

var geometry = new Cesium.Geometry({
    vertexFormat: Cesium.VertexFormat.ALL,
    attributes: {
        position: new Cesium.GeometryAttribute({
            componentDatatype: Cesium.ComponentDatatype.DOUBLE, // not FLOAT
            componentsPerAttribute: 3,
            values: pos
        }),
        normal: new Cesium.GeometryAttribute({
            componentDatatype: Cesium.ComponentDatatype.FLOAT,
            componentsPerAttribute: 3,
            values: normals
        })
    },
    // Don't need the following line if no vertices are shared.
    // indices: new Uint32Array([0, 1, 2, 3, 4, 5]),
    primitiveType: Cesium.PrimitiveType.TRIANGLES,
    boundingSphere: Cesium.BoundingSphere.fromVertices(pos)
});

var myInstance = new Cesium.GeometryInstance({
    geometry: geometry,
    attributes: {
        color: new Cesium.ColorGeometryInstanceAttribute(0.0039215697906911,
            0.7333329916000366,
            0,
            1)
    },
    show: new Cesium.ShowGeometryInstanceAttribute(true)
});

var TIN = viewer.scene.primitives.add(new Cesium.Primitive({
    geometryInstances: [myInstance],
    asynchronous: false,
    appearance: new Cesium.PerInstanceColorAppearance({
        closed: true,
        translucent: false,
        flat: false
        //,
        //vertexShaderSource: "",
        //fragmentShaderSource: ""
    })
}));
This is what I get:
I would like to enable shading, so the result should be similar to the figure below:
I tried to write vertex and fragment GLSL shaders, but without success. I am not familiar with GLSL and I was getting a compile error. Is there any other way to create this kind of shading?
Thanks!
Regardless of the fact that you haven't posted your GLSL shaders or gotten them to work, your problem (once you eventually figure out the GLSL part) is that you're setting all your normals to point in the +Z direction instead of making them actually normal to each triangle, as your second screenshot shows.
var normals = new Float32Array(numPositions * 3);
for (var i = 0; i < numPositions; ++i) {
    pos[i * 3] = mypositions[i].x;
    pos[i * 3 + 1] = mypositions[i].y;
    pos[i * 3 + 2] = mypositions[i].z;
    normals[i * 3] = 0.0;
    normals[i * 3 + 1] = 0;
    normals[i * 3 + 2] = 1.0;
}
What you need to do instead is set the positions, and then for the normals operate on sets of three vertices (one triangle) instead of individual vertices, so that you can actually calculate a surface normal. This is one of many explanations of how to do that:
https://math.stackexchange.com/questions/305642/how-to-find-surface-normal-of-a-triangle
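A rough sketch of that idea in plain JavaScript, assuming the unindexed layout used above (three consecutive vertices per triangle in pos):
// Sketch: compute one face normal per triangle via the cross product of two
// edge vectors, then assign it to all three of that triangle's vertices.
for (var t = 0; t < numPositions; t += 3) {
    var ax = pos[t * 3],       ay = pos[t * 3 + 1],       az = pos[t * 3 + 2];
    var bx = pos[(t + 1) * 3], by = pos[(t + 1) * 3 + 1], bz = pos[(t + 1) * 3 + 2];
    var cx = pos[(t + 2) * 3], cy = pos[(t + 2) * 3 + 1], cz = pos[(t + 2) * 3 + 2];
    // edges e1 = B - A, e2 = C - A
    var e1x = bx - ax, e1y = by - ay, e1z = bz - az;
    var e2x = cx - ax, e2y = cy - ay, e2z = cz - az;
    // n = normalize(e1 x e2)
    var nx = e1y * e2z - e1z * e2y;
    var ny = e1z * e2x - e1x * e2z;
    var nz = e1x * e2y - e1y * e2x;
    var len = Math.sqrt(nx * nx + ny * ny + nz * nz) || 1.0;
    nx /= len; ny /= len; nz /= len;
    for (var v = 0; v < 3; v++) {
        normals[(t + v) * 3] = nx;
        normals[(t + v) * 3 + 1] = ny;
        normals[(t + v) * 3 + 2] = nz;
    }
}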
I'm not familiar with Cesium, but I'd imagine there is some "default" shader that does basic lighting. If not, the basis for simple lighting like this is called Lambertian reflectance. Your vertex shader would output a color computed as dot(N, L), where N is the vertex normal and L is the vector from that vertex to your light source (or just the negative of the light's direction for a directional/environment/sun light). The fragment shader would simply pass that color through.
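The Lambertian term itself is just a clamped dot product; a tiny sketch (in JavaScript here, the GLSL equivalent being max(dot(N, L), 0.0)):
// Sketch: Lambertian diffuse factor for unit-length normal N and light vector L.
function lambert(N, L) {
    var d = N[0] * L[0] + N[1] * L[1] + N[2] * L[2];
    return Math.max(d, 0.0); // clamp so back-facing surfaces are not lit negatively
}
// e.g. lambert([0, 0, 1], [0, 0, 1]) === 1, lambert([0, 0, 1], [1, 0, 0]) === 0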

Orbiting a cube in WebGL with glMatrix

https://jsfiddle.net/sepoto/Ln7qvv7w/2/
I have a base set up that displays a cube with different colored faces. What I am trying to do is set up a camera and apply a combined X-axis and Y-axis rotation so that the cube spins around both axes concurrently. There seem to be some problems with the matrices I set up, as I can see the blue face doesn't look quite right. There are examples of how this is done using older versions of glMatrix, but the code in those examples no longer works because of changes to vec4 in the glMatrix library. Does anyone know how this can be done using the latest version of glMatrix? I have attached a CDN to the fiddle.
Thank you!
function drawScene() {
    gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    mat4.ortho(mOrtho, -5, 5, 5, -5, 2, -200);
    mat4.identity(mMove);

    var rotMatrix = mat4.create();
    mat4.identity(rotMatrix);
    rotMatrix = mat4.fromYRotation(rotMatrix, yRot, rotMatrix);
    rotMatrix = mat4.fromXRotation(rotMatrix, xRot, rotMatrix);
    mat4.multiply(mMove, rotMatrix, mMove);

    setMatrixUniforms();

    gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexPositionBuffer);
    gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, triangleVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, triangleColorBuffer);
    gl.vertexAttribPointer(shaderProgram.vertexColorAttribute, triangleColorBuffer.itemSize, gl.FLOAT, false, 0, 0);
    gl.drawArrays(gl.TRIANGLES, 0, triangleVertexPositionBuffer.numItems);

    yRot += 0.01;
    xRot += 0.01;
}
As the name says, fromYRotation() initializes a matrix to the given rotation; it overwrites whatever was there before. Hence you need two temporary matrices for the partial rotations, which you can then combine:
var rotMatrix = mat4.create();
var rotMatrixX = mat4.create();
var rotMatrixY = mat4.create();
mat4.fromYRotation(rotMatrixY, yRot);
mat4.fromXRotation(rotMatrixX, xRot);
mat4.multiply(rotMatrix, rotMatrixY, rotMatrixX);
And the reason your blue face was behaving strangely was the missing depth test. Enable it in your initialization method:
gl.enable(gl.DEPTH_TEST);
You don't need to use three matrices:
// you should do allocations outside of the render loop
var rotMat = mat4.create();

// no need to set the matrix to identity, as
// fromYRotation resets rotMat's contents anyway
mat4.fromYRotation(rotMat, yRot);
mat4.rotateX(rotMat, rotMat, xRot);
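For completeness, a sketch of how that slots back into the drawScene() from the question (same variable names, assuming glMatrix 2.x):
mat4.fromYRotation(rotMat, yRot);      // rotMat = Ry
mat4.rotateX(rotMat, rotMat, xRot);    // rotMat = Ry * Rx
mat4.multiply(mMove, rotMat, mMove);   // combine with the model matrix, as before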

Calculate the vertex while creating terrain from heightmap using ThreeJs

I'm reading the "create terrain from heightmap" example from the Three.js Cookbook.
This example loads GrandCanyon: http://lh5.ggpht.com/_-B0hFoGrn-w/SvHiYk39yAI/AAAAAAAABOQ/6IGZwifUYGA/GrandCanyon.png
and creates a 3D terrain: http://www.smartjava.org/tjscb/02-geometries-meshes/02.06-create-terrain-from-heightmap.html
There are some pieces of code I cannot understand:
// draw on canvas
ctx.drawImage(img, 0, 0);
var pixel = ctx.getImageData(0, 0, width, depth);

var geom = new THREE.Geometry();
var output = [];
for (var x = 0; x < depth; x++) {
    for (var z = 0; z < width; z++) {
        // get pixel
        // since we're grayscale, we only need one element
        var yValue = pixel.data[z * 4 + (depth * x * 4)] / heightOffset;
        var vertex = new THREE.Vector3(x * spacingX, yValue, z * spacingZ);
        geom.vertices.push(vertex);
    }
}
Why is yValue calculated that way? Why don't we use var yValue = pixel.data[z * 4 + (depth * x)] or something like that?
And do we really need spacingX and spacingZ?
Source code is here: https://github.com/josdirksen/threejs-cookbook/blob/master/02-geometries-meshes/02.06-create-terrain-from-heightmap.html
Could you please help me? Thank you very much!
You don't NEED spacingX and spacingZ, no. You could adjust scale in other ways, for example by applying a scale matrix to the entire THREE.Geometry after you've populated the vertices. It's up to you, really.
As for the yValue, the indexing adjusts for the way the texture data is laid out: there are four channels per pixel, usually RGBA, but since the heightmap is grayscale we only need one of them as the height.
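To make the indexing explicit, here is a small sketch (canvas image data is a flat RGBA array, four bytes per pixel, laid out row by row):
// For the pixel at column z, row x, the red channel lives at:
//   index = (row * pixelsPerRow + column) * 4
// The cookbook code uses z * 4 + depth * x * 4, i.e. (x * depth + z) * 4, which is
// the same thing when the image is square (width === depth).
var index = (x * width + z) * 4;
var r = pixel.data[index];       // red
var g = pixel.data[index + 1];   // green
var b = pixel.data[index + 2];   // blue
var a = pixel.data[index + 3];   // alpha
var yValue = r / heightOffset;   // grayscale, so any one channel works as the height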
