OpenGL Point Functionality in WebGL

In OpenGL you can draw points like this:
glBegin(GL_POINTS);
for(float theta=0, radius=60.0; radius>1.0; theta+=0.1, radius-=0.3){
glColor3f(radius/60.0,0.3,1-(radius/60.0));
glVertex2i(200+radius*cos(theta),200+radius*sin(theta));
}
glEnd();
How do you accomplish this same functionality in WebGL?

The code you wrote really doesn't do much except define some points. To do that in WebGL you could do it like this:
var colors = [];
var verts = [];
var theta = 0;
for(var radius=60.0; radius>1.0; radius-=0.3) {
colors.push(radius/60.0, 0.3, 1-(radius/60.0));
verts.push(200+radius*Math.cos(theta),200+radius*Math.sin(theta));
theta+=0.1;
}
var numPoints = colors.length / 3;
That would make 2 JavaScript arrays. You'd then need to put them into WebGLBuffers:
var colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
After that, though, you need to write a shader and set it up. Shaders are a huge topic. For your particular data I'm guessing these shaders would do.
A vertex shader
uniform mat4 u_matrix;
attribute vec4 a_vertex;
attribute vec4 a_color;
varying vec4 v_color;
void main() {
// Set the size of the point
gl_PointSize = 3.0;
// multiply each vertex by a matrix.
gl_Position = u_matrix * a_vertex;
// pass the color to the fragment shader
v_color = a_color;
}
A fragment shader
precision mediump float;
varying vec4 v_color;
void main() {
gl_FragColor = v_color;
}
Next you need to initialize the shaders and parameters. I'm going to assume I put the shaders in script tags with ids "vshader" and "fshader" and use this boilerplate code to load them.
var program = createProgramFromScriptTags(gl, "vshader", "fshader");
gl.useProgram(program);
// look up the locations for the inputs to our shaders.
var u_matLoc = gl.getUniformLocation(program, "u_matrix");
var colorLoc = gl.getAttribLocation(program, "a_color");
var vertLoc = gl.getAttribLocation(program, "a_vertex");
// Set the matrix to something that makes 1 unit = 1 pixel.
gl.uniformMatrix4fv(u_matLoc, false, [
2 / gl.canvas.width, 0, 0, 0,
0, 2 / gl.canvas.height, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
]);
// Tell the shader how to get data out of the buffers.
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.vertexAttribPointer(colorLoc, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(colorLoc);
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.vertexAttribPointer(vertLoc, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vertLoc);
and finally draw the points
gl.drawArrays(gl.POINTS, 0, numPoints);
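(Note: createProgramFromScriptTags isn't part of WebGL itself; it just stands in for whatever boilerplate you use to compile and link the two shaders. A minimal hand-rolled version might look something like this:)
function createProgramFromScriptTags(gl, vsId, fsId) {
  function compile(id, type) {
    // compile one shader from the text of a script tag
    var shader = gl.createShader(type);
    gl.shaderSource(shader, document.getElementById(id).text);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      throw new Error(gl.getShaderInfoLog(shader));
    }
    return shader;
  }
  // link the vertex and fragment shaders into a program
  var program = gl.createProgram();
  gl.attachShader(program, compile(vsId, gl.VERTEX_SHADER));
  gl.attachShader(program, compile(fsId, gl.FRAGMENT_SHADER));
  gl.linkProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    throw new Error(gl.getProgramInfoLog(program));
  }
  return program;
}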
Here's a snippet
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
if (!gl) {
alert("no WebGL");
//return;
}
var colors = [];
var verts = [];
var theta = 0;
for(var radius=160.0; radius>1.0; radius-=0.3) {
colors.push(radius/160.0, 0.3, 1-(radius/160.0));
verts.push(radius*Math.cos(theta),radius*Math.sin(theta));
theta+=0.1;
}
var numPoints = colors.length / 3;
var colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
var program = twgl.createProgramFromScripts(gl, ["vshader", "fshader"]);
gl.useProgram(program);
// look up the locations for the inputs to our shaders.
var u_matLoc = gl.getUniformLocation(program, "u_matrix");
var colorLoc = gl.getAttribLocation(program, "a_color");
var vertLoc = gl.getAttribLocation(program, "a_vertex");
function draw() {
gl.clearColor(1.0, 1.0, 1.0, 1.0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Set the matrix to something that makes 1 unit = 1 pixel.
gl.uniformMatrix4fv(u_matLoc, false, [
2 / canvas.width, 0, 0, 0,
0, -2 / canvas.height, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.vertexAttribPointer(colorLoc, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(colorLoc);
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.vertexAttribPointer(vertLoc, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vertLoc);
gl.drawArrays(gl.POINTS, 0, numPoints);
requestAnimationFrame(draw);
}
draw();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/3.x/twgl.min.js"></script>
<script id="vshader" type="whatever">
uniform mat4 u_matrix;
attribute vec4 a_vertex;
attribute vec4 a_color;
varying vec4 v_color;
void main() {
// Set the size of the point
gl_PointSize = length(a_vertex) * 0.1;
// multiply each vertex by a matrix.
gl_Position = u_matrix * a_vertex;
// pass the color to the fragment shader
v_color = a_color;
}
</script>
<script id="fshader" type="whatever">
precision mediump float;
varying vec4 v_color;
void main() {
gl_FragColor = v_color;
}
</script>
<canvas id="c" width="400" height="400"></canvas>
You might find these WebGL tutorials helpful.

WebGL is based on OpenGL ES 2.0 (see here), which dropped immediate-mode support.
This specification describes an additional rendering context and support objects for the HTML 5 canvas element [CANVAS]. This context allows rendering using an API that conforms closely to the OpenGL ES 2.0 API.
You'll need to use vertex buffers to store vertex data. See here1 for a good explanation of how things work in retained mode, and there for a nice small example to get you started.
1: Kudos to whoever posted this here.
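As a bare-bones illustration of the retained-mode flow (this sketch assumes you've already compiled and linked a program with an a_position attribute, and that gl is a WebGL context):
var positionLoc = gl.getAttribLocation(program, "a_position");
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
// upload the vertex data once...
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, 0.5, 0, 0, 0.5]), gl.STATIC_DRAW);
// ...then describe its layout and draw from it as often as you like
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.POINTS, 0, 3);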

Related

GLSL vertex shader gl_Position for gl.POINTS with large gl.PointSize

I'm trying to render a lot of points with a large gl_PointSize. It looks like the vertex shader will automatically discard gl.POINTS rendering if the position is outside [-1, 1].
In my case, if the point's position is a little bit outside [-1, 1], part of it should still be shown on the canvas. Is there any way to let the shader keep rendering the point when its position is outside [-1, 1]?
Here's the code to draw a point positioned a little bit off the canvas. I expect nearly half of it to show on the canvas.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
var positions = [
-1.0001, -1
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
....
gl.enableVertexAttribArray(positionAttributeLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.vertexAttribPointer(positionAttributeLocation, size, type, normalize, stride, offset);
gl.drawArrays(gl.POINTS, 0, 1);
In my vertex shader, I have
....
gl_PointSize = 32.0;
....
I am unable to repeat your issue. Drawing outside the canvas with POINTS seems to work for me.
From the OpenGL ES 2.0 spec, section 2.13:
If the primitive under consideration is a point, then clipping discards it if it lies outside the near or far clip plane; otherwise it is passed unchanged.
Are you seeing different results?
Of course, be aware that both WebGL and OpenGL ES 2.0 are only required to support a max point size of 1.0. It looks like most support at least 60.
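You can also query the actual limit at runtime; a quick check might look like this:
// returns a Float32Array of [min, max] supported point sizes
const pointSizeRange = gl.getParameter(gl.ALIASED_POINT_SIZE_RANGE);
console.log('point size range:', pointSizeRange[0], 'to', pointSizeRange[1]);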
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
gl_PointSize = 64.0;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const positionLoc = gl.getAttribLocation(program, 'position');
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1.001, -1.001,
1.001, -1.001,
-1.001, 1.001,
1.001, 1.001,
0, 0,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 5);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Update
According to this thread, this is a difference between OpenGL and OpenGL ES.
The OpenGL spec says:
If the primitive under consideration is a point, then clipping passes it unchanged if it lies within the clip volume; otherwise, it is discarded.
It's subtly different from the OpenGL ES spec: effectively OpenGL clips the points, OpenGL ES does not. Even stranger, many OpenGL drivers don't clip the way the spec says they should.
The short version is that you can't count on whether or not points get clipped in WebGL, so you might want to consider drawing your own quads instead. You can make your vertex shader expand the quads by whatever value you're currently using for gl_PointSize, and use either GPU instancing or manual instancing to draw lots of points. With GPU instancing there is still one position per point, just like now. With manual instancing you need to repeat the position for each vertex, or add a point id and put your positions in a texture if you don't want to repeat the positions in the attributes (see the sketch below).
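For the manual-instancing route, the idea is just to repeat each point's center once per quad vertex. A rough sketch, where points is a hypothetical array of [x, y] centers (not a variable from the example below):
const corners = [
  [-0.5, -0.5], [0.5, -0.5], [-0.5, 0.5],
  [-0.5, 0.5], [0.5, -0.5], [0.5, 0.5],
];
const centers = [];        // one copy of the point's center per quad vertex
const cornerOffsets = [];  // the matching corner for each vertex
for (const [x, y] of points) {
  for (const [cx, cy] of corners) {
    centers.push(x, y);
    cornerOffsets.push(cx, cy);
  }
}
// upload `centers` and `cornerOffsets` into two buffers (no divisor needed), then:
// gl.drawArrays(gl.TRIANGLES, 0, points.length * 6);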
Example of using GPU instancing
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('ANGLE_instanced_arrays');
if (!ext) {
alert('need ANGLE_instanced_arrays');
}
const vs = `
attribute vec4 position; // center point
attribute vec2 cornerPosition; // the corners (-0.5 to 0.5)
uniform vec2 resolution;
varying vec3 pointCoord; // only if you need gl_PointCoord substitute
void main() {
// do the normal thing (can mult by matrix or whatever here)
gl_Position = position;
float pointSize = 64.0;
// -- point emulation
gl_Position.xy += cornerPosition * (pointSize * 2.0 - 1.0) /
resolution * gl_Position.w;
// only if you need gl_PointCoord substitute
pointCoord = vec3(cornerPosition * 0.5, gl_Position.z);
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const positionLoc = gl.getAttribLocation(program, 'position');
const cornerPositionLoc = gl.getAttribLocation(program, 'cornerPosition');
const resolutionLoc = gl.getUniformLocation(program, 'resolution');
{
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1.001, -1.001,
1.001, -1.001,
-1.001, 1.001,
1.001, 1.001,
0, 0,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 2, gl.FLOAT, false, 0, 0);
ext.vertexAttribDivisorANGLE(positionLoc, 1);
}
{
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-0.5, -0.5,
0.5, -0.5,
-0.5, 0.5,
-0.5, 0.5,
0.5, -0.5,
0.5, 0.5,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(cornerPositionLoc);
gl.vertexAttribPointer(cornerPositionLoc, 2, gl.FLOAT, false, 0, 0);
}
gl.useProgram(program);
gl.uniform2f(resolutionLoc, gl.canvas.width, gl.canvas.height);
ext.drawArraysInstancedANGLE(gl.TRIANGLES, 0, 6, 5); // 5 points, 6 verts per point
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
You should see this create the same image as above. It has the advantage that it is not subject to the point size limit and works around the driver bugs. Test on your own hardware and see if it solves your issue.
Just remember, if you're not using vertex array objects, then you probably need to reset the attribute divisor back to zero before trying to render something else:
ext.vertexAttribDivisorANGLE(positionLoc, 0);
One more example just to test
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('ANGLE_instanced_arrays');
if (!ext) {
alert('need ANGLE_instanced_arrays');
}
const vs = `
attribute vec4 position; // center point
attribute vec2 cornerPosition; // the corners (-0.5 to 0.5)
uniform vec2 resolution;
uniform mat4 matrix;
varying vec3 pointCoord; // only if you need gl_PointCoord substitute
void main() {
// do the normal thing (can mult by matrix or whatever here)
gl_Position = matrix * position;
float pointSize = 20.0 / gl_Position.w;
// -- point emulation
gl_Position.xy += cornerPosition * (pointSize * 2.0 - 1.0) /
resolution * gl_Position.w;
// only if you need gl_PointCoord substitute
pointCoord = vec3(cornerPosition * 0.5, gl_Position.z);
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`;
const program = twgl.createProgram(gl, [vs, fs]);
const positionLoc = gl.getAttribLocation(program, 'position');
const cornerPositionLoc = gl.getAttribLocation(program, 'cornerPosition');
const resolutionLoc = gl.getUniformLocation(program, 'resolution');
const matrixLoc = gl.getUniformLocation(program, 'matrix');
const numPoints = 100;
{
// adapted from http://stackoverflow.com/a/26127012/128511
function fibonacciSphere(samples, i) {
const rnd = 1.;
const offset = 2. / samples;
const increment = Math.PI * (3. - Math.sqrt(5.));
// for i in range(samples):
const y = ((i * offset) - 1.) + (offset / 2.);
const r = Math.sqrt(1. - Math.pow(y ,2.));
const phi = (i + rnd % samples) * increment;
const x = Math.cos(phi) * r;
const z = Math.sin(phi) * r;
return [x, y, z];
}
const positions = [];
for (let i = 0; i < numPoints; ++i) {
positions.push(...fibonacciSphere(numPoints, i));
}
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLoc);
gl.vertexAttribPointer(positionLoc, 3, gl.FLOAT, false, 0, 0);
ext.vertexAttribDivisorANGLE(positionLoc, 1);
}
{
const buf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-0.5, -0.5,
0.5, -0.5,
-0.5, 0.5,
-0.5, 0.5,
0.5, -0.5,
0.5, 0.5,
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(cornerPositionLoc);
gl.vertexAttribPointer(cornerPositionLoc, 2, gl.FLOAT, false, 0, 0);
}
function render(ms) {
const secs = ms * 0.001;
const mat = m4.perspective(
60 * Math.PI / 180,
gl.canvas.clientWidth / gl.canvas.clientHeight,
0.1,
100);
m4.translate(mat, [0, 0, -2.11 + Math.sin(secs)], mat);
m4.rotateX(mat, secs, mat);
m4.rotateY(mat, secs * 0.93, mat);
gl.useProgram(program);
gl.uniform2f(resolutionLoc, gl.canvas.width, gl.canvas.height);
gl.uniformMatrix4fv(matrixLoc, false, mat);
// 6 verts per point
ext.drawArraysInstancedANGLE(gl.TRIANGLES, 0, 6, numPoints);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>

GLSL: Points disappear when using gl-matrix projection + model view matrices

I'm working on learning WebGL and am confused by a very elementary problem to do with model-view and projection matrices.
In the snippet below, if I use the following line to set the point positions, the points render:
gl_Position = vec4(aVertexPosition, 1.0);
However, if I try to use the projection matrix and the model view matrix, the points disappear:
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
If uPMatrix and uMVMatrix are identity matrices (as I thought they should be, as I'm not intentionally manipulating them), then the gl_Position value should be set and the points should be rendered (the following works):
gl_Position = mat4(1.0) * mat4(1.0) * vec4(aVertexPosition, 1.0);
However, logging the uPMatrix and uMVMatrix values in the render() loop, I can see that the projection matrix (uPMatrix) values are not an identity matrix! Does anyone know why this is the case? Full snippet below:
var canvas,
gl,
fs,
vs,
glProgram,
vertexBuffer,
vertexIndexBuffer,
colorBuffer,
positionVal,
colorVal,
points = [],
mvMatrix = mat4.create(),
pMatrix = mat4.create(),
angle = 0.00;
function initWebgl() {
canvas = document.querySelector('canvas');
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
try {
gl = canvas.getContext('webgl')
} catch(err) {
alert('Your browser does not support Webgl')
}
// set the default background color
gl.clearColor(0.9, 0.9, 0.9, 1.0)
gl.clear(gl.COLOR_BUFFER_BIT|gl.DEPTH_BUFFER_BIT);
gl.enable(gl.DEPTH_TEST);
}
function initCamera() {
// set camera area, fov, near clip, far clip, and translation
gl.viewport(0, 0, canvas.width, canvas.height)
mat4.perspective(45, canvas.width/canvas.height, 0.1, 100.0, pMatrix);
mat4.identity(mvMatrix);
mat4.translate(mvMatrix, [0, 0, 0]);
}
function initShaders() {
vs = buildShader('#shader-vs', gl.VERTEX_SHADER)
fs = buildShader('#shader-fs', gl.FRAGMENT_SHADER)
}
function buildShader(selector, type) {
var src = document.querySelector(selector).innerHTML;
var shader = gl.createShader(type)
gl.shaderSource(shader, src)
gl.compileShader(shader)
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.warn('Shader error', selector, gl.getShaderInfoLog(shader))
}
return shader;
}
function initProgram() {
glProgram = gl.createProgram()
gl.attachShader(glProgram, vs)
gl.attachShader(glProgram, fs)
gl.linkProgram(glProgram)
if (!gl.getProgramParameter(glProgram, gl.LINK_STATUS)) {
console.warn('Program link error')
}
gl.useProgram(glProgram)
}
function updatePositions() {
mat4.identity(mvMatrix)
//mat4.translate(mvMatrix, [0.0, 0.0, 0.0])
//mat4.rotate(mvMatrix, angle, [0.0, 0.0, 0.0])
angle += 0.01;
}
function getBuffers() {
// vertex buffer
points = [
-0.5, 0.5, 0.0,
0.0, 0.0, 0.0,
-0.5, -0.5, 0.0,
]
var vertexData = new Float32Array(points);
vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexData, gl.STATIC_DRAW)
}
function drawBuffers() {
// identify and bind vertex position attributes
var aVertexPosition = gl.getAttribLocation(glProgram, 'aVertexPosition')
gl.enableVertexAttribArray(aVertexPosition)
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer)
gl.vertexAttribPointer(aVertexPosition, 3, gl.FLOAT, false, 0.0, 0.0)
// draw the data
gl.drawArrays(gl.POINTS, 0, 3)
}
function getMatrixUniforms() {
glProgram.pMatrixUniform = gl.getUniformLocation(glProgram, 'uPMatrix')
glProgram.mvMatrixUniform = gl.getUniformLocation(glProgram, 'uMVMatrix')
}
function setMatrixUniforms() {
gl.uniformMatrix4fv(glProgram.pMatrixUniform, false, pMatrix)
gl.uniformMatrix4fv(glProgram.mvMatrixUniform, false, mvMatrix)
}
function render() {
updatePositions()
drawBuffers()
setMatrixUniforms()
requestAnimationFrame(render, canvas)
}
initWebgl()
initCamera()
initShaders()
initProgram()
getMatrixUniforms()
getBuffers()
render()
* {
margin: 0;
padding: 0;
}
body, html {
height: 100%;
width: 100%;
overflow: hidden;
background: skyblue;
}
<script src="https://rawgit.com/duhaime/955402641534b89babd41c8de8bc91f6/raw/5d86d54f7237f4cf2b206dcf0a3d453ba95acd1d/gl-matrix.js"></script>
<script id='shader-vs' type='x-shader/x-vertex'>
attribute vec3 aVertexPosition;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec3 vPos;
void main() {
gl_PointSize = 5.0;
// this works
gl_Position = vec4(aVertexPosition, 1.0);
// this makes all points disappear
//gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vPos = vec3(aVertexPosition);
}
</script>
<script id='shader-fs' type='x-shader/x-fragment'>
varying highp vec3 vPos;
void main() {
highp vec3 blue = vec3(1.0, 1.0, 1.0);
highp vec3 red = vec3(1.0, 0.0, 0.0);
gl_FragColor = vec4(mix(blue, red, 1.0), 1.0);
}
</script>
<canvas />
The gl-matrix library initializes matrices with zeros, and I was only transforming the model-view matrix, not the projection matrix, to an identity matrix. I should have been initializing the projection matrix as an identity matrix:
function initCamera() {
// set camera area, fov, near clip, far clip, and translation
gl.viewport(0, 0, canvas.width, canvas.height)
mat4.perspective(45, canvas.width/canvas.height, 0.1, 100.0, pMatrix);
mat4.identity(mvMatrix);
mat4.identity(pMatrix);
mat4.translate(mvMatrix, [0, 0, 0]);
}
[Is it customary for users to initialize their mv and p matrices to identity matrices?]
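For reference, with a current gl-matrix build (2.x or later, which I'm assuming here), mat4.create() does return an identity matrix, and mat4.perspective takes the output matrix first, with the field of view in radians:
// assumes gl-matrix 2.x+; older 0.9.x builds used a different argument order
var pMatrix = mat4.create();  // already an identity matrix
mat4.perspective(pMatrix, 45 * Math.PI / 180, canvas.width / canvas.height, 0.1, 100.0);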

Is there a way of handling Framebuffer Objects (FBO's) in Three.js?

I'm learning to manipulate position values in the GPU using textures, a.k.a. Framebuffer Objects (FBOs), while using Three.js. I've been using this question as a starting place, and this example by @mrdoob and @zz85, as well as this old thread.
However, the examples are quite dated (they use three.js rev. 55 vs. the current rev. 80), so I'm needing to make a fair number of revisions and reworks to the code. Before I get in too deep I wanted to pause and ask if any way of handling FBOs has already been written into the Three.js code base, or if I've overlooked an updated script somewhere. Thanks!
If not, I'll do my darnedest and perhaps post the result here if it seems generally useful.
In case you like to "pop the hood", I wanted to share an absolutely minimal example of a FBO scene in THREE.js. Hopefully the inline comments help spell out how this comes together:
// specify the container where we'll render the scene
var elem = document.querySelector('body'),
elemW = elem.clientWidth,
elemH = elem.clientHeight
// generate a scene object
var scene = new THREE.Scene();
// generate a camera
var camera = new THREE.PerspectiveCamera(75, elemW/elemH, 0.001, 100);
// generate a renderer
var renderer = new THREE.WebGLRenderer({antialias: true, alpha: true});
renderer.setPixelRatio(window.devicePixelRatio);
renderer.setSize(elemW, elemH);
elem.appendChild(renderer.domElement);
// generate controls
var controls = new THREE.TrackballControls(camera, renderer.domElement);
// position camera and controls
camera.position.set(0.5, 0.5, -5);
controls.target = new THREE.Vector3(0.5, 0.5, 0);
/**
* FBO
**/
// verify browser agent supports "frame buffer object" features
gl = renderer.getContext();
if (!gl.getExtension('OES_texture_float') ||
gl.getParameter(gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS) == 0) {
alert(' * Cannot create FBO :(');
}
// set initial positions of `w*h` particles
var w = 256, h = 256,
i = 0,
data = new Float32Array(w*h*3);
for (var x=0; x<w; x++) {
for (var y=0; y<h; y++) {
data[i++] = x/w;
data[i++] = y/h;
data[i++] = 0;
}
}
// feed those positions into a data texture
var dataTex = new THREE.DataTexture(data, w, h, THREE.RGBFormat, THREE.FloatType);
dataTex.minFilter = THREE.NearestFilter;
dataTex.magFilter = THREE.NearestFilter;
dataTex.needsUpdate = true;
// add the data texture with positions to a material for the simulation
var simMaterial = new THREE.RawShaderMaterial({
uniforms: { posTex: { type: 't', value: dataTex }, },
vertexShader: document.querySelector('#sim-vs').textContent,
fragmentShader: document.querySelector('#sim-fs').textContent,
});
// delete dataTex; it isn't used after initializing point positions
delete dataTex;
THREE.FBO = function(w, simMat) {
this.scene = new THREE.Scene();
this.camera = new THREE.OrthographicCamera(-w/2, w/2, w/2, -w/2, -1, 1);
this.scene.add(new THREE.Mesh(new THREE.PlaneGeometry(w, w), simMat));
};
// create a scene where we'll render the positional attributes
var fbo = new THREE.FBO(w, simMaterial);
// create render targets a + b to which the simulation will be rendered
var renderTargetA = new THREE.WebGLRenderTarget(w, h, {
wrapS: THREE.RepeatWrapping,
wrapT: THREE.RepeatWrapping,
minFilter: THREE.NearestFilter,
magFilter: THREE.NearestFilter,
format: THREE.RGBFormat,
type: THREE.FloatType,
stencilBuffer: false,
});
// a second render target lets us store input + output positional states
var renderTargetB = renderTargetA.clone();
// render the positions to the render targets
renderer.render(fbo.scene, fbo.camera, renderTargetA, false);
renderer.render(fbo.scene, fbo.camera, renderTargetB, false);
// store the uv attrs; each is x,y and identifies a given point's
// position data within the positional texture; must be scaled 0:1!
var geo = new THREE.BufferGeometry(),
arr = new Float32Array(w*h*3);
for (var i=0; i<arr.length; i++) {
arr[i++] = (i%w)/w;
arr[i++] = Math.floor(i/w)/h;
arr[i++] = 0;
}
geo.addAttribute('position', new THREE.BufferAttribute(arr, 3, true))
// create material the user sees
var material = new THREE.RawShaderMaterial({
uniforms: {
posMap: { type: 't', value: null }, // `posMap` is set each render
},
vertexShader: document.querySelector('#ui-vert').textContent,
fragmentShader: document.querySelector('#ui-frag').textContent,
transparent: true,
});
// add the points the user sees to the scene
var mesh = new THREE.Points(geo, material);
scene.add(mesh);
function render() {
// at the start of the render block, A is one frame behind B
var oldA = renderTargetA; // store A, the penultimate state
renderTargetA = renderTargetB; // advance A to the updated state
renderTargetB = oldA; // set B to the penultimate state
// pass the updated positional values to the simulation
simMaterial.uniforms.posTex.value = renderTargetA.texture;
// run a frame and store the new positional values in renderTargetB
renderer.render(fbo.scene, fbo.camera, renderTargetB, false);
// pass the new positional values to the scene users see
material.uniforms.posMap.value = renderTargetB.texture;
// render the scene users see as normal
renderer.render(scene, camera);
controls.update();
requestAnimationFrame(render);
};
render();
html, body { width: 100%; height: 100%; background: #000; }
body { margin: 0; overflow: hidden; }
canvas { width: 100%; height: 100%; }
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/101/three.min.js"></script>
<script src="https://threejs.org/examples/js/controls/TrackballControls.js"></script>
<!-- The simulation shaders update positional attributes -->
<script id='sim-vs' type='x-shader/x-vert'>
precision mediump float;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
attribute vec2 uv; // x,y offsets of each point in texture
attribute vec3 position;
varying vec2 vUv;
void main() {
vUv = vec2(uv.x, 1.0 - uv.y);
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
</script>
<script id='sim-fs' type='x-shader/x-frag'>
precision mediump float;
uniform sampler2D posTex;
varying vec2 vUv;
void main() {
// read the supplied x,y,z vert positions
vec3 pos = texture2D(posTex, vUv).xyz;
// update the positional attributes here!
pos.x += cos(pos.y) / 100.0;
pos.y += tan(pos.x) / 100.0;
// render the new positional attributes
gl_FragColor = vec4(pos, 1.0);
}
</script>
<!-- The ui shaders render what the user sees -->
<script id='ui-vert' type='x-shader/x-vert'>
precision mediump float;
uniform sampler2D posMap; // contains positional data read from sim-fs
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
attribute vec2 position;
void main() {
// read this particle's position, which is stored as a pixel color
vec3 pos = texture2D(posMap, position.xy).xyz;
// project this particle
vec4 mvPosition = modelViewMatrix * vec4(pos, 1.0);
gl_Position = projectionMatrix * mvPosition;
// set the size of each particle
gl_PointSize = 0.3 / -mvPosition.z;
}
</script>
<script id='ui-frag' type='x-shader/x-frag'>
precision mediump float;
void main() {
gl_FragColor = vec4(0.0, 0.5, 1.5, 1.0);
}
</script>
I just discovered that there is a way in Three.js to handle Frame Buffer Objects (FBOs) to calculate things like changing position data using the GPU; it is called the THREE.GPUComputationRenderer. There is an excellent flock of birds example here that demonstrates how to pass a number of variables by rendering their values into textures to be used in the final shader.
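A rough sketch of the typical setup, based on the GPUComputationRenderer helper that ships in the three.js examples folder (treat the exact method names as an assumption if your version differs; positionShaderSource is a placeholder for your simulation fragment shader):
// create the computation renderer and a variable backed by a float texture
var gpuCompute = new GPUComputationRenderer(256, 256, renderer);
var posTexture = gpuCompute.createTexture();   // fill posTexture.image.data with initial xyz values here
var posVariable = gpuCompute.addVariable('texturePosition', positionShaderSource, posTexture);
gpuCompute.setVariableDependencies(posVariable, [posVariable]);
var error = gpuCompute.init();
if (error !== null) console.error(error);
// each frame: run the simulation, then feed the result to the material the user sees
gpuCompute.compute();
material.uniforms.posMap.value = gpuCompute.getCurrentRenderTarget(posVariable).texture;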

Use a GLSL shader built in a sandbox with my WebGL code

I'm very new to shaders and have modified a shader on glsl.heroku.com to suit my needs, see it here; this is what I want my 3D object to look like: Demo. Now my question is how do I import it into my project using native WebGL (view my broken code here). The shader code is as follows:
precision mediump float;
varying vec2 surfacePosition;
void main( void ) {
float intensity = 3.0; // Lower number = more 'glow'
vec3 light_color = vec3(0.4, 0.3, 0.1); // RGB, proportional values, higher increases intensity
float master_scale = 0.01; // Change the size of the effect
float c = master_scale/(length(surfacePosition) * length(surfacePosition));
gl_FragColor = vec4(vec3(pow(c, intensity))*light_color, 1.0);
}
That's the fragment shader. How do I build the correct vertex shader to go with it and pass through the surfacePosition variable (and what is surfacePosition, anyway?) using my broken code in the link above?
Without looking at the source I'm going to guess surfacePosition is just the vertex position * some matrix.
Vertex shader
attribute vec4 a_position;
varying vec2 surfacePosition;
uniform mat4 mat;
void main() {
gl_Position = a_position;
surfacePosition = (a_position * mat).xy;
}
setup
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
...
var loc = gl.getUniformLocation(program, "mat");
var mat = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
];
mat[5] = canvas.height/canvas.width;
gl.uniformMatrix4fv(loc, false, mat);
gl.drawArrays(gl.TRIANGLES, 0, 6);
The matrix lets you adjust the rendering aspect based on the size of the canvas as well as implement pan and zoom. See this article.
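For instance, a zoom could be folded into the same matrix just by scaling the diagonal; the values below are purely illustrative:
// hypothetical: zoom in 2x while keeping the aspect correction
var zoom = 2;
mat[0] = zoom;
mat[5] = zoom * canvas.height / canvas.width;
gl.uniformMatrix4fv(loc, false, mat);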
Here's a snippet:
var countElem = document.getElementById("t");
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
var program = twgl.createProgramFromScripts(
gl, ["vshader", "fshader"], ["a_position"]);
gl.useProgram(program);
var verts = [
1, 1,
-1, 1,
-1, -1,
1, 1,
-1, -1,
1, -1,
];
var vertBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(verts), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
var loc = gl.getUniformLocation(program, "mat");
var mat = [
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1,
];
// index 0 = x scale (ie, aspect & zoom)
// index 5 = y scale (ie, aspect & zoom)
// index 12 = x offset (pan x)
// index 13 = y offset (pan y)
mat[5] = canvas.height/canvas.width;
gl.uniformMatrix4fv(loc, false, mat);
gl.drawArrays(gl.TRIANGLES, 0, 6);
<script src="https://twgljs.org/dist/3.x/twgl.min.js"></script>
<script id="vshader" type="whatever">
attribute vec4 a_position;
varying vec2 surfacePosition;
uniform mat4 mat;
void main() {
gl_Position = a_position;
surfacePosition = (a_position * mat).xy;
}
</script>
<script id="fshader" type="whatever">
precision mediump float;
varying vec2 surfacePosition;
void main() {
float intensity = 3.0; // Lower number = more 'glow'
vec3 light_color = vec3(0.4, 0.3, 0.1); // RGB, proportional values, higher increases intensity
float master_scale = 0.01; // Change the size of the effect
float c = master_scale/(length(surfacePosition) * length(surfacePosition));
gl_FragColor = vec4(vec3(pow(c, intensity))*light_color, 1.0);
}
</script>
<canvas id="c"></canvas>

Drawing an image using WebGL

Please could anyone explain how to draw an image on a WebGL canvas? At the moment, on a regular '2d' canvas, I'm using this:
var canvas = document.getElementById("canvas");
var cxt = canvas.getContext('2d');
img.onload = function() {
cxt.drawImage(img, 0, 0, canvas.width, canvas.height);
}
img.src = "data:image/jpeg;base64," + base64var;
With WebGL it seems you have to use textures, etc. Can anyone explain how I would adapt this code for WebGL? Thanks for the help! :)
If it were up to me, I'd do it with a unit quad and a matrix like this.
Given these shaders
vertex shader
attribute vec2 a_position;
uniform mat3 u_matrix;
varying vec2 v_texCoord;
void main() {
gl_Position = vec4(u_matrix * vec3(a_position, 1), 1);
// because we're using a unit quad we can just use
// the same data for our texcoords.
v_texCoord = a_position;
}
fragment shader
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
I'd create a unit quad and then fill out a 3x3 matrix to translate, rotate, and scale it where I needed it to be
var dstX = 20;
var dstY = 30;
var dstWidth = 64;
var dstHeight = 64;
// convert dst pixel coords to clipspace coords
var clipX = dstX / gl.canvas.width * 2 - 1;
var clipY = dstY / gl.canvas.height * -2 + 1;
var clipWidth = dstWidth / gl.canvas.width * 2;
var clipHeight = dstHeight / gl.canvas.height * -2;
// build a matrix that will stretch our
// unit quad to our desired size and location
gl.uniformMatrix3fv(u_matrixLoc, false, [
clipWidth, 0, 0,
0, clipHeight, 0,
clipX, clipY, 1,
]);
"use strict";
window.onload = main;
function main() {
var image = new Image();
// using a dataURL because stackoverflow
image.src = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69fO0ZjRoZfy
l5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAM9JREFUeNrs2+EJgzAQBtBccTIXcQ8HcA8XcbV0gjZiONKS9/1VAnl43KExaq2lJxHRt0B/4tvF1v5eZfIAAAAAAICZE60+2erz53EN3cC2r11zghIAAAAAAAAzzwGllJ/u89lzghIAAAAAAAATZ8nus71zRPb6SgAAAAAAAJgDnif7fUH2+koAAAAAAACYA/Jy4/u9OUAJAAAAAACAMYkb9/z1OcHzuJwTBAAAAAAAAB7OAa0+v+3r0P8GW33eEwAAAAAAAAB8zBsAAP//AwB6eysS2pA5KAAAAABJRU5ErkJggg=="; // MUST BE SAME DOMAIN!!!
image.onload = function() {
render(image);
}
}
function render(image) {
// Get A WebGL context
var canvas = document.getElementById("c");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
// look up uniform locations
var u_imageLoc = gl.getUniformLocation(program, "u_image");
var u_matrixLoc = gl.getUniformLocation(program, "u_matrix");
// provide texture coordinates for the rectangle.
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image.
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
// Upload the image into the texture.
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
var dstX = 20;
var dstY = 30;
var dstWidth = 64;
var dstHeight = 64;
// convert dst pixel coords to clipspace coords
var clipX = dstX / gl.canvas.width * 2 - 1;
var clipY = dstY / gl.canvas.height * -2 + 1;
var clipWidth = dstWidth / gl.canvas.width * 2;
var clipHeight = dstHeight / gl.canvas.height * -2;
// build a matrix that will stretch our
// unit quad to our desired size and location
gl.uniformMatrix3fv(u_matrixLoc, false, [
clipWidth, 0, 0,
0, clipHeight, 0,
clipX, clipY, 1,
]);
// Draw the rectangle.
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
canvas {
border: 1px solid black;
}
<script src="//webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c"></canvas>
<!-- vertex shader -->
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec2 a_position;
uniform vec2 u_resolution;
uniform mat3 u_matrix;
varying vec2 v_texCoord;
void main() {
gl_Position = vec4(u_matrix * vec3(a_position, 1), 1);
v_texCoord = a_position;
}
</script>
<!-- fragment shader -->
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// our texture
uniform sampler2D u_image;
// the texCoords passed in from the vertex shader.
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
}
</script>
Here are some articles that explain the matrix math.
Here is a self-contained script that does what you want, if that helps. All you need to provide is image.jpg.
<canvas id="cvs" width="1024" height="768"></canvas>
<script>
var img, tex, vloc, tloc, vertexBuff, texBuff;
var cvs3d = document.getElementById('cvs');
var ctx3d = cvs3d.getContext('experimental-webgl');
var uLoc;
// create shaders
var vertexShaderSrc =
"attribute vec2 aVertex;" +
"attribute vec2 aUV;" +
"varying vec2 vTex;" +
"uniform vec2 pos;" +
"void main(void) {" +
" gl_Position = vec4(aVertex + pos, 0.0, 1.0);" +
" vTex = aUV;" +
"}";
var fragmentShaderSrc =
"precision highp float;" +
"varying vec2 vTex;" +
"uniform sampler2D sampler0;" +
"void main(void){" +
" gl_FragColor = texture2D(sampler0, vTex);"+
"}";
var vertShaderObj = ctx3d.createShader(ctx3d.VERTEX_SHADER);
var fragShaderObj = ctx3d.createShader(ctx3d.FRAGMENT_SHADER);
ctx3d.shaderSource(vertShaderObj, vertexShaderSrc);
ctx3d.shaderSource(fragShaderObj, fragmentShaderSrc);
ctx3d.compileShader(vertShaderObj);
ctx3d.compileShader(fragShaderObj);
var progObj = ctx3d.createProgram();
ctx3d.attachShader(progObj, vertShaderObj);
ctx3d.attachShader(progObj, fragShaderObj);
ctx3d.linkProgram(progObj);
ctx3d.useProgram(progObj);
ctx3d.viewport(0, 0, 1024, 768);
vertexBuff = ctx3d.createBuffer();
ctx3d.bindBuffer(ctx3d.ARRAY_BUFFER, vertexBuff);
ctx3d.bufferData(ctx3d.ARRAY_BUFFER, new Float32Array([-1, 1, -1, -1, 1, -1, 1, 1]), ctx3d.STATIC_DRAW);
texBuff = ctx3d.createBuffer();
ctx3d.bindBuffer(ctx3d.ARRAY_BUFFER, texBuff);
ctx3d.bufferData(ctx3d.ARRAY_BUFFER, new Float32Array([0, 1, 0, 0, 1, 0, 1, 1]), ctx3d.STATIC_DRAW);
vloc = ctx3d.getAttribLocation(progObj, "aVertex");
tloc = ctx3d.getAttribLocation(progObj, "aUV");
uLoc = ctx3d.getUniformLocation(progObj, "pos");
img = new Image();
img.src = "image.jpg";
img.onload = function(){
tex = ctx3d.createTexture();
ctx3d.bindTexture(ctx3d.TEXTURE_2D, tex);
ctx3d.texParameteri(ctx3d.TEXTURE_2D, ctx3d.TEXTURE_MIN_FILTER, ctx3d.NEAREST);
ctx3d.texParameteri(ctx3d.TEXTURE_2D, ctx3d.TEXTURE_MAG_FILTER, ctx3d.NEAREST);
ctx3d.texImage2D(ctx3d.TEXTURE_2D, 0, ctx3d.RGBA, ctx3d.RGBA, ctx3d.UNSIGNED_BYTE, this);
ctx3d.enableVertexAttribArray(vloc);
ctx3d.bindBuffer(ctx3d.ARRAY_BUFFER, vertexBuff);
ctx3d.vertexAttribPointer(vloc, 2, ctx3d.FLOAT, false, 0, 0);
ctx3d.enableVertexAttribArray(tloc);
ctx3d.bindBuffer(ctx3d.ARRAY_BUFFER, texBuff);
ctx3d.bindTexture(ctx3d.TEXTURE_2D, tex);
ctx3d.vertexAttribPointer(tloc, 2, ctx3d.FLOAT, false, 0, 0);
ctx3d.drawArrays(ctx3d.TRIANGLE_FAN, 0, 4);
};
</script>
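One small note: 'experimental-webgl' is a legacy context name; on current browsers you'd normally request 'webgl' first and fall back only if needed, e.g.:
// prefer the standard context name, fall back for very old browsers
var ctx3d = cvs3d.getContext('webgl') || cvs3d.getContext('experimental-webgl');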
