Canvas/WebGL 2D tilemap grid artifacts - html5-canvas

I am creating a simple 2D web game that works with your typical tile map and sprites.
The twist is that I want smooth camera controls, both translation and scaling (zooming).
I have tried both the Canvas 2D API and WebGL, and with both I simply cannot avoid the bleeding grid-line artifacts while also supporting zooming properly.
If it matters: all of my tiles are of size 1 and are scaled to whatever size is needed, all of their coordinates are integers, and I am using a texture atlas.
Here's an example picture using my WebGL code, where the thin red/white lines are not wanted.
I remember writing sprite tile maps years ago with desktop GL, ironically using similar code (more or less equivalent to what I could do with WebGL 2), and it never had any of these issues.
I am considering trying DOM-based elements next, but I fear it will not feel or look smooth.

One solution is to draw the tiles in the fragment shader.
So you have your map as, say, a Uint32Array. Break it down into units of 4 bytes each: the first two bytes are the tile ID (its x and y within the tileset), the third is unused, and the last byte holds the flags.
As you walk across the quad, for each pixel you look up in the tilemap texture which tile it is, then use that to compute UV coordinates to fetch pixels for that tile out of the texture of tiles. If your texture of tiles has gl.NEAREST sampling set, you'll never get any bleeding.
Note that unlike traditional tilemaps, the ID of each tile is the x,y coordinate of that tile in the tile texture. In other words, if your tile texture is 16x8 tiles and you want your map to show the tile 7 across and 4 down, then the ID of that tile is 7,4 (first byte 7, second byte 4), whereas in a traditional CPU-based system the tile ID would probably be 4*16+7, or 71 (the 71st tile). You could add code to the shader to do the more traditional indexing, but since the shader has to convert the ID into 2D texture coordinates anyway, it just seemed easier to use 2D IDs.
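If your game logic already stores traditional linear tile indices, the conversion to this two-byte x,y form happens on the CPU when you fill the map, exactly as the map-filling loop in the snippet below does. As a standalone sketch (setTile is a hypothetical helper, not part of the snippet):
// hypothetical helper: write a linear tile id plus flags into the tilemap bytes
function setTile(tilemapU8, mapIndex, linearTileId, tilesAcross, flags) {
  const off = mapIndex * 4;
  tilemapU8[off + 0] = linearTileId % tilesAcross;        // tile x within the tileset
  tilemapU8[off + 1] = (linearTileId / tilesAcross) | 0;  // tile y within the tileset
  tilemapU8[off + 3] = flags | 0;                         // 128 = xflip, 64 = yflip, 32 = xySwap
}
A full working example: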
const vs = `
attribute vec4 position;
//attribute vec4 texcoord; - since position is a unit square just use it for texcoords
uniform mat4 u_matrix;
uniform mat4 u_texMatrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
// v_texcoord = (u_texMatrix * texcoord).xy;
v_texcoord = (u_texMatrix * position).xy;
}
`;
const fs = `
precision highp float;
uniform sampler2D u_tilemap;
uniform sampler2D u_tiles;
uniform vec2 u_tilemapSize;
uniform vec2 u_tilesetSize;
varying vec2 v_texcoord;
void main() {
vec2 tilemapCoord = floor(v_texcoord);
vec2 texcoord = fract(v_texcoord);
vec2 tileFoo = fract((tilemapCoord + vec2(0.5, 0.5)) / u_tilemapSize);
vec4 tile = floor(texture2D(u_tilemap, tileFoo) * 256.0);
float flags = tile.w;
float xflip = step(128.0, flags);
flags = flags - xflip * 128.0;
float yflip = step(64.0, flags);
flags = flags - yflip * 64.0;
float xySwap = step(32.0, flags);
if (xflip > 0.0) {
texcoord = vec2(1.0 - texcoord.x, texcoord.y);
}
if (yflip > 0.0) {
texcoord = vec2(texcoord.x, 1.0 - texcoord.y);
}
if (xySwap > 0.0) {
texcoord = texcoord.yx;
}
vec2 tileCoord = (tile.xy + texcoord) / u_tilesetSize;
vec4 color = texture2D(u_tiles, tileCoord);
if (color.a <= 0.1) {
discard;
}
gl_FragColor = color;
}
`;
const tileWidth = 32;
const tileHeight = 32;
const tilesAcross = 8;
const tilesDown = 4;
const m4 = twgl.m4;
const gl = document.querySelector('#c').getContext('webgl');
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, bindBuffer, bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
});
function r(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return min + (max - min) * Math.random();
}
// make some tiles
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = tileWidth * tilesAcross;
ctx.canvas.height = tileHeight * tilesDown;
ctx.font = "bold 24px sans-serif";
ctx.textAlign = "center";
ctx.textBaseline = "middle";
const f = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ~';
for (let y = 0; y < tilesDown; ++y) {
for (let x = 0; x < tilesAcross; ++x) {
const color = `hsl(${r(360) | 0},${r(50,100)}%,50%)`;
ctx.fillStyle = color;
const tx = x * tileWidth;
const ty = y * tileHeight;
ctx.fillRect(tx, ty, tileWidth, tileHeight);
ctx.fillStyle = "#FFF";
ctx.fillText(f.substr(y * 8 + x, 1), tx + tileWidth * .5, ty + tileHeight * .5);
}
}
document.body.appendChild(ctx.canvas);
const tileTexture = twgl.createTexture(gl, {
src: ctx.canvas,
minMag: gl.NEAREST,
});
// make a tilemap
const mapWidth = 400;
const mapHeight = 300;
const tilemap = new Uint32Array(mapWidth * mapHeight);
const tilemapU8 = new Uint8Array(tilemap.buffer);
const totalTiles = tilesAcross * tilesDown;
for (let i = 0; i < tilemap.length; ++i) {
const off = i * 4;
// mostly tile 9
const tileId = r(10) < 1
? (r(totalTiles) | 0)
: 9;
tilemapU8[off + 0] = tileId % tilesAcross;
tilemapU8[off + 1] = tileId / tilesAcross | 0;
const xFlip = r(2) | 0;
const yFlip = r(2) | 0;
const xySwap = r(2) | 0;
tilemapU8[off + 3] =
(xFlip ? 128 : 0) |
(yFlip ? 64 : 0) |
(xySwap ? 32 : 0) ;
}
const mapTexture = twgl.createTexture(gl, {
src: tilemapU8,
width: mapWidth,
minMag: gl.NEAREST,
});
function ease(t) {
return Math.cos(t) * .5 + .5;
}
function lerp(a, b, t) {
return a + (b - a) * t;
}
function easeLerp(a, b, t) {
return lerp(a, b, ease(t));
}
function render(time) {
time *= 0.001; // convert to seconds;
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 1, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
const mat = m4.ortho(0, gl.canvas.width, gl.canvas.height, 0, -1, 1);
m4.scale(mat, [gl.canvas.width, gl.canvas.height, 1], mat);
const scaleX = easeLerp(.5, 2, time * 1.1);
const scaleY = easeLerp(.5, 2, time * 1.1);
const dispScaleX = 1;
const dispScaleY = 1;
// origin of scale/rotation
const originX = gl.canvas.width * .5;
const originY = gl.canvas.height * .5;
// scroll position in pixels
const scrollX = time % (mapWidth * tileWidth );
const scrollY = time % (mapHeight * tileHeight);
const rotation = time;
const tmat = m4.identity();
m4.translate(tmat, [scrollX, scrollY, 0], tmat);
m4.rotateZ(tmat, rotation, tmat);
m4.scale(tmat, [
gl.canvas.width / tileWidth / scaleX * (dispScaleX),
gl.canvas.height / tileHeight / scaleY * (dispScaleY),
1,
], tmat);
m4.translate(tmat, [
-originX / gl.canvas.width,
-originY / gl.canvas.height,
0,
], tmat);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_texMatrix: tmat,
u_tilemap: mapTexture,
u_tiles: tileTexture,
u_tilemapSize: [mapWidth, mapHeight],
u_tilesetSize: [tilesAcross, tilesDown],
});
gl.drawArrays(gl.TRIANGLES, 0, 6);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<canvas id="c"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
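If the map changes at runtime, you don't have to re-upload the whole tilemap texture; individual cells can be patched with gl.texSubImage2D. A minimal sketch using the names from the snippet above (updateMapCell is a hypothetical helper):
// hypothetical helper: change one map cell on the CPU and patch just that texel on the GPU
function updateMapCell(gl, mapTexture, tilemapU8, mapWidth, x, y, tileX, tileY, flags) {
  const off = (y * mapWidth + x) * 4;
  tilemapU8[off + 0] = tileX;   // tile x within the tileset
  tilemapU8[off + 1] = tileY;   // tile y within the tileset
  tilemapU8[off + 3] = flags;   // 128 = xflip, 64 = yflip, 32 = xySwap
  gl.bindTexture(gl.TEXTURE_2D, mapTexture);
  gl.texSubImage2D(gl.TEXTURE_2D, 0, x, y, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE,
    tilemapU8.subarray(off, off + 4));
}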

Related

OpenGL ES 2.0 - Fisheye shader displays a grey image

I've been trying to use a fisheye shader from Shadertoy.
I've added my own frame resolution, and changed some keywords (texture -> texture2D, fragColor -> gl_FragColor) but that's it.
I don't really know why it doesn't work or how to debug it.
As a result, I get a solid grey image.
Here's the code of my fragment shader :
precision mediump float;
uniform vec4 v_Color;
uniform sampler2D u_Texture;
varying vec2 v_TexCoordinate;
#define RESOLUTION_WIDTH 375.0
#define RESOLUTION_HEIGHT 211.0
#define POWER 2.0
void main() {
vec2 fragCoord = v_TexCoordinate;
vec2 iResolution = vec2(RESOLUTION_WIDTH, RESOLUTION_HEIGHT);
vec2 p = fragCoord.xy / iResolution.x; // normalized coords with some cheat
float prop = iResolution.x / iResolution.y;
vec2 m = vec2(0.5, 0.5 / prop); // center coords
vec2 d = p - m; // vector from center to current fragment
float r = sqrt(dot(d, d)); // distance of pixel from center
float power = POWER;
float bind; // radius of 1:1 effect
if (power > 0.0)
bind = sqrt(dot(m, m)); // stick to corners
else {
if (prop < 1.0)
bind = m.x;
else
bind = m.y;
} // stick to borders
// Weird formulas
vec2 uv;
if (power > 0.0) // fisheye
uv = m + normalize(d) * tan(r * power) * bind / tan( bind * power);
else if (power < 0.0) // antifisheye
uv = m + normalize(d) * atan(r * -power * 10.0) * bind / atan(-power * bind * 10.0);
else uv = p; // no effect for power = 1.0
vec3 col = texture2D(u_Texture, vec2(uv.x, -uv.y * prop)).xyz; // Second part of cheat
gl_FragColor = vec4(col, 1.0);
}
Here's my original shader to display an image that works perfectly :
precision mediump float;
uniform vec4 v_Color;
uniform sampler2D u_Texture;
varying vec2 v_TexCoordinate;
void main() {
// premultiplied alpha
vec4 texColor = texture2D(u_Texture, v_TexCoordinate);
// Scale the texture RGB by the vertex color
texColor.rgb *= v_Color.rgb;
// Scale the texture RGBA by the vertex alpha to reinstate premultiplication
gl_FragColor = texColor * v_Color.a;
}
Here's the link to the expected result on ShaderToy :
ShaderToy fisheye
Original result image :
With my shader :
With Rabbid76 solution :
With power = 1.1 :
With solution n2 and power = 10 (bigger image to see better) :
There's some background behind the text, don't pay attention to it ;)
In your shader code, fragCoord is assumed to be a window coordinate, where the minimum is (0, 0) and the maximum is the width and height of the viewport. But in your code, v_TexCoordinate is assigned to fragCoord, and v_TexCoordinate is a texture coordinate in the range [0, 1].
Use gl_FragCoord instead of v_TexCoordinate:
// vec2 fragCoord = v_TexCoordinate; <--- delete
vec2 fragCoord = gl_FragCoord.xy;
vec2 p = fragCoord.xy / iResolution.x;
Or skip dividing by the window resolution:
vec2 fragCoord = v_TexCoordinate;
// vec2 p = fragCoord.xy / iResolution.x; <-- delete
vec2 p = fragCoord.xy * vec2(1.0, iResolution.y/iResolution.x);
If the aspect ratio correction is not needed, it can even be simplified to:
vec2 p = v_TexCoordinate.xy;
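Whichever variant you choose, it is also worth replacing the hard-coded RESOLUTION_WIDTH/RESOLUTION_HEIGHT defines with a real uniform so the effect survives resizes. A minimal sketch with plain WebGL calls (it assumes the fragment shader declares uniform vec2 resolution; and that program is your linked program object):
// look the uniform up once after linking the program
var resolutionLocation = gl.getUniformLocation(program, "resolution");
// then, on every resize (or every frame), keep it in sync with the drawing buffer
gl.useProgram(program);
gl.uniform2f(resolutionLocation, gl.drawingBufferWidth, gl.drawingBufferHeight);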
See the WebGL example, where I use your original shader code and applied the suggested changes:
(function loadscene() {
var gl, canvas, prog, bufObj = {};
var texture;
function render(deltaMS) {
texture.bound = texture.bound || texture.bind( 0 );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShProg.Use( progDraw );
ShProg.SetF2( progDraw, "resolution", vp_size );
ShProg.SetI1( progDraw, "u_texture", 0 );
VertexBuffer.Draw( bufRect );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "texture-canvas");
gl = canvas.getContext( "experimental-webgl" );
//gl = canvas.getContext( "webgl2" );
if ( !gl )
return;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progDraw.inPos = gl.getAttribLocation( progDraw.progObj, "inPos" );
if ( progDraw.progObj == 0 )
return;
bufRect = VertexBuffer.Create(
[ { data : [ -1, -1, 1, -1, 1, 1, -1, 1 ], attrSize : 2, attrLoc : progDraw.inPos } ],
[ 0, 1, 2, 0, 2, 3 ] );
texture = new Texture( "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/supermario.jpg" );
texture.bound = false;
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
vp_size[0] = vp_size[1] = Math.min(vp_size[0], vp_size[1]);
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc, no_of: attribs[i].data.length/attribs[i].attrSize });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ARRAY_BUFFER, null);
if ( buffer.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
if ( bufObj.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
}
else
gl.drawArrays(bufObj.primitive_type, 0, bufObj.attr[0].no_of );
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
} };
class Texture {
constructor( name, dflt ) {
let texture = this;
this.dflt = dflt || [128,128,128,255]
let image = { "cx": this.dflt.w || 1, "cy": this.dflt.h || 1, "plane": this.dflt.p || this.dflt };
this.size = [image.cx, image.cy];
this.dummyObj = Texture.createTexture2D( image, true )
this.image = new Image(64,64);
this.image.setAttribute('crossorigin', 'anonymous');
this.image.onload = function () {
let cx = 1 << 31 - Math.clz32(texture.image.naturalWidth);
if ( cx < texture.image.naturalWidth ) cx *= 2;
let cy = 1 << 31 - Math.clz32(texture.image.naturalHeight);
if ( cy < texture.image.naturalHeight ) cy *= 2;
var canvas = document.createElement( 'canvas' );
canvas.width = cx;
canvas.height = cy;
var context = canvas.getContext( '2d' );
context.drawImage( texture.image, 0, 0, canvas.width, canvas.height );
texture.textureObj = Texture.createTexture2D( canvas, true );
texture.size = [cx, cy];
}
this.image.src = name;
}
static createTexture2D( image, flipY ) {
let t = gl.createTexture();
gl.activeTexture( gl.TEXTURE0 );
gl.bindTexture( gl.TEXTURE_2D, t );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
if ( image.cx && image.cy && image.plane )
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, image.cx, image.cy, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(image.plane) );
else
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
gl.bindTexture( gl.TEXTURE_2D, null );
return t;
}
bind( texUnit = 0 ) {
gl.activeTexture( gl.TEXTURE0 + texUnit );
if ( this.textureObj ) {
gl.bindTexture( gl.TEXTURE_2D, this.textureObj );
return true;
}
gl.bindTexture( gl.TEXTURE_2D, this.dummyObj );
return false;
}
};
initScene();
})();
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
void main()
{
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform vec2 resolution;
uniform sampler2D u_Texture;
#define RESOLUTION_WIDTH 375.0
#define RESOLUTION_HEIGHT 211.0
#define POWER 2.0
void main( void )
{
vec2 fragCoord = gl_FragCoord.xy;
vec2 iResolution = resolution;
//vec2 fragCoord = v_TexCoordinate;
//vec2 iResolution = vec2(RESOLUTION_WIDTH, RESOLUTION_HEIGHT);
vec2 p = fragCoord.xy / iResolution.x; // normalized coords with some cheat
float prop = iResolution.x / iResolution.y;
vec2 m = vec2(0.5, 0.5 / prop); // center coords
vec2 d = p - m; // vector from center to current fragment
float r = sqrt(dot(d, d)); // distance of pixel from center
float power = POWER;
float bind; // radius of 1:1 effect
if (power > 0.0)
bind = sqrt(dot(m, m)); // stick to corners
else {
if (prop < 1.0)
bind = m.x;
else
bind = m.y;
} // stick to borders
// Weird formulas
vec2 uv;
if (power > 0.0) // fisheye
uv = m + normalize(d) * tan(r * power) * bind / tan( bind * power);
else if (power < 0.0) // antifisheye
uv = m + normalize(d) * atan(r * -power * 10.0) * bind / atan(-power * bind * 10.0);
else uv = p; // no effect for power = 1.0
vec3 col = texture2D(u_Texture, vec2(uv.x, -uv.y * prop)).xyz; // Second part of cheat
gl_FragColor = vec4(col, 1.0);
}
</script>
<body>
<canvas id="texture-canvas" style="border: none"></canvas>
</body>

Three.js: Determining World coordinates of mouse position

I have a Three.js scene with points and am trying to figure out the relationship between my points' positions and screen coordinates. I thought I could use the function @WestLangley provided in a previous question, but implementing it has raised some confusion.
In the scene below, I'm storing the x coordinates of the left and right-most points in world.bb.x, and am logging the world coordinates of the cursor each time the mouse moves. However, when I mouse over the left- and right-most points, the world coordinates do not match the min or max x-coordinate values in world.bb.x, as I expected they would.
Do others know what I can do to figure out the world coordinates of my cursor at any given time? Any help others can offer is greatly appreciated!
function World() {
this.scene = this.getScene();
this.camera = this.getCamera();
this.renderer = this.getRenderer();
this.controls = this.getControls();
this.color = new THREE.Color();
this.addPoints();
this.render();
}
World.prototype.getScene = function() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0xefefef);
return scene;
}
World.prototype.getCamera = function() {
var renderSize = getRenderSize(),
aspectRatio = renderSize.w / renderSize.h,
camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 100000);
camera.position.set(0, 1, -10);
return camera;
}
World.prototype.getRenderer = function() {
var renderSize = getRenderSize(),
renderer = new THREE.WebGLRenderer({antialias: true});
renderer.setPixelRatio(window.devicePixelRatio); // retina displays
renderer.setSize(renderSize.w, renderSize.h); // set w,h
find('#gl-target').appendChild(renderer.domElement);
return renderer;
}
World.prototype.getControls = function() {
var controls = new THREE.TrackballControls(this.camera, this.renderer.domElement);
controls.zoomSpeed = 0.4;
controls.panSpeed = 0.4;
return controls;
}
World.prototype.render = function() {
requestAnimationFrame(this.render.bind(this));
this.renderer.render(this.scene, this.camera);
this.controls.update();
}
World.prototype.getMouseWorldCoords = function(e) {
var vector = new THREE.Vector3(),
camera = world.camera,
x = (e.clientX / window.innerWidth) * 2 - 1,
y = (e.clientY / window.innerHeight) * 2 + 1;
vector.set(x, y, 0.5);
vector.unproject(camera);
var direction = vector.sub(camera.position).normalize(),
distance = - camera.position.z / direction.z,
scaled = direction.multiplyScalar(distance),
coords = camera.position.clone().add(scaled);
return {
x: coords.x, y: coords.y,
};
}
World.prototype.addPoints = function() {
// this geometry builds a blueprint and many copies of the blueprint
var IBG = THREE.InstancedBufferGeometry,
BA = THREE.BufferAttribute,
IBA = THREE.InstancedBufferAttribute,
Vec3 = THREE.Vector3,
Arr = Float32Array;
// add data for each observation; n = num observations
var geometry = new IBG(),
n = 10000,
rootN = n**(1/2),
// find max min for each dim to center camera
xMax = Number.NEGATIVE_INFINITY,
xMin = Number.POSITIVE_INFINITY,
yMax = Number.NEGATIVE_INFINITY,
yMin = Number.POSITIVE_INFINITY;
var translations = new Arr(n * 3),
colors = new Arr(n * 3),
uidColors = new Arr(n * 3),
translationIterator = 0,
colorIterator = 0,
uidColorIterator = 0;
var colorMap = this.getColorMap();
for (var i=0; i<n; i++) {
var x = Math.sin(i) * 4,
y = Math.floor(i / (n/20)) * 0.3,
color = colorMap[ Math.floor(i / (n/20)) ],
uidColor = this.color.setHex(i + 1);
if (x > xMax) xMax = x;
if (x < xMin) xMin = x;
if (y > yMax) yMax = y;
if (y < yMin) yMin = y;
translations[translationIterator++] = x;
translations[translationIterator++] = y;
translations[translationIterator++] = 0;
colors[colorIterator++] = color.r / 255;
colors[colorIterator++] = color.g / 255;
colors[colorIterator++] = color.b / 255;
uidColors[uidColorIterator++] = uidColor.r;
uidColors[uidColorIterator++] = uidColor.g;
uidColors[uidColorIterator++] = uidColor.b;
}
// store the min and max coords in each dimension
this.bb = {
x: {
min: xMin,
max: xMax,
},
y: {
min: yMin,
max: yMax,
}
}
// center the camera
this.center = {
x: (xMax + xMin) / 2,
y: (yMax + yMin) / 2
}
this.camera.position.set(this.center.x, this.center.y, -6);
this.camera.lookAt(this.center.x, this.center.y, 0);
this.controls.target = new Vec3(this.center.x, this.center.y, 0);
// add attributes
geometry.addAttribute('position', new BA( new Arr([0, 0, 0]), 3));
geometry.addAttribute('translation', new IBA(translations, 3, 1) );
geometry.addAttribute('color', new IBA(colors, 3, 1) );
geometry.addAttribute('uidColor', new IBA(uidColors, 3, 1) );
var material = new THREE.RawShaderMaterial({
vertexShader: find('#vertex-shader').textContent,
fragmentShader: find('#fragment-shader').textContent,
});
var mesh = new THREE.Points(geometry, material);
mesh.frustumCulled = false; // prevent the mesh from being clipped on drag
this.scene.add(mesh);
}
World.prototype.getColorMap = function() {
function toHex(c) {
var hex = c.toString(16);
return hex.length == 1 ? '0' + hex : hex;
}
function rgbToHex(r, g, b) {
return '#' + toHex(r) + toHex(g) + toHex(b);
}
function hexToRgb(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16),
} : null;
}
var hexes = [
'#fe4445','#ff583b','#ff6a2f','#ff7a20','#ff8800',
'#ff9512','#ffa31f','#ffaf2a','#ffbb34',
'#cfc522','#99cc01',
'#91c14a','#85b66e','#73ac8f','#57a3ac','#0099cb',
'#14a0d1','#20a7d8','#2aaedf','#33b5e6'
]
var colorMap = {};
hexes.forEach(function(c, idx) { colorMap[idx] = hexToRgb(c) })
return colorMap;
}
/**
* Helpers
**/
function getRenderSize() {
var elem = find('#gl-target');
return {
w: elem.clientWidth,
h: elem.clientHeight,
}
}
function find(selector) {
return document.querySelector(selector);
}
/**
* Main
**/
var world = new World();
world.controls.enabled = false;
find('canvas').addEventListener('mousemove', function(e) {
find('#bar').style.left = e.clientX + 'px';
var coords = world.getMouseWorldCoords(e);
console.log(coords, world.bb.x);
})
html, body {
width: 100%;
height: 100%;
background: #000;
}
body {
margin: 0;
overflow: hidden;
}
canvas {
width: 100%;
height: 100%;
}
.gl-container {
position: relative;
}
#gl-target {
width:700px;
height:400px
}
#bar {
width: 1px;
height: 100%;
display: inline-block;
position: absolute;
left: 30px;
background: red;
}
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/95/three.min.js'></script>
<script src='https://rawgit.com/YaleDHLab/pix-plot/master/assets/js/trackball-controls.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec3 translation;
#ifdef PICKING
attribute vec3 uidColor;
varying vec3 vUidColor;
#else
attribute vec3 color;
#endif
varying vec3 vColor;
void main() {
#ifdef PICKING
vUidColor = uidColor;
#else
vColor = color;
#endif
// set point position
vec3 raw = position + translation;
vec4 pos = projectionMatrix * modelViewMatrix * vec4(raw, 1.0);
gl_Position = pos;
// set point size
gl_PointSize = 10.0;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
#ifdef PICKING
varying vec3 vUidColor;
#else
varying vec3 vColor;
#endif
void main() {
// make point circular
vec2 coord = gl_PointCoord - vec2(0.5);
if (length(coord) > 0.5) discard;
// color the point
#ifdef PICKING
gl_FragColor = vec4(vUidColor, 1.0);
#else
gl_FragColor = vec4(vColor, 1.0);
#endif
}
</script>
<div class='gl-container'>
<div id='bar'></div>
<div id='gl-target'></div>
</div>
Aha, instead of dividing the event x and y coordinates by the window width (which only applies to canvases that extend through the full window height and width), I need to divide the event x and y coordinates by the canvas's width and height!
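For reference, a general-purpose version of that conversion uses the canvas's bounding rect, so it also works when the canvas is not at the page's top-left corner, and flips Y because NDC Y points up while clientY grows downward. A minimal sketch (eventToNDC is a hypothetical helper, not part of the scene below):
// convert a mouse event to normalized device coordinates (-1..1) for a given canvas
function eventToNDC(e, canvas) {
  var rect = canvas.getBoundingClientRect();
  return {
    x: ((e.clientX - rect.left) / rect.width) * 2 - 1,
    y: -((e.clientY - rect.top) / rect.height) * 2 + 1, // note the Y flip
  };
}
Here is the full corrected scene: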
function World() {
this.scene = this.getScene();
this.camera = this.getCamera();
this.renderer = this.getRenderer();
this.color = new THREE.Color();
this.addPoints();
this.render();
}
World.prototype.getScene = function() {
var scene = new THREE.Scene();
scene.background = new THREE.Color(0xefefef);
return scene;
}
World.prototype.getCamera = function() {
var renderSize = getRenderSize(),
aspectRatio = renderSize.w / renderSize.h,
camera = new THREE.PerspectiveCamera(75, aspectRatio, 0.1, 100000);
camera.position.set(0, 1, -10);
return camera;
}
World.prototype.getRenderer = function() {
var renderSize = getRenderSize(),
renderer = new THREE.WebGLRenderer({antialias: true});
renderer.setPixelRatio(window.devicePixelRatio); // retina displays
renderer.setSize(renderSize.w, renderSize.h); // set w,h
find('#gl-target').appendChild(renderer.domElement);
return renderer;
}
World.prototype.render = function() {
requestAnimationFrame(this.render.bind(this));
this.renderer.render(this.scene, this.camera);
}
World.prototype.getMouseWorldCoords = function(e) {
var elem = find('#gl-target'),
vector = new THREE.Vector3(),
camera = world.camera,
x = (e.clientX / elem.clientWidth) * 2 - 1,
y = (e.clientY / elem.clientHeight) * 2 + 1;
vector.set(x, y, 0.5);
vector.unproject(camera);
var direction = vector.sub(camera.position).normalize(),
distance = - camera.position.z / direction.z,
scaled = direction.multiplyScalar(distance),
coords = camera.position.clone().add(scaled);
return {
x: coords.x,
y: coords.y,
};
}
World.prototype.addPoints = function() {
// this geometry builds a blueprint and many copies of the blueprint
var IBG = THREE.InstancedBufferGeometry,
BA = THREE.BufferAttribute,
IBA = THREE.InstancedBufferAttribute,
Vec3 = THREE.Vector3,
Arr = Float32Array;
// add data for each observation; n = num observations
var geometry = new IBG(),
n = 10000,
rootN = n**(1/2),
// find max min for each dim to center camera
xMax = Number.NEGATIVE_INFINITY,
xMin = Number.POSITIVE_INFINITY,
yMax = Number.NEGATIVE_INFINITY,
yMin = Number.POSITIVE_INFINITY;
var translations = new Arr(n * 3),
colors = new Arr(n * 3),
uidColors = new Arr(n * 3),
translationIterator = 0,
colorIterator = 0,
uidColorIterator = 0;
var colorMap = this.getColorMap();
for (var i=0; i<n; i++) {
var x = Math.sin(i) * 4,
y = Math.floor(i / (n/20)) * 0.3,
color = colorMap[ Math.floor(i / (n/20)) ],
uidColor = this.color.setHex(i + 1);
if (x > xMax) xMax = x;
if (x < xMin) xMin = x;
if (y > yMax) yMax = y;
if (y < yMin) yMin = y;
translations[translationIterator++] = x;
translations[translationIterator++] = y;
translations[translationIterator++] = 0;
colors[colorIterator++] = color.r / 255;
colors[colorIterator++] = color.g / 255;
colors[colorIterator++] = color.b / 255;
uidColors[uidColorIterator++] = uidColor.r;
uidColors[uidColorIterator++] = uidColor.g;
uidColors[uidColorIterator++] = uidColor.b;
}
// store the min and max coords in each dimension
this.bb = {
x: {
min: xMin,
max: xMax,
},
y: {
min: yMin,
max: yMax,
}
}
// center the camera
this.center = {
x: (xMax + xMin) / 2,
y: (yMax + yMin) / 2
}
this.camera.position.set(this.center.x, this.center.y, -6);
this.camera.lookAt(this.center.x, this.center.y, 0);
// add attributes
geometry.addAttribute('position', new BA( new Arr([0, 0, 0]), 3));
geometry.addAttribute('translation', new IBA(translations, 3, 1) );
geometry.addAttribute('color', new IBA(colors, 3, 1) );
geometry.addAttribute('uidColor', new IBA(uidColors, 3, 1) );
var material = new THREE.RawShaderMaterial({
vertexShader: find('#vertex-shader').textContent,
fragmentShader: find('#fragment-shader').textContent,
});
var mesh = new THREE.Points(geometry, material);
mesh.frustumCulled = false; // prevent the mesh from being clipped on drag
this.scene.add(mesh);
}
World.prototype.getColorMap = function() {
function toHex(c) {
var hex = c.toString(16);
return hex.length == 1 ? '0' + hex : hex;
}
function rgbToHex(r, g, b) {
return '#' + toHex(r) + toHex(g) + toHex(b);
}
function hexToRgb(hex) {
var result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16),
} : null;
}
var hexes = [
'#fe4445','#ff583b','#ff6a2f','#ff7a20','#ff8800',
'#ff9512','#ffa31f','#ffaf2a','#ffbb34',
'#cfc522','#99cc01',
'#91c14a','#85b66e','#73ac8f','#57a3ac','#0099cb',
'#14a0d1','#20a7d8','#2aaedf','#33b5e6'
]
var colorMap = {};
hexes.forEach(function(c, idx) { colorMap[idx] = hexToRgb(c) })
return colorMap;
}
/**
* Helpers
**/
function getRenderSize() {
var elem = find('#gl-target');
return {
w: elem.clientWidth,
h: elem.clientHeight,
}
}
function find(selector) {
return document.querySelector(selector);
}
/**
* Main
**/
var world = new World();
find('canvas').addEventListener('mousemove', function(e) {
find('#bar').style.left = e.clientX + 'px';
var coords = world.getMouseWorldCoords(e);
console.log(coords, world.bb.x);
})
html, body {
width: 100%;
height: 100%;
background: #000;
}
body {
margin: 0;
overflow: hidden;
}
canvas {
width: 100%;
height: 100%;
}
.gl-container {
position: relative;
}
#gl-target {
width:700px;
height:400px
}
#bar {
width: 1px;
height: 100%;
display: inline-block;
position: absolute;
left: 30px;
background: red;
}
<div class='gl-container'>
<div id='bar'></div>
<div id='gl-target'></div>
</div>
<script src='https://cdnjs.cloudflare.com/ajax/libs/three.js/95/three.min.js'></script>
<script type='x-shader/x-vertex' id='vertex-shader'>
precision highp float;
uniform mat4 modelViewMatrix;
uniform mat4 projectionMatrix;
attribute vec3 position;
attribute vec3 translation;
#ifdef PICKING
attribute vec3 uidColor;
varying vec3 vUidColor;
#else
attribute vec3 color;
#endif
varying vec3 vColor;
void main() {
#ifdef PICKING
vUidColor = uidColor;
#else
vColor = color;
#endif
// set point position
vec3 raw = position + translation;
vec4 pos = projectionMatrix * modelViewMatrix * vec4(raw, 1.0);
gl_Position = pos;
// set point size
gl_PointSize = 10.0;
}
</script>
<script type='x-shader/x-fragment' id='fragment-shader'>
precision highp float;
#ifdef PICKING
varying vec3 vUidColor;
#else
varying vec3 vColor;
#endif
void main() {
// make point circular
vec2 coord = gl_PointCoord - vec2(0.5);
if (length(coord) > 0.5) discard;
// color the point
#ifdef PICKING
gl_FragColor = vec4(vUidColor, 1.0);
#else
gl_FragColor = vec4(vColor, 1.0);
#endif
}
</script>

How can textures with transparent spots be correctly applied to multiple stacked plane instances in threejs?

I'm creating 512 instances of the same 1x1 plane with a texture that has transparent areas. The planes are randomly spread around the origin like the image below.
How can the planes in front be drawn after the planes behind, so that the transparency of the planes in front takes into account the output of the planes behind them?
(with depthTest disabled)
(with depthTest normal)
For reference, here is the transparency-disabled version of the instanced geometry, which proves that the planes are correctly positioned.
Update:
Adding code as asked:
import {
Mesh,
ShaderMaterial,
Vector3,
PlaneBufferGeometry,
EdgesGeometry,
LineBasicMaterial,
LineSegments,
InstancedBufferAttribute,
UniformsLib,
BufferAttribute,
TextureLoader,
InstancedBufferGeometry,
DoubleSide,
} from 'three'
import path from 'path'
import fs from 'fs'
import {
randomValueBetween,
} from '../../utils'
const vertexShader = fs.readFileSync(path.resolve(__dirname, './assets/vertex.glsl'), 'utf8')
const fragmentShader = fs.readFileSync(path.resolve(__dirname, './assets/fragment.glsl'), 'utf8')
const createInstancedAtrributes = (geometry, instanceCount) => {
const startseed = new InstancedBufferAttribute(new Float32Array(instanceCount * 1), 1)
const scale = new InstancedBufferAttribute(new Float32Array(instanceCount * 3), 3)
const offset = new InstancedBufferAttribute(new Float32Array(instanceCount * 2), 2)
const orientationY = new InstancedBufferAttribute(new Float32Array(instanceCount), 1)
const baseScale = 0.5
for (let i = 0; i < instanceCount; i += 1) {
scale.setXYZ(i,
baseScale * randomValueBetween(0.8, 1.3, 1),
baseScale * randomValueBetween(0.8, 1.3, 1),
baseScale * randomValueBetween(0.8, 1.3, 1),
)
orientationY.setX(i, randomValueBetween(0.0, 1.0, 3))
startseed.setX(i, randomValueBetween(1, 3, 1))
}
for (let i = 0; i < instanceCount / 4; i += 4) {
const randomX = randomValueBetween(-3.5, 3.5, 1)
const randomY = randomValueBetween(-3.5, 3.5, 1)
offset.setXY(i, randomX, randomY)
}
geometry.addAttribute('scale', scale)
geometry.addAttribute('offset', offset)
geometry.addAttribute('startseed', startseed)
geometry.addAttribute('orientationY', orientationY)
return { scale, offset }
}
const createInstancedGeometry = (instancePerUnitCount) => {
const geometry = new InstancedBufferGeometry()
geometry.maxInstancedCount = instancePerUnitCount
const shape = new PlaneBufferGeometry(1, 1, 1, 3)
const data = shape.attributes
geometry.addAttribute('position', new BufferAttribute(new Float32Array(data.position.array), 3))
geometry.addAttribute('uv', new BufferAttribute(new Float32Array(data.uv.array), 2))
geometry.addAttribute('normal', new BufferAttribute(new Float32Array(data.normal.array), 3))
geometry.setIndex(new BufferAttribute(new Uint16Array(shape.index.array), 1))
shape.dispose()
createInstancedAtrributes(geometry, instancePerUnitCount)
return geometry
}
export default class GrassDeform extends Mesh {
constructor() {
const geometry = createInstancedGeometry(8 * 256)
const uniforms = {
uTime: {
type: 'f',
value: 0,
},
uMap: {
type: 't',
value: null,
},
}
const textureLoader = new TextureLoader()
textureLoader.load(path.resolve(__dirname, './assets/grass-texture-01.png'), (t) => {
uniforms.uMap.value = t
})
const material = new ShaderMaterial({
uniforms: Object.assign({},
UniformsLib.ambient,
UniformsLib.lights,
uniforms,
),
vertexShader,
fragmentShader,
lights: true,
transparent: true,
side: DoubleSide,
})
super(geometry, material)
this.geometry = geometry
this.material = material
this.up = new Vector3(0, 0, 1)
const lineGeo = new EdgesGeometry(geometry) // or WireframeGeometry
const mat = new LineBasicMaterial({ color: 0xffffff, linewidth: 2 })
const wireframe = new LineSegments(lineGeo, mat)
this.add(wireframe)
this.frustumCulled = false
}
update({ ellapsedTime }) {
this.material.uniforms.uTime.value = ellapsedTime
}
}
And the object is added to the scene like this:
const grass2 = new GrassDeform2()
grass2.position.set(-1, 0, 0.50)
grass2.rotateX(Math.PI / 2)
scene.add(grass2)
dirLight.target = grass2
const animate = (ellapsedTime = 0) => {
stats.begin()
grass2.update({ ellapsedTime })
/// other scene stuff
renderer.render(scene, playerController.camera)
requestAnimationFrame(animate)
}
animate()
The vertex shader:
#if NUM_DIR_LIGHTS > 0
struct DirectionalLight {
vec3 direction;
vec3 color;
int shadow;
float shadowBias;
float shadowRadius;
vec2 shadowMapSize;
};
uniform DirectionalLight directionalLights[ NUM_DIR_LIGHTS ];
#endif
uniform float uTime;
attribute vec2 offset;
attribute vec3 scale;
attribute float startseed;
attribute float orientationY;
varying vec2 vUv;
varying vec3 vPosition;
varying vec3 vDirectionalLightDirection;
varying vec3 vDirectionalLightColor;
varying vec3 uNormal;
void main() {
vec3 pos = position * scale;
pos.x += offset.x;
pos.z += offset.y;
pos.y += (scale.y - 1.0) * 0.5;
pos.y = orientationY;
vPosition = pos;
uNormal = normal;
vUv = uv;
uNormal = normal;
vDirectionalLightDirection = directionalLights[0].direction;
vDirectionalLightColor = directionalLights[0].color;
float variation = startseed + uTime * 0.002;
float pass = (0.5 + pos.y) * 0.05;
pos.x += sin(pass + variation) * pass;
pos.z += cos(pass + variation + 0.01) * pass;
pos.y += sin(pass + variation - 0.01) * pass;
gl_Position = projectionMatrix * modelViewMatrix * vec4(pos,1.0);
}
And the fragment shader (has some extra stuff for light, not added for now):
uniform sampler2D uMap;
varying vec2 vUv;
varying vec3 vPosition;
varying vec3 vDirectionalLightDirection;
varying vec3 vDirectionalLightColor;
varying vec3 uNormal;
void main() {
vec4 map = texture2D(uMap, vUv);
vec3 lightVector = normalize((vDirectionalLightDirection) - vPosition);
float dotNL = dot( uNormal, lightVector );
vec3 baseColor = map.rgb;
vec3 lightedColor = vDirectionalLightColor * 0.6 * dotNL;
if ( map.a < 0.5 ) discard; //!!! THIS WAS THE LINE NEEDED TO SOLVE THE ISSUE
gl_FragColor = vec4( map.rgb , 1 );
}
After applying the change marked in the fragment shader above, the final scene looks right!
You can solve your problem with alpha testing. Use a pattern like the following in your fragment shader:
vec4 texelColor = texture2D( map, vUv );
if ( texelColor.a < 0.5 ) discard;
Your material will no longer need to have transparent = true, since you appear to be using a cut-out in which the texture alpha is either 0 or 1.
three.js r.88
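As an aside, if you were using one of three.js's built-in materials rather than a custom ShaderMaterial, the same cut-out behaviour is exposed as the material's alphaTest property, which injects an equivalent discard for you. A sketch (grassTexture is assumed to be your already-loaded texture):
var cutoutMaterial = new THREE.MeshBasicMaterial({
  map: grassTexture,   // assumed: the grass texture loaded elsewhere
  alphaTest: 0.5,      // fragments with alpha below 0.5 are discarded
  side: THREE.DoubleSide,
});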

Three.js camera control not working & WebGL shader

I found a nice water simulation on CodePen and modified it with help from another thread here (I can't find it anymore, though).
I have used three.js a couple of times before, but now I just can't comprehend why the camera positioning/rotation/aspect/etc. isn't working. No matter what coordinates or angle I give the camera, and even after calling updateProjectionMatrix, nothing happens; the camera just stays in one place.
I commented out the resize events etc., since they don't do anything either.
Entire code:
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r70/three.min.js"></script>
<style type="text/css">
body {
overflow: hidden;
margin: 0;
height: 100%;
}
</style>
<title></title>
<script type='text/javascript'>//<![CDATA[
window.onload=function(){
// init camera, scene, renderer
var scene, camera, renderer;
scene = new THREE.Scene();
var fov = 75,
aspect = window.innerWidth / window.innerHeight;
camera = new THREE.PerspectiveCamera(fov, aspect, 0.1, 1000);
camera.position.z = 200;
camera.rotate.z = 1.5707963268;
camera.updateProjectionMatrix();
renderer = new THREE.WebGLRenderer();
renderer.setClearColor(0xc4c4c4);
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var clock = new THREE.Clock();
var tuniform = {
time: {
type: 'f',
value: 0.1
},
resolution: {
type: 'v2',
value: new THREE.Vector2()
},
mouse: {
type: 'v4',
value: new THREE.Vector2()
}
};
// Mouse position in - 1 to 1
renderer.domElement.addEventListener('mousedown', function(e) {
//var canvas = renderer.domElement;
//var rect = canvas.getBoundingClientRect();
//tuniform.mouse.value.x = (e.clientX - rect.left) / window.innerWidth * 2 - 1;
//tuniform.mouse.value.y = (e.clientY - rect.top) / window.innerHeight * -2 + 1;
});
renderer.domElement.addEventListener('mouseup', function(e) {
//var canvas = renderer.domElement;
//var rect = canvas.getBoundingClientRect();
//tuniform.mouse.value.z = (e.clientX - rect.left) / window.innerWidth * 2 - 1;
//tuniform.mouse.value.w = (e.clientY - rect.top) / window.innerHeight * -2 + 1;
});
// resize canvas function
window.addEventListener('resize',function() {
//camera.aspect = window.innerWidth / window.innerHeight;
//camera.updateProjectionMatrix();
//renderer.setSize(window.innerWidth, window.innerHeight);
});
tuniform.resolution.value.x = window.innerWidth;
tuniform.resolution.value.y = window.innerHeight;
// Create Plane
var material = new THREE.ShaderMaterial({
uniforms: tuniform,
vertexShader: document.getElementById('vertex-shader').textContent,
fragmentShader: document.getElementById('fragment-shader').textContent
});
var mesh = new THREE.Mesh(
new THREE.PlaneBufferGeometry(window.innerWidth, window.innerHeight, 40), material
);
scene.add(mesh);
// draw animation
function render(time) {
tuniform.time.value += clock.getDelta();
requestAnimationFrame(render);
renderer.render(scene, camera);
}
render();
}//]]>
</script>
</head>
<body>
<!-- THIS is OPENGL Shading language scripts -->
<script id="vertex-shader" type="no-js">
void main() {
gl_Position = vec4( position, 1.0 );
}
</script>
<script id="fragment-shader" type="no-js">
#ifdef GL_ES
precision mediump float;
#endif
uniform float time;
uniform vec2 mouse;
uniform vec2 resolution;
varying vec2 surfacePosition;
const int NUM_STEPS = 8;
const float PI = 3.1415;
const float EPSILON = 1e-3;
float EPSILON_NRM = 0.1 / resolution.x;
// sea
const int ITER_GEOMETRY = 3;
const int ITER_FRAGMENT = 5;
const float SEA_HEIGHT = 0.6;
const float SEA_CHOPPY = 2.0;
const float SEA_SPEED = 0.5;
const float SEA_FREQ = 0.16;
const vec3 SEA_BASE = vec3(0.1,0.19,0.22); //meren pohjaväri
const vec3 SEA_WATER_COLOR = vec3(0.8,0.9,0.6);
const float SKY_INTENSITY = 1.0;
#define SEA_TIME time * SEA_SPEED
// math
mat4 fromEuler(vec3 ang) {
vec2 a1 = vec2(sin(ang.x),cos(ang.x));
vec2 a2 = vec2(sin(ang.y),cos(ang.y));
vec2 a3 = vec2(sin(ang.z),cos(ang.z));
mat4 m;
m[0] = vec4(a1.y*a3.y+a1.x*a2.x*a3.x,a1.y*a2.x*a3.x+a3.y*a1.x,-a2.y*a3.x,0.0);
m[1] = vec4(-a2.y*a1.x,a1.y*a2.y,a2.x,0.0);
m[2] = vec4(a3.y*a1.x*a2.x+a1.y*a3.x,a1.x*a3.x-a1.y*a3.y*a2.x,a2.y*a3.y,0.0);
m[3] = vec4(0.0,0.0,0.0,1.0);
return m;
}
vec3 rotate(vec3 v, mat4 m) {
return vec3(dot(v,m[0].xyz),dot(v,m[1].xyz),dot(v,m[2].xyz));
}
float hash( vec2 p ) {
float h = dot(p,vec2(127.1,311.7));
return fract(sin(h)*43758.5453123);
}
float noise( in vec2 p ) {
vec2 i = floor( p );
vec2 f = fract( p );
vec2 u = f*f*(3.0-2.0*f);
return -1.0+2.0*mix( mix( hash( i + vec2(0.0,0.0) ),
hash( i + vec2(1.0,0.0) ), u.x),
mix( hash( i + vec2(0.0,1.0) ),
hash( i + vec2(1.0,1.0) ), u.x), u.y);
}
// lighting
float diffuse(vec3 n,vec3 l,float p) { return pow(dot(n,l) * 0.4 + 0.6,p); }
float specular(vec3 n,vec3 l,vec3 e,float s) {
float nrm = (s + 8.0) / (3.1415 * 8.0);
return pow(max(dot(reflect(e,n),l),0.0),s) * nrm;
}
// sky
vec3 sky_color(vec3 e) {
e.y = max(e.y,0.0);
vec3 ret;
ret.x = pow(1.0-e.y,2.0);
ret.y = 1.0-e.y;
ret.z = 0.6+(1.0-e.y)*0.4;
return ret * SKY_INTENSITY;
}
// sea
float sea_octave(vec2 uv, float choppy) {
uv += noise(uv);
vec2 wv = 1.0-abs(sin(uv));
vec2 swv = abs(cos(uv));
wv = mix(wv,swv,wv);
return pow(1.0-pow(wv.x * wv.y,0.65),choppy);
}
float map(vec3 p) {
float freq = SEA_FREQ;
float amp = SEA_HEIGHT;
float choppy = SEA_CHOPPY;
vec2 uv = p.xz; uv.x *= 0.75;
mat2 m = mat2(1.6,1.2,-1.2,1.6);
float d, h = 0.0;
for(int i = 0; i < ITER_GEOMETRY; i++) {
d = sea_octave((uv+SEA_TIME)*freq,choppy);
d += sea_octave((uv-SEA_TIME)*freq,choppy);
h += d * amp;
uv *= m; freq *= 1.9; amp *= 0.22;
choppy = mix(choppy,1.0,0.2);
}
return p.y - h;
}
float map_detailed(vec3 p) {
float freq = SEA_FREQ;
float amp = SEA_HEIGHT;
float choppy = SEA_CHOPPY;
vec2 uv = p.xz; uv.x *= 0.75;
mat2 m = mat2(1.6,1.2,-1.2,1.6);
float d, h = 0.0;
for(int i = 0; i < ITER_FRAGMENT; i++) {
d = sea_octave((uv+SEA_TIME)*freq,choppy);
d += sea_octave((uv-SEA_TIME)*freq,choppy);
h += d * amp;
uv *= m; freq *= 1.9; amp *= 0.22;
choppy = mix(choppy,1.0,0.2);
}
return p.y - h;
}
vec3 sea_color(in vec3 p, in vec3 n, in vec3 eye, in vec3 dist) {
float fresnel_o = 1.0 - max(dot(n,-eye),0.0);
float fresnel = pow(fresnel_o,3.0) * 0.65;
// reflection
vec3 refl = sky_color(reflect(eye,n));
// color
vec3 ret = SEA_BASE;
ret = mix(ret,refl,fresnel);
// wave peaks
float atten = max(1.0 - dot(dist,dist) * 0.001, 0.0);
ret += SEA_WATER_COLOR * (p.y - SEA_HEIGHT) * 0.18 * atten;
return ret;
}
// tracing
vec3 getNormal(vec3 p, float eps) {
vec3 n;
n.y = map_detailed(p);
n.x = map_detailed(vec3(p.x+eps,p.y,p.z)) - n.y;
n.z = map_detailed(vec3(p.x,p.y,p.z+eps)) - n.y;
n.y = eps;
return normalize(n);
}
float hftracing(vec3 ori, vec3 dir, out vec3 p) {
float tm = 0.0;
float tx = 1000.0;
float hx = map(ori + dir * tx);
if(hx > 0.0) return tx;
float hm = map(ori + dir * tm);
float tmid = 0.0;
for(int i = 0; i < NUM_STEPS; i++) {
tmid = mix(tm,tx, hm/(hm-hx));
p = ori + dir * tmid;
float hmid = map(p);
if(hmid < 0.0) {
tx = tmid;
hx = hmid;
} else {
tm = tmid;
hm = hmid;
}
}
return tmid;
}
// main
void main(void) {
vec2 uv = gl_FragCoord.xy / resolution.xy;
uv = 1.0 - uv * 2.0;
uv.x *= resolution.x / resolution.y;
//uv = (surfacePosition+vec2(0., .5))*17. + 5E-3*(pow(length(surfacePosition+vec2(0. ,0.5)), -2.));
uv.y *= -1.;
//uv.y += -2.;
// ray
vec3 ang = vec3(0.0,0.003, pow(time, 0.6));
ang = vec3(0.0,clamp(2.0-mouse.y*0.01,-0.3,PI),mouse.x*0.01);
vec3 ori = vec3(0.0,3.5,time*.05);
vec3 dir = normalize(vec3(uv.xy,-2.0));
dir.z -= length(uv) * 0.15;
//dir = rotate(normalize(dir),ang);
// tracing
vec3 p;
float dens = hftracing(ori,dir,p);
vec3 dist = p - ori;
vec3 n = getNormal(p, dot(dist,dist)*EPSILON_NRM);
// color
vec3 color = sea_color(p,n,dir,dist);
vec3 light = normalize(vec3(0.0,1.0,0.8));
color += vec3(diffuse(n,light,80.0) * SEA_WATER_COLOR) * 0.12;
color += vec3(specular(n,light,dir,60.0));
// post
color = mix(sky_color(dir),color,pow(smoothstep(0.0,-0.05,dir.y),0.3));
color = pow(color,vec3(0.75));
gl_FragColor = vec4(color,1.0);
}
</script>
<script>
// tell the embed parent frame the height of the content
if (window.parent && window.parent.parent){
window.parent.parent.postMessage(["resultsFrame", {
height: document.body.getBoundingClientRect().height,
slug: "uz6yo2w3"
}], "*")
}
</script>
</body>
</html>
There is so much trial and error in this code that it's not even clear to me what you're trying to do, but I can give some hints:
Change camera.rotate to camera.rotation (l. 30);
Your mouse events are commented out. If you want to rotate the camera with the mouse, you're going to have to add a mousemove event (a sketch follows this list);
By the way, at l. 50 you send a vec4 but load it as a vec2 at l. 126;
When the window is resized, you may also want to pass the new resolution to the shader;
At l. 304, speed up the Z component of the camera's origin, e.g. change vec3 ori = vec3(0.0, 3.5, time * .05); to something like vec3 ori = vec3(0.0, 3.5, time * 5.0); so you can see the camera moving along the sea;
At l. 306, instead of dir = rotate(normalize(dir), ang); restore the original dir = normalize(dir) * fromEuler(ang); (ang is the angle of the camera);
At l. 149, change your mat4 fromEuler(vec3 ang) {...} back to the original mat3 fromEuler(vec3 ang) {...} function;
At l. 301, just put vec3 ang = vec3(0.0, 0.0, 0.0); and play with it. You may use the mouse coordinates here, depending on how you want the user to interact with the camera.
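For the mousemove hint, here is a minimal sketch that reuses the exact math from the commented-out mousedown handler in the question and feeds it into the existing tuniform.mouse uniform:
renderer.domElement.addEventListener('mousemove', function(e) {
  var canvas = renderer.domElement;
  var rect = canvas.getBoundingClientRect();
  // mouse position mapped to roughly -1..1, as in the commented-out handlers
  tuniform.mouse.value.x = (e.clientX - rect.left) / window.innerWidth * 2 - 1;
  tuniform.mouse.value.y = (e.clientY - rect.top) / window.innerHeight * -2 + 1;
});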

Fragment shader - determine min/max values for the entire (monochrome) image and use them for further pixel manipulations

I'd like to normalize monochrome image pixels in that way the minimum value is black, the maximum is white and values in between are spread proportionally.
Currently I do it in canvas in two steps, but I believe it should be faster in WebGL.
I can imagine manipulating colors via fragment shader, but I couldn't find any efficient way for (1) determining the actual range of the image, nor (2) approach for passing this info to another fragment shader, which could then perform that grey level normalization.
Seems like you could generate progressively smaller textures with your fragment shader and, in each texture, write out the min and max. So, for example, if you have a 16x16 texture, then for every 2x2 block of pixels write out 1 pixel that holds the max.
vec4 c00 = texture2D(sampler, uv);
vec4 c10 = texture2D(sampler, uv + vec2(onePixelRight, 0));
vec4 c01 = texture2D(sampler, uv + vec2(0, onePixelUp));
vec4 c11 = texture2D(sampler, uv + vec2(onePixelRight, onePixelUp));
gl_FragColor = max(max(c00, c10), max(c01, c11));
Repeat until you get to 1x1 pixel. Do the same for min. When you're done you'll have 2 1x1 pixel textures. Either read them with readPixels or pass them to another shader as your range.
It might be faster to use larger chunks: instead of 2x2, do 8x8 or 16x16 areas, but keep reducing until you get to 1x1 pixels.
In pseudo code.
// setup
textures = [];
framebuffers = [];
cellSize = 16
maxDimension = max(width, height)
w = width
h = height
while w > 1 || h > 1 {
w = max(1, w / cellSize)
h = max(1, h / cellSize)
textures.push(create Texture of size w, h)
framebuffers.push(create framebuffer and attach texture)
}
// computation
bind original image as input texture
foreach(framebuffer) {
bind framebuffer
render to framebuffer with max GLSL shader above
bind texture of current framebuffer as input to next iteration
}
Now the last framebuffer has a 1x1 pixel texture with the max value in it.
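How many render passes that takes depends on the chunk size: each pass divides both dimensions by cellSize, so you need roughly ceil(log(maxDimension) / log(cellSize)) passes. A quick sketch of the count, using the same ceiling rounding as the snippets below:
// count the reduction passes needed to get from width x height down to 1x1
function numReductionPasses(width, height, cellSize) {
  var w = width, h = height, passes = 0;
  while (w > 1 || h > 1) {
    w = Math.max(1, Math.ceil(w / cellSize));
    h = Math.max(1, Math.ceil(h / cellSize));
    ++passes;
  }
  return passes;
}
// e.g. a 300x150 source with cellSize 16: 300x150 -> 19x10 -> 2x1 -> 1x1 = 3 passes
Here's a working snippet: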
"use strict";
var cellSize = 2;
// make a texture as our source
var ctx = document.createElement("canvas").getContext("2d");
ctx.fillStyle = "rgb(12, 34, 56)";
ctx.fillRect(20, 30, 1, 1);
ctx.fillStyle = "rgb(254, 243, 232)";
ctx.fillRect(270, 140, 1, 1);
var canvas = document.createElement("canvas");
var m4 = twgl.m4;
var gl = canvas.getContext("webgl");
var fsSrc = document.getElementById("max-fs").text.replace("$(cellSize)s", cellSize);
var programInfo = twgl.createProgramInfo(gl, ["vs", fsSrc]);
var unitQuadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
var framebufferInfo = twgl.createFramebufferInfo(gl);
var srcTex = twgl.createTexture(gl, {
src: ctx.canvas,
min: gl.NEAREST,
mag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
});
var framebuffers = [];
var w = ctx.canvas.width;
var h = ctx.canvas.height;
while (w > 1 || h > 1) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
// creates a framebuffer and creates and attaches an RGBA/UNSIGNED texture
var fb = twgl.createFramebufferInfo(gl, [
{ min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE },
], w, h);
framebuffers.push(fb);
}
var uniforms = {
u_srcResolution: [ctx.canvas.width, ctx.canvas.height],
u_texture: srcTex,
};
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, unitQuadBufferInfo);
var w = ctx.canvas.width;
var h = ctx.canvas.height;
framebuffers.forEach(function(fbi, ndx) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
uniforms.u_dstResolution = [w, h];
twgl.bindFramebufferInfo(gl, fbi);
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, unitQuadBufferInfo);
uniforms.u_texture = fbi.attachments[0];
uniforms.u_srcResolution = [w, h];
});
var p = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, p);
log("max: ", p[0], p[1], p[2]);
function log() {
var elem = document.createElement("pre");
elem.appendChild(document.createTextNode(Array.prototype.join.call(arguments, " ")));
document.body.appendChild(elem);
}
<script id="vs" type="not-js">
attribute vec4 position;
void main() {
gl_Position = position;
}
</script>
<script id="max-fs" type="not-js">
precision mediump float;
#define CELL_SIZE $(cellSize)s
uniform sampler2D u_texture;
uniform vec2 u_srcResolution;
uniform vec2 u_dstResolution;
void main() {
// compute the first pixel the source cell
vec2 srcPixel = floor(gl_FragCoord.xy) * float(CELL_SIZE);
// one pixel in source
vec2 onePixel = vec2(1) / u_srcResolution;
// uv for first pixel in cell. +0.5 for center of pixel
vec2 uv = (srcPixel + 0.5) * onePixel;
vec4 maxColor = vec4(0);
for (int y = 0; y < CELL_SIZE; ++y) {
for (int x = 0; x < CELL_SIZE; ++x) {
maxColor = max(maxColor, texture2D(u_texture, uv + vec2(x, y) * onePixel));
}
}
gl_FragColor = maxColor;
}
</script>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
Also, if you have WEBGL_draw_buffers support, you can do both min and max at the same time by writing to 2 different framebuffer attachments.
"use strict";
var cellSize = 2;
// make a texture as our source
var ctx = document.createElement("canvas").getContext("2d");
ctx.fillStyle = "rgb(128, 128, 128)";
ctx.fillRect(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.fillStyle = "rgb(12, 34, 56)";
ctx.fillRect(20, 30, 1, 1);
ctx.fillStyle = "rgb(254, 243, 232)";
ctx.fillRect(270, 140, 1, 1);
var canvas = document.createElement("canvas");
var m4 = twgl.m4;
var gl = canvas.getContext("webgl");
var ext = gl.getExtension("WEBGL_draw_buffers");
if (!ext) {
alert("sample requires WEBGL_draw_buffers");
}
var fsSrc = document.querySelector("#minmax-fs").text.replace("$(cellSize)s", cellSize);
var programInfo = twgl.createProgramInfo(gl, ["vs", fsSrc]);
var unitQuadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
var srcTex = twgl.createTexture(gl, {
src: ctx.canvas,
min: gl.NEAREST,
mag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
});
var framebuffers = [];
var w = ctx.canvas.width;
var h = ctx.canvas.height;
while (w > 1 || h > 1) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
// creates a framebuffer and creates and attaches 2 RGBA/UNSIGNED textures
var fbi = twgl.createFramebufferInfo(gl, [
{ min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
{ min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
], w, h);
ext.drawBuffersWEBGL([ext.COLOR_ATTACHMENT0_WEBGL, ext.COLOR_ATTACHMENT1_WEBGL]);
framebuffers.push(fbi);
}
// need separate FBs to read the output
var lastFBI = framebuffers[framebuffers.length - 1];
var minFBI = twgl.createFramebufferInfo(gl, [
{ attachment: lastFBI.attachments[0] }
], 1, 1);
var maxFBI = twgl.createFramebufferInfo(gl, [
{ attachment: lastFBI.attachments[1] }
], 1, 1);
var uniforms = {
u_srcResolution: [ctx.canvas.width, ctx.canvas.height],
u_minTexture: srcTex,
u_maxTexture: srcTex,
};
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, unitQuadBufferInfo);
var w = ctx.canvas.width;
var h = ctx.canvas.height;
framebuffers.forEach(function(fbi, ndx) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
uniforms.u_dstResolution = [w, h];
twgl.bindFramebufferInfo(gl, fbi);
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, unitQuadBufferInfo);
uniforms.u_minTexture = fbi.attachments[0];
uniforms.u_maxTexture = fbi.attachments[1];
uniforms.u_srcResolution = [w, h];
});
var p = new Uint8Array(4);
twgl.bindFramebufferInfo(gl, minFBI);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, p);
log("min: ", p[0], p[1], p[2]);
twgl.bindFramebufferInfo(gl, maxFBI);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, p);
log("max: ", p[0], p[1], p[2]);
function log() {
var elem = document.createElement("pre");
elem.appendChild(document.createTextNode(Array.prototype.join.call(arguments, " ")));
document.body.appendChild(elem);
}
<script id="vs" type="not-js">
attribute vec4 position;
void main() {
gl_Position = position;
}
</script>
<script id="minmax-fs" type="not-js">
#extension GL_EXT_draw_buffers : require
precision mediump float;
#define CELL_SIZE $(cellSize)s
uniform sampler2D u_minTexture;
uniform sampler2D u_maxTexture;
uniform vec2 u_srcResolution;
uniform vec2 u_dstResolution;
void main() {
// compute the first pixel the source cell
vec2 srcPixel = floor(gl_FragCoord.xy) * float(CELL_SIZE);
// one pixel in source
vec2 onePixel = vec2(1) / u_srcResolution;
// uv for first pixel in cell. +0.5 for center of pixel
vec2 uv = (srcPixel + 0.5) / u_srcResolution;
vec4 minColor = vec4(1);
vec4 maxColor = vec4(0);
for (int y = 0; y < CELL_SIZE; ++y) {
for (int x = 0; x < CELL_SIZE; ++x) {
vec2 off = uv + vec2(x, y) * onePixel;
minColor = min(minColor, texture2D(u_minTexture, off));
maxColor = max(maxColor, texture2D(u_maxTexture, off));
}
}
gl_FragData[0] = minColor;
gl_FragData[1] = maxColor;
}
</script>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
Now that you have the answer, you can pass it to another shader to "contrastify" your texture.
If you read out the values then
uniform vec4 u_minColor;
uniform vec4 u_maxColor;
uniform sampler2D u_texture;
...
vec4 color = texture2D(u_texture, uv);
vec4 range = u_maxColor - u_minColor;
gl_FragColor = (color - u_minColor) / range;
If you just want to pass in the textures without reading them out then
uniform sampler2D u_minColor;
uniform sampler2D u_maxColor;
uniform sampler2D u_texture;
...
vec4 minColor = texture2D(u_minColor, vec2(0));
vec4 maxColor = texture2D(u_maxColor, vec2(0));
vec4 color = texture2D(u_texture, uv);
vec4 range = maxColor - minColor;
gl_FragColor = vec4(((color - minColor) / range).rgb, 1);
I don't know if one is better than the other. I'd assume reading from a texture is slower than reading from a uniform, but for a shader this small the performance difference might be minimal.
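If you go the uniform route, the read-back and upload is only a few lines. A sketch (it assumes the 1x1 min framebuffer is bound, the contrast program is in use, and minColorLocation came from gl.getUniformLocation):
// with the 1x1 min framebuffer bound, read its single pixel back...
var p = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, p);
// ...convert the 0-255 bytes into the 0-1 range the shader works in...
var minColor = [p[0] / 255, p[1] / 255, p[2] / 255, p[3] / 255];
// ...and hand it to the contrast shader as a uniform
gl.uniform4fv(minColorLocation, minColor);
Here's a full snippet using the texture approach: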
"use strict";
var cellSize = 16;
var canvas = document.createElement("canvas");
var m4 = twgl.m4;
var gl = canvas.getContext("webgl");
var ext = gl.getExtension("WEBGL_draw_buffers");
if (!ext) {
alert("sample requires WEBGL_draw_buffers");
}
var fsSrc = document.querySelector("#minmax-fs").text.replace("$(cellSize)s", cellSize);
var programInfo = twgl.createProgramInfo(gl, ["vs", fsSrc]);
var contrastProgramInfo = twgl.createProgramInfo(gl, ["vs", "contrastify-fs"]);
var unitQuadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
var srcTex = twgl.createTexture(gl, {
src: "http://i.imgur.com/rItAVSG.jpg",
crossOrigin: "",
min: gl.NEAREST,
mag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
}, function(err, srcTex, img) {
img.style.width = "300px";
img.style.height = "150px";
log("before");
document.body.appendChild(img);
log("after");
document.body.appendChild(canvas);
var framebuffers = [];
var w = img.width;
var h = img.height;
while (w > 1 || h > 1) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
// creates a framebuffer and creates and attaches 2 RGBA/UNSIGNED textures
var fbi = twgl.createFramebufferInfo(gl, [
{ min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
{ min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
], w, h);
ext.drawBuffersWEBGL([ext.COLOR_ATTACHMENT0_WEBGL, ext.COLOR_ATTACHMENT1_WEBGL]);
framebuffers.push(fbi);
}
// need separate FBs to read the output
var lastFBI = framebuffers[framebuffers.length - 1];
var minFBI = twgl.createFramebufferInfo(gl, [
{ attachment: lastFBI.attachments[0] }
], 1, 1);
var maxFBI = twgl.createFramebufferInfo(gl, [
{ attachment: lastFBI.attachments[1] }
], 1, 1);
var uniforms = {
u_srcResolution: [img.width, img.height],
u_minTexture: srcTex,
u_maxTexture: srcTex,
};
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, unitQuadBufferInfo);
var w = img.width;
var h = img.height;
framebuffers.forEach(function(fbi, ndx) {
w = Math.max(1, (w + cellSize - 1) / cellSize | 0);
h = Math.max(1, (h + cellSize - 1) / cellSize | 0);
uniforms.u_dstResolution = [w, h];
twgl.bindFramebufferInfo(gl, fbi);
twgl.setUniforms(programInfo, uniforms);
twgl.drawBufferInfo(gl, unitQuadBufferInfo);
uniforms.u_minTexture = fbi.attachments[0];
uniforms.u_maxTexture = fbi.attachments[1];
uniforms.u_srcResolution = [w, h];
});
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(contrastProgramInfo.program);
twgl.setUniforms(contrastProgramInfo, {
u_resolution: [img.width, img.height],
u_texture: srcTex,
u_minColor: fbi.attachments[0],
u_maxColor: fbi.attachments[1],
});
twgl.drawBufferInfo(gl, unitQuadBufferInfo);
});
function log() {
var elem = document.createElement("pre");
elem.appendChild(document.createTextNode(Array.prototype.join.call(arguments, " ")));
document.body.appendChild(elem);
}
img, canvas { margin: 5px; border: 1px solid black; }
<script id="vs" type="not-js">
attribute vec4 position;
void main() {
gl_Position = position;
}
</script>
<script id="minmax-fs" type="not-js">
#extension GL_EXT_draw_buffers : require
precision mediump float;
#define CELL_SIZE $(cellSize)s
uniform sampler2D u_minTexture;
uniform sampler2D u_maxTexture;
uniform vec2 u_srcResolution;
uniform vec2 u_dstResolution;
void main() {
// compute the first pixel the source cell
vec2 srcPixel = floor(gl_FragCoord.xy) * float(CELL_SIZE);
// one pixel in source
vec2 onePixel = vec2(1) / u_srcResolution;
// uv for first pixel in cell. +0.5 for center of pixel
vec2 uv = (srcPixel + 0.5) / u_srcResolution;
vec4 minColor = vec4(1);
vec4 maxColor = vec4(0);
for (int y = 0; y < CELL_SIZE; ++y) {
for (int x = 0; x < CELL_SIZE; ++x) {
vec2 off = uv + vec2(x, y) * onePixel;
minColor = min(minColor, texture2D(u_minTexture, off));
maxColor = max(maxColor, texture2D(u_maxTexture, off));
}
}
gl_FragData[0] = minColor;
gl_FragData[1] = maxColor;
}
</script>
<script id="contrastify-fs" type="not-fs">
precision mediump float;
uniform sampler2D u_minColor;
uniform sampler2D u_maxColor;
uniform sampler2D u_texture;
uniform vec2 u_resolution;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution;
uv.y = 1.0 - uv.y;
vec4 minColor = texture2D(u_minColor, vec2(0));
vec4 maxColor = texture2D(u_maxColor, vec2(0));
vec4 color = texture2D(u_texture, uv);
vec4 range = maxColor - minColor;
gl_FragColor = vec4(((color - minColor) / range).rgb, 1);
}
</script>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
As for monochrome, just change the src textures to gl.LUMINANCE.
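For example, with twgl that would just be a format option on the source texture. A sketch (grayData and imageWidth are assumed; one byte per pixel):
var srcTex = twgl.createTexture(gl, {
  src: grayData,          // assumed: Uint8Array with one byte per pixel
  width: imageWidth,      // needed since src is a raw array
  format: gl.LUMINANCE,   // single channel, replicated to r, g, b when sampled
  min: gl.NEAREST,
  mag: gl.NEAREST,
  wrap: gl.CLAMP_TO_EDGE,
});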
