Dart WebGL not rendering - opengl-es

I'm trying to write something with WebGL and Dart, but I'm having problems. Here is my source code.
This is my main file:
library warmup;
import 'dart:html';
import 'dart:math';
import 'dart:web_gl' as WebGL;
import 'dart:typed_data';
import 'package:vector_math/vector_math.dart';
part 'graphics.dart';
WebGL.RenderingContext gl;
CanvasElement canvas;
QuadRenderer renderer;
Random random = new Random();
void main() {
querySelector("#paragraf").setInnerHtml("HELLO!");
canvas = querySelector("#game_canvas");
new Game().start();
}
class Game {
Texture test = new Texture("tex/test.png");
void start() {
gl = canvas.getContext3d();
if (gl == null) {
print("No WebGL!");
}
Texture.loadAll();
renderer = new QuadRenderer();
renderer.projMatrix = makeOrthographicMatrix(0, canvas.width, 0, canvas.height, -1, 1);
gl.disable(WebGL.DEPTH_TEST);
gl.enable(WebGL.BLEND);
gl.blendFunc(WebGL.SRC_ALPHA, WebGL.ONE_MINUS_SRC_ALPHA);
gl.clearColor(0.0, 0.0, 0.0, 1.0);
window.requestAnimationFrame(_update);
}
int now, last = new DateTime.now().millisecondsSinceEpoch;
double unprocessedFrames;
void _update(double time) {
now = new DateTime.now().millisecondsSinceEpoch;
unprocessedFrames = (now - last) * 60 / 1000;
while (unprocessedFrames > 1.0) {
tick();
unprocessedFrames--;
}
renderGame();
window.requestAnimationFrame(_update);
}
void tick() {
}
void renderGame() {
gl.viewport(0, 0, canvas.width, canvas.height);
gl.clear(WebGL.COLOR_BUFFER_BIT);
renderer.bindTexture(test);
renderer.renderQuad(new Vector2(canvas.width / 2.0, canvas.height / 2.0), 128, 128, 0, 0, false);
}
}
This is my graphics.dart file:
part of warmup;
class Texture {
static List<Texture> _pending = new List<Texture>();
String url;
int width, height;
WebGL.Texture data;
bool loaded = false;
Texture(this.url) {
if (gl == null) {
_pending.add(this);
}
else {
load();
}
}
static void loadAll() {
_pending.forEach((e) => e.load());
_pending.clear();
}
void load() {
ImageElement img = new ImageElement();
data = gl.createTexture();
img.onLoad.listen((e) {
gl.bindTexture(WebGL.TEXTURE_2D, data);
gl.texImage2DImage(WebGL.TEXTURE_2D, 0, WebGL.RGBA, WebGL.RGBA, WebGL.UNSIGNED_BYTE, img);
gl.texParameteri(WebGL.TEXTURE_2D, WebGL.TEXTURE_MIN_FILTER, WebGL.NEAREST);
gl.texParameteri(WebGL.TEXTURE_2D, WebGL.TEXTURE_MAG_FILTER, WebGL.NEAREST);
width = img.width;
height = img.height;
loaded = true;
});
img.src = url;
}
}
class Vertex {
Vector2 pos;
Vector4 color;
Vector2 texCoord;
static int elementBytes = 8;
static int posElementCount = 2;
static int colorElementCount = 4;
static int textureElementCount = 2;
static int posBytesCount = posElementCount * elementBytes;
static int colorByteCount = colorElementCount * elementBytes;
static int textureByteCount = textureElementCount * elementBytes;
static int posByteOffset = 0;
static int colorByteOffset = posByteOffset + posBytesCount;
static int textureByteOffset = colorByteOffset + colorByteCount;
static int elementCount = posElementCount +
colorElementCount + textureElementCount;
static int stride = posBytesCount + colorByteCount +
textureByteCount;
Vertex() {
pos = new Vector2(0.0, 0.0);
color = new Vector4(1.0, 1.0, 1.0, 1.0);
texCoord = new Vector2(0.0, 0.0);
}
List<double> getElements() {
List<double> result;
result = [pos.x, pos.y, color.r, color.g, color.b, color.a, texCoord.x, texCoord.y];
return result;
}
}
class QuadRenderer {
String _vsSource = """
precision highp float;
attribute vec2 a_pos;
attribute vec2 a_texCoord;
attribute vec4 a_color;
uniform mat4 proj;
uniform mat4 model;
varying vec2 v_texCoord;
varying vec4 v_pos;
varying vec4 v_color;
void main() {
v_pos = proj * model * vec4(a_pos, 0, 1);
v_color = a_color;
gl_Position = proj * model * vec4(a_pos, 0, 1);
}
""", _fsSource = """
precision highp float;
uniform sampler2D texture;
varying vec2 v_texCoord;
varying vec4 v_pos;
varying vec4 v_color;
void main() {
vec4 texColor = texture2D(texture, v_texCoord);
gl_FragColor = vec4(1, 1, 1, 1);
}
""";
WebGL.Shader vs, fs;
WebGL.Program program;
WebGL.Buffer vab, iab;
int posLocation;
Matrix4 projMatrix;
Vertex v0, v1, v2, v3;
WebGL.UniformLocation projLocation, modelLocation;
Texture texture;
void renderQuad(Vector2 pos, num w, num h, num uo, num vo, bool normalize, {Vector4 color, double rotation}) {
if (!texture.loaded) return;
if (color == null) color = new Vector4(1.0, 1.0, 1.0, 1.0);
if (rotation == null) rotation = 0.0;
Matrix4 model = new Matrix4.identity();
model.translate(pos.x, pos.y);
model.scale(w*1.0, h*1.0, 0.0);
gl.uniformMatrix4fv(modelLocation, false, model.storage);
gl.uniformMatrix4fv(projLocation, false, projMatrix.storage);
if (normalize) {
uo /= texture.width;
vo /= texture.height;
}
v0.color = v1.color = v2.color = v3.color = color;
v0.texCoord = new Vector2(uo + w * 1.0, vo + h * 1.0); v1.texCoord = new Vector2(uo + w * 1.0, vo * 1.0);
v2.texCoord = new Vector2(uo * 1.0, vo * 1.0); v3.texCoord = new Vector2(uo * 1.0, vo + h * 1.0);
_compileVertices();
gl.drawElements(WebGL.TRIANGLES, 6, WebGL.UNSIGNED_SHORT, 0);
}
void bindTexture(Texture tex) {
texture = tex;
gl.bindTexture(WebGL.TEXTURE_2D, texture.data);
}
void _compileVertices() {
List<Vertex> vertices = [v0, v1, v2, v3];
Float32List vertexBuffer = new Float32List(vertices.length * Vertex.elementCount);
for (int i = 0; i < vertices.length; i++) {
Vertex vertex = vertices[i];
vertexBuffer.setAll(i * Vertex.elementCount, vertex.getElements());
}
gl.bindBuffer(WebGL.ARRAY_BUFFER, vab);
gl.bufferData(WebGL.ARRAY_BUFFER, vertexBuffer, WebGL.STATIC_DRAW);
gl.vertexAttribPointer(0, Vertex.posElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.posByteOffset);
gl.vertexAttribPointer(1, Vertex.colorElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.colorByteOffset);
gl.vertexAttribPointer(2, Vertex.textureElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.textureByteOffset);
}
QuadRenderer() {
vs = gl.createShader(WebGL.VERTEX_SHADER);
gl.shaderSource(vs, _vsSource);
gl.compileShader(vs);
if (!gl.getShaderParameter(vs, WebGL.COMPILE_STATUS)) {
throw gl.getShaderInfoLog(vs);
}
fs = gl.createShader(WebGL.FRAGMENT_SHADER);
gl.shaderSource(fs, _fsSource);
gl.compileShader(fs);
if (!gl.getShaderParameter(fs, WebGL.COMPILE_STATUS)) {
throw gl.getShaderInfoLog(fs);
}
program = gl.createProgram();
gl.attachShader(program, vs);
gl.attachShader(program, fs);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, WebGL.LINK_STATUS)) {
throw gl.getProgramInfoLog(program);
}
projLocation = gl.getUniformLocation(program, "proj");
modelLocation = gl.getUniformLocation(program, "model");
gl.useProgram(program);
v0 = new Vertex(); v1 = new Vertex(); v2 = new Vertex(); v3 = new Vertex();
v0.pos = new Vector2(0.5, 0.5); v1.pos = new Vector2(0.5, -0.5);
v2.pos = new Vector2(-0.5, -0.5); v3.pos = new Vector2(-0.5, 0.5);
gl.bindAttribLocation(program, 0, "a_pos");
gl.bindAttribLocation(program, 1, "a_color");
gl.bindAttribLocation(program, 2, "a_texCoord");
List<Vertex> vertices = [v0, v1, v2, v3];
Float32List vertexBuffer = new Float32List(vertices.length * Vertex.elementCount);
for (int i = 0; i < vertices.length; i++) {
Vertex vertex = vertices[i];
vertexBuffer.setAll(i * Vertex.elementCount, vertex.getElements());
}
vab = gl.createBuffer();
gl.bindBuffer(WebGL.ARRAY_BUFFER, vab);
gl.bufferData(WebGL.ARRAY_BUFFER, vertexBuffer, WebGL.STATIC_DRAW);
gl.vertexAttribPointer(0, Vertex.posElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.posByteOffset);
gl.vertexAttribPointer(1, Vertex.colorElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.colorByteOffset);
gl.vertexAttribPointer(2, Vertex.textureElementCount, WebGL.FLOAT, false, Vertex.stride, Vertex.textureByteOffset);
Int16List indexBuffer = new Int16List(6);
indexBuffer.setAll(0, [0, 1, 2, 0, 2, 3]);
iab = gl.createBuffer();
gl.bindBuffer(WebGL.ELEMENT_ARRAY_BUFFER, iab);
gl.bufferData(WebGL.ELEMENT_ARRAY_BUFFER, indexBuffer, WebGL.STATIC_DRAW);
}
}
The problem is that the draw call in renderGame() doesn't render anything. I know that WebGL works, because the clear color shows up fine. I don't know what the problem is, so any suggestion is appreciated.

How about using a rendering library or game engine like Pixi Dart or StageXL instead of plain WebGL?
Those two libraries also abstract the renderer backend (they use WebGL whenever possible and fall back to canvas otherwise).
Hope that helps if you're thinking of developing a game :)
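For what it's worth, here is a minimal sketch (plain JavaScript, purely illustrative and not taken from either library) of the kind of backend detection those libraries do internally, reusing the question's #game_canvas id: prefer WebGL, fall back to the 2D canvas.
const canvas = document.querySelector('#game_canvas');
// Try WebGL first; very old browsers may only expose the experimental name.
const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
if (gl) {
  // render with the WebGL backend
} else {
  // otherwise render with the Canvas 2D API
  const ctx = canvas.getContext('2d');
}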

Related

apply LUT to an image GLSL

I am very new to CG and am trying to implement a fragment shader that applies a PNG LUT to a picture, but I don't get the expected result: my code makes the picture very blue-ish.
Here is an example LUT:
[example LUT image]
When I apply the LUT to an image using the code below, the whole picture turns very blue-ish.
Code:
precision mediump float;
uniform sampler2D u_image;
uniform sampler2D u_lut;
// LUT resolution for one component (4, 8, 16, ...)
uniform float u_resolution;
layout(location = 0) out vec4 fragColor;
in vec2 v_uv;
void main(void)
{
vec2 tiles = vec2(u_resolution, u_resolution);
vec2 tilesSize = vec2(u_resolution * u_resolution);
vec3 imageColor = texture(u_image, v_uv).rgb;
// min and max are used to interpolate between 2 tiles in the LUT
float index = imageColor.b * (tiles.x * tiles.y - 1.0);
float index_min = min(u_resolution - 2.0, floor(index));
float index_max = index_min + 1.0;
vec2 tileIndex_min;
tileIndex_min.y = floor(index_min / tiles.x);
tileIndex_min.x = floor(index_min - tileIndex_min.y * tiles.x);
vec2 tileIndex_max;
tileIndex_max.y = floor(index_max / tiles.x);
tileIndex_max.x = floor(index_max - tileIndex_max.y * tiles.x);
vec2 tileUV = mix(0.5/tilesSize, (tilesSize - 0.5)/tilesSize, imageColor.rg);
vec2 tableUV_1 = tileIndex_min / tiles + tileUV / tiles;
vec2 tableUV_2 = tileIndex_max / tiles + tileUV / tiles;
vec3 lookUpColor_1 = texture(u_lut, tableUV_1).rgb;
vec3 lookUpColor_2 = texture(u_lut, tableUV_2).rgb;
vec3 lookUpColor = mix(lookUpColor_1, lookUpColor_2, index - index_min);
fragColor = vec4(lookUpColor, 1.0);
}
Since you're using WebGL2 you can just use a 3D texture
#version 300 es
precision highp float;
in vec2 vUV;
uniform sampler2D uImage;
uniform mediump sampler3D uLUT;
out vec4 outColor;
void main() {
vec4 color = texture(uImage, vUV);
vec3 lutSize = vec3(textureSize(uLUT, 0));
vec3 uvw = (color.rgb * float(lutSize - 1.0) + 0.5) / lutSize;
outColor = texture(uLUT, uvw);
}
And you can use UNPACK_ROW_LENGTH and UNPACK_SKIP_PIXELS to load each slice of a PNG into a 3D texture:
function createLUTTexture(gl, img, filter, size = 8) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_3D, tex);
gl.texStorage3D(gl.TEXTURE_3D, 1, gl.RGBA8, size, size, size);
// grab slices
for (let z = 0; z < size; ++z) {
gl.pixelStorei(gl.UNPACK_SKIP_PIXELS, z * size);
gl.pixelStorei(gl.UNPACK_ROW_LENGTH, img.width);
gl.texSubImage3D(
gl.TEXTURE_3D,
0, // mip level
0, // x
0, // y
z, // z
size, // width,
size, // height,
1, // depth
gl.RGBA,
gl.UNSIGNED_BYTE,
img,
);
}
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_MIN_FILTER, filter);
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_MAG_FILTER, filter);
return tex;
}
Example:
const fs = `#version 300 es
precision highp float;
in vec2 vUV;
uniform sampler2D uImage;
uniform mediump sampler3D uLUT;
out vec4 outColor;
void main() {
vec4 color = texture(uImage, vUV);
vec3 lutSize = vec3(textureSize(uLUT, 0));
vec3 uvw = (color.rgb * float(lutSize - 1.0) + 0.5) / lutSize;
outColor = texture(uLUT, uvw);
}
`;
const vs = `#version 300 es
in vec4 position;
in vec2 texcoord;
out vec2 vUV;
void main() {
gl_Position = position;
vUV = texcoord;
}
`;
const lutURLs = [
'default.png',
'bgy.png',
'-black-white.png',
'blues.png',
'color-negative.png',
'funky-contrast.png',
'googley.png',
'high-contrast-bw.png',
'hue-minus-60.png',
'hue-plus-60.png',
'hue-plus-180.png',
'infrared.png',
'inverse.png',
'monochrome.png',
'nightvision.png',
'-posterize-3-lab.png',
'-posterize-3-rgb.png',
'-posterize-4-lab.png',
'-posterize-more.png',
'-posterize.png',
'radioactive.png',
'red-to-cyan.png',
'saturated.png',
'sepia.png',
'thermal.png',
];
let luts = {};
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
async function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
alert('need WebGL2');
return;
}
const img = await loadImage('https://i.imgur.com/CwQSMv9.jpg');
document.querySelector('#img').append(img);
const imgTexture = twgl.createTexture(gl, {src: img, yFlip: true});
// compile shaders, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for
// a plane with positions, and texcoords
const bufferInfo = twgl.primitives.createXYQuadBufferInfo(gl, 2);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
gl.activeTexture(gl.TEXTURE0 + 1);
for (;;) {
for (let name of lutURLs) {
let lut = luts[name];
if (!lut) {
let url = name;
let filter = gl.LINEAR;
if (url.startsWith('-')) {
filter = gl.NEAREST;
url = url.substr(1);
}
const lutImg = await loadImage(`https://webglsamples.org/color-adjust/adjustments/${url}`);
lut = {
name: url,
texture: createLUTTexture(gl, lutImg, filter),
};
luts[name] = lut;
}
document.querySelector('#info').textContent = lut.name;
// calls gl.uniformXXX, gl.activeTexture, gl.bindTexture
twgl.setUniformsAndBindTextures(programInfo, {
uImage: imgTexture,
uLUT: lut.texture,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
await wait(1000);
}
}
}
main();
function createLUTTexture(gl, img, filter, size = 8) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_3D, tex);
gl.texStorage3D(gl.TEXTURE_3D, 1, gl.RGBA8, size, size, size);
// grab slices
for (let z = 0; z < size; ++z) {
gl.pixelStorei(gl.UNPACK_SKIP_PIXELS, z * size);
gl.pixelStorei(gl.UNPACK_ROW_LENGTH, img.width);
gl.texSubImage3D(
gl.TEXTURE_3D,
0, // mip level
0, // x
0, // y
z, // z
size, // width,
size, // height,
1, // depth
gl.RGBA,
gl.UNSIGNED_BYTE,
img,
);
}
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_MIN_FILTER, filter);
gl.texParameteri(gl.TEXTURE_3D, gl.TEXTURE_MAG_FILTER, filter);
return tex;
}
function loadImage(url) {
return new Promise((resolve, reject) => {
const img = new Image();
img.onerror = reject;
img.onload = () => resolve(img);
img.crossOrigin = "anonymous";
img.src = url;
});
}
.split { display: flex; }
.split>div { padding: 5px; }
img { width: 150px; }
<div class="split">
<div>
<div id="img"></div>
<div>original</div>
</div>
<div>
<canvas width="150" height="198"></canvas>
<div>LUT Applied: <span id="info"></span></div>
</div>
</div>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
As for doing it in 2D, there's this, which has a video explaining it linked at the top. There's also this if you want to look at a shader that works.

OpenGL ES 2.0 - Fisheye shader displays a grey image

I've been trying to use a fisheye shader from Shadertoy.
I've added my own frame resolution, and changed some keywords (texture -> texture2D, fragColor -> gl_FragColor) but that's it.
I don't really know why it doesn't work or how to debug it.
As a result I get a solid grey image.
Here's the code of my fragment shader:
precision mediump float;
uniform vec4 v_Color;
uniform sampler2D u_Texture;
varying vec2 v_TexCoordinate;
#define RESOLUTION_WIDTH 375.0
#define RESOLUTION_HEIGHT 211.0
#define POWER 2.0
void main() {
vec2 fragCoord = v_TexCoordinate;
vec2 iResolution = vec2(RESOLUTION_WIDTH, RESOLUTION_HEIGHT);
vec2 p = fragCoord.xy / iResolution.x; // normalized coords with some cheat
float prop = iResolution.x / iResolution.y;
vec2 m = vec2(0.5, 0.5 / prop); // center coords
vec2 d = p - m; // vector from center to current fragment
float r = sqrt(dot(d, d)); // distance of pixel from center
float power = POWER;
float bind; // radius of 1:1 effect
if (power > 0.0)
bind = sqrt(dot(m, m)); // stick to corners
else {
if (prop < 1.0)
bind = m.x;
else
bind = m.y;
} // stick to borders
// Weird formulas
vec2 uv;
if (power > 0.0) // fisheye
uv = m + normalize(d) * tan(r * power) * bind / tan( bind * power);
else if (power < 0.0) // antifisheye
uv = m + normalize(d) * atan(r * -power * 10.0) * bind / atan(-power * bind * 10.0);
else uv = p; // no effect for power = 1.0
vec3 col = texture2D(u_Texture, vec2(uv.x, -uv.y * prop)).xyz; // Second part of cheat
gl_FragColor = vec4(col, 1.0);
}
Here's my original shader that displays an image and works perfectly:
precision mediump float;
uniform vec4 v_Color;
uniform sampler2D u_Texture;
varying vec2 v_TexCoordinate;
void main() {
// premultiplied alpha
vec4 texColor = texture2D(u_Texture, v_TexCoordinate);
// Scale the texture RGB by the vertex color
texColor.rgb *= v_Color.rgb;
// Scale the texture RGBA by the vertex alpha to reinstate premultiplication
gl_FragColor = texColor * v_Color.a;
}
Here's the link to the expected result on ShaderToy:
ShaderToy fisheye
Original result image: (image omitted)
With my shader: (image omitted)
With Rabbid76's solution: (image omitted)
With power = 1.1: (image omitted)
With solution #2 and power = 10 (larger image to see better): (image omitted)
There's some background behind the text, don't pay attention to it ;)
In your shader code, fragCoord is assumed to be a window coordinate, where the minimum is (0, 0) and the maximum is the width and height of the viewport. But in your code you assign v_TexCoordinate to fragCoord, and v_TexCoordinate is the texture coordinate in the range [0, 1].
Use gl_FragCoord instead of v_TexCoordinate:
// vec2 fragCoord = v_TexCoordinate; <--- delete
vec2 fragCoord = gl_FragCoord.xy;
vec2 p = fragCoord.xy / iResolution.x;
Or skip dividing by the window resolution:
vec2 fragCoord = v_TexCoordinate;
// vec2 p = fragCoord.xy / iResolution.x; <-- delete
vec2 p = fragCoord.xy * vec2(1.0, iResolution.y/iResolution.x);
If the aspect ratio correction is not needed, it can even be reduced to:
vec2 p = v_TexCoordinate.xy;
See the WebGL example, where I used your original shader code and applied the suggested changes:
(function loadscene() {
var gl, canvas, prog, bufObj = {};
var texture;
function render(deltaMS) {
texture.bound = texture.bound || texture.bind( 0 );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShProg.Use( progDraw );
ShProg.SetF2( progDraw, "resolution", vp_size );
ShProg.SetI1( progDraw, "u_texture", 0 );
VertexBuffer.Draw( bufRect );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "texture-canvas");
gl = canvas.getContext( "experimental-webgl" );
//gl = canvas.getContext( "webgl2" );
if ( !gl )
return;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progDraw.inPos = gl.getAttribLocation( progDraw.progObj, "inPos" );
if ( progDraw.progObj == 0 )
return;
bufRect = VertexBuffer.Create(
[ { data : [ -1, -1, 1, -1, 1, 1, -1, 1 ], attrSize : 2, attrLoc : progDraw.inPos } ],
[ 0, 1, 2, 0, 2, 3 ] );
texture = new Texture( "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/supermario.jpg" );
texture.bound = false;
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
vp_size[0] = vp_size[1] = Math.min(vp_size[0], vp_size[1]);
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc, no_of: attribs[i].data.length/attribs[i].attrSize });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ARRAY_BUFFER, null);
if ( buffer.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
if ( bufObj.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
}
else
gl.drawArrays(bufObj.primitive_type, 0, bufObj.attr[0].no_of );
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
} };
class Texture {
constructor( name, dflt ) {
let texture = this;
this.dflt = dflt || [128,128,128,255]
let image = { "cx": this.dflt.w || 1, "cy": this.dflt.h || 1, "plane": this.dflt.p || this.dflt };
this.size = [image.cx, image.cy];
this.dummyObj = Texture.createTexture2D( image, true )
this.image = new Image(64,64);
this.image.setAttribute('crossorigin', 'anonymous');
this.image.onload = function () {
let cx = 1 << 31 - Math.clz32(texture.image.naturalWidth);
if ( cx < texture.image.naturalWidth ) cx *= 2;
let cy = 1 << 31 - Math.clz32(texture.image.naturalHeight);
if ( cy < texture.image.naturalHeight ) cy *= 2;
var canvas = document.createElement( 'canvas' );
canvas.width = cx;
canvas.height = cy;
var context = canvas.getContext( '2d' );
context.drawImage( texture.image, 0, 0, canvas.width, canvas.height );
texture.textureObj = Texture.createTexture2D( canvas, true );
texture.size = [cx, cy];
}
this.image.src = name;
}
static createTexture2D( image, flipY ) {
let t = gl.createTexture();
gl.activeTexture( gl.TEXTURE0 );
gl.bindTexture( gl.TEXTURE_2D, t );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
if ( image.cx && image.cy && image.plane )
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, image.cx, image.cy, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(image.plane) );
else
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
gl.bindTexture( gl.TEXTURE_2D, null );
return t;
}
bind( texUnit = 0 ) {
gl.activeTexture( gl.TEXTURE0 + texUnit );
if ( this.textureObj ) {
gl.bindTexture( gl.TEXTURE_2D, this.textureObj );
return true;
}
gl.bindTexture( gl.TEXTURE_2D, this.dummyObj );
return false;
}
};
initScene();
})();
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
void main()
{
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform vec2 resolution;
uniform sampler2D u_Texture;
#define RESOLUTION_WIDTH 375.0
#define RESOLUTION_HEIGHT 211.0
#define POWER 2.0
void main( void )
{
vec2 fragCoord = gl_FragCoord.xy;
vec2 iResolution = resolution;
//vec2 fragCoord = v_TexCoordinate;
//vec2 iResolution = vec2(RESOLUTION_WIDTH, RESOLUTION_HEIGHT);
vec2 p = fragCoord.xy / iResolution.x; // normalized coords with some cheat
float prop = iResolution.x / iResolution.y;
vec2 m = vec2(0.5, 0.5 / prop); // center coords
vec2 d = p - m; // vector from center to current fragment
float r = sqrt(dot(d, d)); // distance of pixel from center
float power = POWER;
float bind; // radius of 1:1 effect
if (power > 0.0)
bind = sqrt(dot(m, m)); // stick to corners
else {
if (prop < 1.0)
bind = m.x;
else
bind = m.y;
} // stick to borders
// Weird formulas
vec2 uv;
if (power > 0.0) // fisheye
uv = m + normalize(d) * tan(r * power) * bind / tan( bind * power);
else if (power < 0.0) // antifisheye
uv = m + normalize(d) * atan(r * -power * 10.0) * bind / atan(-power * bind * 10.0);
else uv = p; // no effect for power = 1.0
vec3 col = texture2D(u_Texture, vec2(uv.x, -uv.y * prop)).xyz; // Second part of cheat
gl_FragColor = vec4(col, 1.0);
}
</script>
<body>
<canvas id="texture-canvas" style="border: none"></canvas>
</body>

Canvas/WebGL 2D tilemap grid artifacts

I am creating a simple 2D web game that works with your typical tile map and sprites.
The twist is that I want smooth camera controls, both translation and scaling (zooming).
I tried using both the Canvas 2D API and WebGL, and in both I simply cannot avoid the bleeding grid-line artifacts while also supporting zooming properly.
If it matters, all of my tiles are of size 1, and scaled to whatever size is needed, all of their coordinates are integers, and I am using a texture atlas.
Here's an example picture using my WebGL code, where the thin red/white lines are not wanted.
I remember writing sprite tile maps years ago with desktop GL, ironically using similar code (more or less equivalent to what I could do with WebGL 2), and it never had any of these issues.
I am considering to try DOM based elements next, but I fear it will not feel or look smooth.
One solution is to draw the tiles in the fragment shader
So you have your map, say a Uint32Array. Break it down into units of 4 bytes each: the first 2 bytes are the tile ID and the last byte is flags.
As you walk across the quad, for each pixel you look up in the tilemap texture which tile it is, then you use that to compute UV coordinates to get pixels from that tile out of the texture of tiles. If your texture of tiles has gl.NEAREST sampling set then you'll never get any bleeding.
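In plain WebGL that sampling setup looks like the following sketch (the full example below gets the same effect through twgl's minMag option):
gl.bindTexture(gl.TEXTURE_2D, tileTexture);
// no filtering between texels, so neighboring tiles can never bleed in
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);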
Note that unlike traditional tilemaps, the id of each tile is the X,Y coordinate of the tile in the tile texture. In other words, if your tile texture has 16x8 tiles (16 across, 8 down) and you want your map to show the tile 7 over and 4 down, then the id of that tile is 7,4 (first byte 7, second byte 4), whereas in a traditional CPU-based system the tile id would probably be 4*16+7, or 71 (the 71st tile). You could add code to the shader to do more traditional indexing, but since the shader has to convert the id into 2D texture coords anyway, it just seemed easier to use 2D ids.
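A minimal sketch of that conversion, with illustrative names (the full snippet below does the same thing when it fills tilemapU8):
// convert a traditional linear tile index into the 2D id bytes described above
function tileIdToBytes(linearId, tilesAcross) {
  const idX = linearId % tilesAcross;        // first byte: column in the tile texture
  const idY = (linearId / tilesAcross) | 0;  // second byte: row in the tile texture
  return [idX, idY];
}
// e.g. with 16 tiles across, linear id 71 (4*16+7) becomes [7, 4]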
const vs = `
attribute vec4 position;
//attribute vec4 texcoord; - since position is a unit square just use it for texcoords
uniform mat4 u_matrix;
uniform mat4 u_texMatrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
// v_texcoord = (u_texMatrix * texccord).xy;
v_texcoord = (u_texMatrix * position).xy;
}
`;
const fs = `
precision highp float;
uniform sampler2D u_tilemap;
uniform sampler2D u_tiles;
uniform vec2 u_tilemapSize;
uniform vec2 u_tilesetSize;
varying vec2 v_texcoord;
void main() {
vec2 tilemapCoord = floor(v_texcoord);
vec2 texcoord = fract(v_texcoord);
vec2 tileFoo = fract((tilemapCoord + vec2(0.5, 0.5)) / u_tilemapSize);
vec4 tile = floor(texture2D(u_tilemap, tileFoo) * 256.0);
float flags = tile.w;
float xflip = step(128.0, flags);
flags = flags - xflip * 128.0;
float yflip = step(64.0, flags);
flags = flags - yflip * 64.0;
float xySwap = step(32.0, flags);
if (xflip > 0.0) {
texcoord = vec2(1.0 - texcoord.x, texcoord.y);
}
if (yflip > 0.0) {
texcoord = vec2(texcoord.x, 1.0 - texcoord.y);
}
if (xySwap > 0.0) {
texcoord = texcoord.yx;
}
vec2 tileCoord = (tile.xy + texcoord) / u_tilesetSize;
vec4 color = texture2D(u_tiles, tileCoord);
if (color.a <= 0.1) {
discard;
}
gl_FragColor = color;
}
`;
const tileWidth = 32;
const tileHeight = 32;
const tilesAcross = 8;
const tilesDown = 4;
const m4 = twgl.m4;
const gl = document.querySelector('#c').getContext('webgl');
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, bindBuffer, bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
});
function r(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return min + (max - min) * Math.random();
}
// make some tiles
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = tileWidth * tilesAcross;
ctx.canvas.height = tileHeight * tilesDown;
ctx.font = "bold 24px sans-serif";
ctx.textAlign = "center";
ctx.textBaseline = "middle";
const f = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ~';
for (let y = 0; y < tilesDown; ++y) {
for (let x = 0; x < tilesAcross; ++x) {
const color = `hsl(${r(360) | 0},${r(50,100)}%,50%)`;
ctx.fillStyle = color;
const tx = x * tileWidth;
const ty = y * tileHeight;
ctx.fillRect(tx, ty, tileWidth, tileHeight);
ctx.fillStyle = "#FFF";
ctx.fillText(f.substr(y * 8 + x, 1), tx + tileWidth * .5, ty + tileHeight * .5);
}
}
document.body.appendChild(ctx.canvas);
const tileTexture = twgl.createTexture(gl, {
src: ctx.canvas,
minMag: gl.NEAREST,
});
// make a tilemap
const mapWidth = 400;
const mapHeight = 300;
const tilemap = new Uint32Array(mapWidth * mapHeight);
const tilemapU8 = new Uint8Array(tilemap.buffer);
const totalTiles = tilesAcross * tilesDown;
for (let i = 0; i < tilemap.length; ++i) {
const off = i * 4;
// mostly tile 9
const tileId = r(10) < 1
? (r(totalTiles) | 0)
: 9;
tilemapU8[off + 0] = tileId % tilesAcross;
tilemapU8[off + 1] = tileId / tilesAcross | 0;
const xFlip = r(2) | 0;
const yFlip = r(2) | 0;
const xySwap = r(2) | 0;
tilemapU8[off + 3] =
(xFlip ? 128 : 0) |
(yFlip ? 64 : 0) |
(xySwap ? 32 : 0) ;
}
const mapTexture = twgl.createTexture(gl, {
src: tilemapU8,
width: mapWidth,
minMag: gl.NEAREST,
});
function ease(t) {
return Math.cos(t) * .5 + .5;
}
function lerp(a, b, t) {
return a + (b - a) * t;
}
function easeLerp(a, b, t) {
return lerp(a, b, ease(t));
}
function render(time) {
time *= 0.001; // convert to seconds;
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 1, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
const mat = m4.ortho(0, gl.canvas.width, gl.canvas.height, 0, -1, 1);
m4.scale(mat, [gl.canvas.width, gl.canvas.height, 1], mat);
const scaleX = easeLerp(.5, 2, time * 1.1);
const scaleY = easeLerp(.5, 2, time * 1.1);
const dispScaleX = 1;
const dispScaleY = 1;
// origin of scale/rotation
const originX = gl.canvas.width * .5;
const originY = gl.canvas.height * .5;
// scroll position in pixels
const scrollX = time % (mapWidth * tileWidth );
const scrollY = time % (mapHeight * tileHeight);
const rotation = time;
const tmat = m4.identity();
m4.translate(tmat, [scrollX, scrollY, 0], tmat);
m4.rotateZ(tmat, rotation, tmat);
m4.scale(tmat, [
gl.canvas.width / tileWidth / scaleX * (dispScaleX),
gl.canvas.height / tileHeight / scaleY * (dispScaleY),
1,
], tmat);
m4.translate(tmat, [
-originX / gl.canvas.width,
-originY / gl.canvas.height,
0,
], tmat);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_texMatrix: tmat,
u_tilemap: mapTexture,
u_tiles: tileTexture,
u_tilemapSize: [mapWidth, mapHeight],
u_tilesetSize: [tilesAcross, tilesDown],
});
gl.drawArrays(gl.TRIANGLES, 0, 6);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
canvas { border: 1px solid black; }
<canvas id="c"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>

How to reduce three.js CPU/GPU usage in browser

At the moment I have an animated globe which rotates, and the dots on the globe randomly change colour. It works fine, but if left in the background it slows down my laptop a lot. Are there any changes I could make that would reduce how much memory it is using?
In Chrome's task manager I can see it's using 12% CPU and 128 MB of GPU memory when the tab is active. Is that normal for three.js, or does the code need to be changed?
ngAfterViewInit() {
if(this.enabled) {
this.controls = new OrbitControls(this.camera, this.renderer.domElement);
this.controls.rotateSpeed = 0.5;
this.controls.enableDamping = true;
this.controls.dampingFactor = 0.5;
this.controls.rotationSpeed = 0.3;
this.controls.enableZoom = false;
this.controls.autoRotate = true;
this.controls.autoRotateSpeed = -1;
this.renderer.setSize(window.innerWidth, window.innerHeight);
this.rendererContainer.nativeElement.appendChild(this.renderer.domElement);
this.animate();
const timerId = setInterval(() => this.updateColor(), 650);
}
}
private get enabled(): boolean {
if(this._enabled!==undefined) {
return this._enabled;
}
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
this._enabled = gl && gl instanceof WebGLRenderingContext;
return this._enabled;
}
private initGlobe(): void {
this.scene = new THREE.Scene();
this.camera = new THREE.PerspectiveCamera(50, window.innerWidth / window.innerHeight, 1, 1000);
this.camera.position.set(0, 5, 15);
this.camera.lookAt(this.scene.position);
this.renderer = new THREE.WebGLRenderer({
antialias: true
});
this.renderer.setClearColor('rgb(55, 44, 80)');
this.geom = new THREE.SphereBufferGeometry(6, 350, 90);
this.colors = [];
this.color = new THREE.Color();
this.colorList = ['rgb(123, 120, 194)'];
for (let i = 0; i < this.geom.attributes.position.count; i++) {
this.color.set(this.colorList[THREE.Math.randInt(0, this.colorList.length - 1)]);
this.color.toArray(this.colors, i * 3);
}
this.geom.addAttribute('color', new THREE.BufferAttribute(new Float32Array(this.colors), 3));
this.geom.addAttribute('colorRestore', new THREE.BufferAttribute(new Float32Array(this.colors), 3));
this.loader = new THREE.TextureLoader();
this.loader.setCrossOrigin('');
this.texture = this.loader.load('/assets/globe-dot.jpg');
this.texture.wrapS = THREE.RepeatWrapping;
this.texture.wrapT = THREE.RepeatWrapping;
this.texture.repeat.set(1, 1);
const oval = this.loader.load('/assets/circle.png');
this.points = new THREE.Points(this.geom, new THREE.ShaderMaterial({
vertexColors: THREE.VertexColors,
uniforms: {
visibility: {
value: this.texture
},
shift: {
value: 0
},
shape: {
value: oval
},
size: {
value: 0.4
},
scale: {
value: 300
}
},
vertexShader: `
uniform float scale;
uniform float size;
varying vec2 vUv;
varying vec3 vColor;
void main() {
vUv = uv;
vColor = color;
vec4 mvPosition = modelViewMatrix * vec4( position, 0.99 );
gl_PointSize = size * ( scale / length( mvPosition.xyz )) * (0.3 + sin(uv.y * 3.1415926) * 0.35 );
gl_Position = projectionMatrix * mvPosition;
}
// `,
fragmentShader: `
uniform sampler2D visibility;
uniform float shift;
uniform sampler2D shape;
varying vec2 vUv;
varying vec3 vColor;
void main() {
vec2 uv = vUv;
uv.x += shift;
vec4 v = texture2D(visibility, uv);
if (length(v.rgb) > 1.0) discard;
gl_FragColor = vec4( vColor, 0.9 );
vec4 shapeData = texture2D( shape, gl_PointCoord );
if (shapeData.a < 0.0625) discard;
gl_FragColor = gl_FragColor * shapeData;
}
`,
transparent: false
}));
this.points.sizeAttenuation = false;
this.scene.add(this.points);
this.globe = new THREE.Mesh(this.geom, new THREE.MeshBasicMaterial({
color: 'rgb(65, 54, 88)', transparent: true, opacity: 0.5
}));
this.globe.scale.setScalar(0.99);
this.points.add(this.globe);
this.scene.add(this.globe);
}
animate() {
this.controls.update();
this.renderer.render(this.scene, this.camera);
this.animationQueue.push(this.animate);
window.requestAnimationFrame(_ => this.nextAnimation());
}
nextAnimation() {
try {
const animation = this.animationQueue.shift();
if (animation instanceof Function) {
animation.bind(this)();
}
} catch (e) {
console.error(e);
}
}
updateColor() {
for (let i = 0; i < this.usedIndices.length; i++) {
let idx = this.usedIndices[i];
this.geom.attributes.color.copyAt(idx, this.geom.attributes.colorRestore, idx);
}
for (let i = 0; i < this.pointsUsed; i++) {
let idx = THREE.Math.randInt(0, this.geom.attributes.color.count - 1);
if (idx%5 == 0 && idx%1 == 0) {
this.geom.attributes.color.setXYZ(idx, 0.9, 0.3, 0);
}
else {
this.geom.attributes.color.setXYZ(idx, 1, 1, 1);
}
this.usedIndices[i] = idx;
}
this.geom.attributes.color.needsUpdate = true;
I looked at other questions which suggest merging the meshes but I'm not sure that would work here. Thanks!
It depends on what you mean by "background"
If by "background" you mean "not the front tab" then, if you're using requestAnimationFrame (which you are) then if your page is not the front tab of the browser or if you minimize the browser window the browser will stop sending you animation frame events and your page should stop completely.
If by "background" you mean the front tab but of a window that's not minimized and is also not the front window then you can use the blur and focus events to stop the page completely.
Example (NOTE: blur events don't seem to work in an iframe, so it won't work in the snippet below, but if you copy it to a file it should work):
let requestId;
function start() {
if (!requestId) {
requestId = requestAnimationFrame(animate);
}
}
function stop() {
console.log('stop');
if (requestId) {
cancelAnimationFrame(requestId);
requestId = undefined;
}
}
const ctx = document.querySelector("canvas").getContext('2d');
function animate(time) {
requestId = undefined;
ctx.save();
ctx.translate(
150 + 150 * Math.cos(time * 0.001),
75 + 75 * Math.sin(time * 0.003),
);
ctx.scale(
Math.cos(time * 0.005),
Math.cos(time * 0.007),
);
ctx.fillStyle = `hsl(${time % 360},100%,50%)`;
ctx.fillRect(-50, 50, 100, 100);
ctx.restore();
start();
}
start();
window.addEventListener('blur', stop);
window.addEventListener('focus', start);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<canvas></canvas>
Of course, rather than stopping completely on blur you could throttle your app yourself: only render every 5th frame, render fewer things, etc., as in the sketch below.
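A minimal sketch of that kind of frame skipping (illustrative only; the controls, renderer, scene, and camera names are assumed to match the question's code):
let throttled = false;
window.addEventListener('blur', () => { throttled = true; });
window.addEventListener('focus', () => { throttled = false; });
let frameCount = 0;
function animate() {
  requestAnimationFrame(animate);
  frameCount++;
  // while throttled, skip all but every 5th frame
  if (throttled && frameCount % 5 !== 0) {
    return;
  }
  controls.update();
  renderer.render(scene, camera);
}
requestAnimationFrame(animate);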

shaderMaterial not working with Points

function createParticles(imgData) {
const geometry = new THREE.BufferGeometry();
var c = 0, x = 0, y = 0, positions = [], colors = [];
var data = imgData.data;
x = -imgData.width * 0.5;
y = imgData.height * 0.5;
for (var i = 0; i < imgData.width; i++) {
for (var j = 0; j < imgData.height; j++) {
positions.push(i - imgData.width * 0.5, j - imgData.height * 0.5, 0);
}
}
geometry.addAttribute('position', new THREE.Float32BufferAttribute(positions, 3));
var material = new THREE.ShaderMaterial({ fragmentShader: document.getElementById('f-shader').textContent });
return new THREE.Points(geometry, material);
}
and this is my shader:
<script type="shader" id="f-shader">
void main(){
gl_FragColor = vec4(1.0, 0.58, 0.86, 1.0);
}
</script>
I add this ShaderMaterial, but nothing shows up, and the points do show when there is no material.
You will also need to specify a vertex shader, and the vertex shader will have to set gl_PointSize. Something like:
void main() {
gl_PointSize = 10.0;
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(position, 1.0);
}
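Then pass both shaders to the material; a minimal sketch, assuming the vertex shader above is placed in a script tag with id "v-shader" (that id is illustrative, not from the question):
// inside createParticles(), replacing the original material creation
var material = new THREE.ShaderMaterial({
  vertexShader: document.getElementById('v-shader').textContent,   // the snippet above, with gl_PointSize
  fragmentShader: document.getElementById('f-shader').textContent
});
return new THREE.Points(geometry, material);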
