WebGL: INVALID_VALUE: texImage2D: no canvas in three.js - three.js

I followed a procedural planet example from https://github.com/brunosimon/lab/blob/master/experiments/procedural-planet/index.html, the original three.js version is old.
The general idea of the code is simple: first generate a faceted sphere from six box geometries, then generate six textures from the gradient canvas, and finally generate the material from these generated textures.
the main code about generating material
// Build the six per-face materials for the cube-sphere planet.
// Generates the face textures/bump maps once, then wraps each pair in
// a shader material. Returns an array of six THREE.ShaderMaterial.
function generate_material()
{
    var face_maps = generate_maps();
    var face_materials = [];

    // One shader material per box face, indices 0..5.
    for ( var face = 0; face < 6; face++ )
    {
        var face_material = get_shader_material( face_maps.textures[ face ], face_maps.bumps[ face ] );
        face_materials.push( face_material );
    }

    return face_materials;
}
// Render the six procedural face textures into offscreen render targets
// and read each frame's pixels back as bump-map data.
// Relies on module-level `renderer`, `seed` and `options` (not defined
// in this excerpt).
// Returns { textures, bumps, smaterials }.
function generate_maps ()
{
var textures = [],
bumps = [],
materials = [],
resolution = 1024;
// Each face: render the procedural shader on a full-target quad.
for(var i = 0; i < 6; i++)
{
// NOTE(review): the target is created with RGBFormat but readPixels()
// below reads RGBA — a format mismatch worth confirming.
var texture = new THREE.WebGLRenderTarget( resolution, resolution, { minFilter : THREE.LinearFilter, magFilter : THREE.LinearFilter, format : THREE.RGBFormat } ),
texture_camera = new THREE.OrthographicCamera( - resolution / 2, resolution / 2, resolution / 2, - resolution / 2, -100, 100 ),
texture_scene = new THREE.Scene(),
geometry = new THREE.PlaneBufferGeometry( resolution, resolution ),
// NOTE(review): `new` on a plain function that returns a material —
// the returned object wins, but the `new` is unnecessary.
material = new get_texture_shader_material( i, seed, options.clouds.distortion ),
plane = new THREE.Mesh( geometry,material);
materials.push( material );
texture_camera.position.z = 10;
plane.position.z = - 10;
texture_scene.add(plane);
// NOTE(review): the render target `texture` is never passed to
// render(), so nothing is ever drawn into it — presumably the source
// of the "texture not renderable" warnings; confirm against this
// three.js version's render-to-target API.
renderer.render( texture_scene, texture_camera );
/**/
// Retrieve buffer: RGBA, one byte per channel, from the canvas.
var buffer = new Uint8Array( resolution * resolution * 4 ),
gl = renderer.getContext();
gl.readPixels( 0, 0, resolution, resolution, gl.RGBA, gl.UNSIGNED_BYTE, buffer );
textures.push( texture );
// DataTexture-style image descriptor consumed later as a bump map.
bumps.push( {
image :
{
data : buffer,
height : resolution,
width : resolution
}
} );
}
// Return all generated resources (note key name `smaterials`).
return {
textures : textures,
bumps : bumps,
smaterials : materials,
};
}
// Build the planet-surface shader material for one cube face.
// texture_map : render target whose texture drives the face colour
// bump_map    : matching bump data (accepted for interface
//               compatibility; not referenced in this body)
// Relies on the module-level `textures` object holding the water /
// ground / ice gradient render targets, and on DOM <script> tags for
// the shader sources.
function get_shader_material ( texture_map, bump_map )
{
    var vertex_source = document.getElementById( 'planet-vertex-shader' ).textContent;
    var fragment_source = document.getElementById( 'planet-fragment-shader' ).textContent;
    var uniforms = generate_uniforms( texture_map );

    // Same diagnostic as the original: log only when the water
    // gradient is missing.
    if ( !textures.water )
    {
        console.log('no water')
    }

    // Gradient lookup textures shared by every face material.
    uniforms.tWaterGradient  = { type : 't', value : textures.water.texture };
    uniforms.tGroundGradient = { type : 't', value : textures.ground.texture };
    uniforms.tIceGradient    = { type : 't', value : textures.ice.texture };

    return new THREE.ShaderMaterial( {
        uniforms : uniforms,
        vertexShader : vertex_source,
        fragmentShader : fragment_source,
        transparent : true
    } );
}
// Build the material used to render one face's procedural texture into
// its offscreen target.
// index             : cube-face index (0..5), passed to the shader
// seed              : vector seed for the procedural noise
// clouds_distortion : scalar distortion factor for the cloud layer
// Shader sources come from DOM <script> tags.
function get_texture_shader_material ( index, seed, clouds_distortion )
{
    var vertex_source = document.getElementById( 'planet-texture-vertex-shader' ).textContent;
    var fragment_source = document.getElementById( 'planet-texture-fragment-shader' ).textContent;

    // Typed uniform descriptors, kept in the original's old-style
    // { type, value } form.
    var uniforms = {
        index             : { type : 'i',  value : index },
        fCloudsDistortion : { type : 'f',  value : clouds_distortion },
        v3Seed            : { type : 'v3', value : seed }
    };

    return new THREE.ShaderMaterial( {
        uniforms : uniforms,
        vertexShader : vertex_source,
        fragmentShader : fragment_source,
        transparent : true,
        depthWrite : false
    } );
}
I reused most of the code however got some problem with the material part, the code example I made is seen here - https://jsfiddle.net/ufjqcht6/86/
three.module.js?3179:19902 WebGL: INVALID_VALUE: texImage2D: no canvas
texImage2D # three.module.js?3179:19902
GL ERROR :GL_INVALID_OPERATION : glGenerateMipmap: Can not generate mips ERROR GL_INVALID_OPERATION : glGenerateMipmap: Can not generate mips
RENDER WARNING: texture bound to texture unit 1 is not renderable. It might be non-power-of-2 or have incompatible texture filtering (maybe)?
RENDER WARNING: texture bound to texture unit 2 is not renderable. It might be non-power-of-2 or have incompatible texture filtering (maybe)?
the code example can be seen here, where the issues can be only seen from the chrome 's developer 's console.
I debugged the code but I can't identify whether the problem coming from the shader's code or the js's code ? and how to solve it

I debugged the code but I can't identify whether the problem coming from the shader's code or the js's code ?
The problem is the way you are using Object.assign() in your code. For the second and third texture, you overwrite the style property so it no longer has a definition for width and height. Gradient_Texture.init() will then assign undefined to the canvas's dimensions, which results in the respective WebGL error.
Fixing the Object.assign() statements solves the issue: https://jsfiddle.net/L21jz07k/

Related

Alpha blending in three.js

I am trying to volume render the raw data using three.js. The raw data can be of any type. For rendering the data I have written code:
dims is an array of size of data in three axes:
One of the data dims is [256, 256, 128] and data is a Uint8Array(8388608).
// Build the 3D data texture and custom-blended shader material for the
// volume render.
// NOTE(review): this construction line appears twice in the original
// post — presumably a paste duplication; the second wins.
var texture = new THREE.DataTexture3D(data, dims[0], dims[1], dims[2]);
var texture = new THREE.DataTexture3D(data, dims[0], dims[1], dims[2]);
// eval() turns the "THREE.xxx" strings from findFormat() back into
// constants — NOTE(review): returning the constants directly would
// avoid eval.
texture.format = eval(format[1]);
texture.type = eval(format[0]);
texture.minFilter = texture.magFilter = THREE.LinearFilter;
// unpackAlignment 1: no row padding assumed (WebGL UNPACK_ALIGNMENT).
texture.unpackAlignment = 1;
// Custom blending: classic src-alpha / one-minus-src-alpha "over".
material = new THREE.ShaderMaterial({
uniforms: uniforms,
vertexShader: shader.vertexShader,
fragmentShader: shader.fragmentShader,
side: THREE.BackSide,
transparent: true,
blending: THREE.CustomBlending,
blendEquation: THREE.AddEquation,
blendSrc: THREE.SrcAlphaFactor,
blendDst: THREE.OneMinusSrcAlphaFactor
});
This format array is from a function that checks the format of the data.
// Map a dtype string such as "uint8" / "int16" / "float32" to a pair
// of three.js constant names, [type, format], returned as strings that
// the caller eval()s. Unrecognised bit widths return the input
// unchanged; a string with no int/float match throws (as the original
// did, via exec() returning null).
function findFormat(dtype) {
  console.log(dtype);

  const match = /(u*)(int|float)(\d+)/ig.exec(dtype);
  const bits = Number(match[3]);
  let result = dtype;

  if (bits === 8) {
    result = ["THREE.UnsignedByteType", "THREE.RedFormat"];
  }
  if (bits === 16) {
    result = ["THREE.UnsignedShort4444Type", "THREE.RGBAFormat"];
  }
  if (bits === 32) {
    result = ["THREE.UnsignedByteType", "THREE.RGBAFormat"];
  }

  return result;
}
I am just checking the data type if it is 8/16/32 bits.
This is the output I am getting :
This is iso type:
This is mip type output :
What I believe is that opacity is not active in these so all of the rendered layer or object is looking as an opaque object. This is a required output or looks like these :
I don't understand how to perform alpha blending in this.

Colors in THREE.WebGLRenderTarget with alpha channel are darker than expected

I'm trying to render some graphics with transparency into a WebGLRenderTarget. The rendered image is then used as texture for a second material.
I have an issue with alpha blending. The color that I obtain when alpha=0.5 is darker than expected.
The image below shows the issue:
Circle on top is what I expect. This is obtained with an HTML DIV with rounder corners and opacity=0.5
Circle on bottom is what I obtain with with a shader that renders the circle inside a texture.
I think that I'm missing something!
Part of the code is reported below. You can find the complete code in the following jsbin: https://jsbin.com/zukoyaziqe/1/edit?html,js,output
Thank you for your help!!
Shader:
// Fragment shader source: paints a half-transparent magenta disc of
// radius 0.1 centred at UV (0.5, 0.2); all other fragments discard.
const texFrag = `
varying vec2 vUv;
void main() {
vec2 center = vec2(0.5, 0.2);
float d = length(vUv - center);
if (d < 0.1) {
gl_FragColor = vec4(1.0,0.0,1.0,0.5);
}
else {
discard;
}
}
`;
Texture:
// Render the circle shader into a float RGBA render target and return
// its texture for use on the main quad.
const makeTexture = (renderer, width, height) => {
const target = new THREE.WebGLRenderTarget(width, height, {minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat, type: THREE.FloatType});
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(90, 1, 0.1, 100000);
const geometry = new THREE.PlaneGeometry(2, 2);
// transparent:true enables alpha blending while drawing INTO the
// target — the answer below points here: the (1,0,1,0.5) source gets
// blended against the cleared target, darkening the stored colour.
const material = new THREE.ShaderMaterial({
transparent : true,
vertexShader : simpleVert,
fragmentShader : texFrag,
});
const mesh = new THREE.Mesh(geometry, material);
camera.position.set(0, 0, 1);
scene.add(camera);
scene.add(mesh);
// Render-to-target signature (scene, camera, target, forceClear) —
// NOTE(review): pre-r102-style three.js API; confirm version.
renderer.render(scene, camera, target, true);
return target.texture;
}
Main view:
// Main view: a full-screen quad textured with the offscreen circle
// produced by makeTexture().
const renderer = new THREE.WebGLRenderer({canvas});
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(90, 1, 0.1, 100000);
const geometry = new THREE.PlaneGeometry( 2, 2 );
const material = new THREE.MeshBasicMaterial({
transparent : true,
map : makeTexture(renderer, canvas.width, canvas.height)
});
const mesh = new THREE.Mesh(geometry, material);
First of all, in the example you linked, your main function is called twice, so there are two CSS circles stacked on top of each other, resulting in a less transparent circle.
Then, you're drawing a circle with color (1,0,1,0.5) on a blank render target, which, using the default blend mode (SRC_ALPHA, ONE_MINUS_SRC_ALPHA), results in (0.5,0,0.5,0.5) color, which is then used as a texture. If you want the original color in your texture, you should disable alpha blending or use a different blend mode. Simply setting transparent to false inside makeTexture does the trick.

Animating a THREE.Points object

I'm trying to load a model from Blender, apply a PointsMaterial to it and animate it. So far, I've been able to load the model and animate it successfully as long as I use a material other than THREE.PointsMaterial to create the mesh.
When I create a THREE.Points object, the animation doesn't play. I noticed that when I set the PointsMaterial's morphTargets property to true, there's a warning saying that there is no such property of PointsMaterial. Does Threejs not support animation of Points objects using morph targets?
monster refers to a mesh that works when animated. It uses the loaded geometry and material. ParticleSystem is the the THREE.Points object.
Code:
// Load a JSON model, build a morph-animated mesh (`monster`) plus a
// THREE.Points clone of it (`particleSystem`), and start the first
// animation clip on the points object.
// Relies on module-level `monster`, `particleSystem`, `scene`, `mixer`.
function loadObject(path){
var loader = new THREE.JSONLoader();
loader.load(
path,
function(geometry, materials){
var material = materials[ 0 ];
// Enable morph-target animation on the loaded mesh material.
material.morphTargets = true;
material.color.setHex( 0xffaaaa );
monster = new THREE.Mesh( geometry, materials );
monster.position.set( 0, 0, 0 );
var s = .003;
monster.scale.set( s, s, s );
// NOTE(review): PointsMaterial has no morphTargets property (this is
// the console warning mentioned in the question) — morphing
// THREE.Points this way may be unsupported in this version; confirm.
var particleMaterial = new THREE.PointsMaterial({
color: 0xffffff,
size: .005,
morphTargets: true
});
particleSystem = new THREE.Points(geometry, particleMaterial);
// Share the mesh's morph state so both would animate in lockstep.
particleSystem.morphTargetInfluences = monster.morphTargetInfluences;
particleSystem.morphTargetDictionary = monster.morphTargetDictionary;
particleSystem.position.set(0, 0, 0);
particleSystem.scale.set(s, s, s);
particleSystem.matrixAutoUpdate = false;
particleSystem.updateMatrix();
particleSystem.geometry.verticesNeedUpdate = true;
scene.add(particleSystem);
// Play the first baked animation clip on the points object.
mixer.clipAction( geometry.animations[ 0 ], particleSystem )
.setDuration( 5 ) // five seconds per loop
.startAt( - Math.random() ) // random phase (already running)
.play(); // let's go
}
)
}

Three.js Object3d child lookAt camera position

I am struggling with an Object3D which child meshes should look at the camera position.
It works fine, if the camera is "far" away, but not if the camera moves towards the object.
Then, if the camera position is near the object position, the second added plane rotates until the camera looks at the edge of the plane.
And I have no idea why this behavior appears just on the second added plane, and only when the camera is near the object position.
Here is what i have so far.
Create the Object:
// Container object whose child planes should billboard toward the
// camera each frame.
var obj = new THREE.Object3D();
obj.position.set( x, y, z );
// Uniforms shared by both planes' shader materials.
var Uniforms = {
texturePrimary: { type: "t", value: Texture },
textureColorGraph: { type: "t", value: ColorGraph },
time: { type: "f", value: 0 },
color: { type: "f", value: 0 }
};
obj.Uniforms = Uniforms;
// NOTE(review): called as makeplane1/makeplane2 but defined below as
// makePlane1/makePlane2 — case mismatch as posted.
obj.add( makeplane1( 3.2, Uniforms ) );
obj.add( makeplane2( 25, Uniforms ) );
// Per-frame hook; `pos` is the camera's world position.
obj.update = function( pos ){
this.Uniforms.time.value = shaderTiming;
$.each(this.children, function(i,mesh){
if( mesh.name === "plane1" || mesh.name === "plane2"){
// NOTE(review): worldToLocal() transforms `pos` IN PLACE, so the
// second plane receives a vector already converted once — likely
// why only the second plane misbehaves; try passing pos.clone()
// per child to confirm.
var vec = mesh.parent.worldToLocal( pos );
mesh.lookAt( vec );
}
});
};
// Build the first billboard plane ("plane1") with additive blending.
// NOTE(review): as posted this does not parse — the ShaderMaterial
// argument is closed with `};` instead of `});`, and the function ends
// with `);` instead of `}`.
function makePlane1( radius, uniforms ){
var Geo = new THREE.PlaneGeometry( radius, radius );
var Material = new THREE.ShaderMaterial(
{
uniforms: uniforms,
vertexShader: shaders[1].vertex,
fragmentShader: shaders[1].fragment,
blending: THREE.AdditiveBlending,
transparent: true
};
var plane = new THREE.Mesh( Geo, Material );
plane.name = "plane1";
return plane;
);
// Build the second billboard plane ("plane2"), same structure as
// makePlane1 but using shaders[2].
// NOTE(review): as posted this does not parse — the options object is
// closed with `};` and followed by a stray `);`.
function makePlane2( radius, uniforms ){
var Geo = new THREE.PlaneGeometry( radius, radius );
var Material = new THREE.ShaderMaterial(
{
uniforms: uniforms,
vertexShader: shaders[2].vertex,
fragmentShader: shaders[2].fragment,
blending: THREE.AdditiveBlending,
transparent: true
};
);
var plane = new THREE.Mesh( Geo, Material );
plane.name = "plane2";
return plane;
}
I could call this.lookAt( pos ) in obj.update( pos ) to rotate the whole object, but other meshes should not rotate that way, so that is sadly no option.
And a simple vertex shader for both planes:
// Pass-through vertex shader shared by both planes: standard MVP
// transform, forwards the mesh UVs to the fragment stage.
varying vec2 vUv;
void main() {
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
vUv = uv;
}
And then i call in the animationloop:
// Animation loop: update shader time and call every scene child's
// update() hook with the camera's world position.
$.each(scene.children, function(i, obj){
if( obj.update !== undefined ) {
shaderTiming = (time - startTime )/ 1000;
obj.update( camera.getWorldPosition() );
}
});
EDIT: I Just noticed that this behavior just occur, if the object's position is not (0,0,0). If so it works just like it should at any camera position.
Also a simple distance calculation, object to camera, is not working properly.
// Manual Euclidean distance between the object and the camera.
// NOTE(review): equivalent to this.position.distanceTo(camera.position).
vec1 = this.position;
vec2 = camera.position;
var dist = Math.sqrt(Math.pow(vec1.x - vec2.x, 2) + Math.pow(vec1.y - vec2.y, 2) + Math.pow(vec1.z - vec2.z, 2));
Thanks for any hints.
Object3D.lookAt() does not support objects with rotated and/or translated parent(s).
three.js r.85

Shader material glow bug when pan,zoom in three.js

I create a virtual earth like this with this code:
// Build the earth scene at real-world scale (1 unit = 1 m): star
// sphere, textured earth, edges helper, and an atmosphere glow shell.
// Relies on module-level `scene`, `main()`; assigns global `helper`.
function earthView(){
if (!scene){
main(); // create three js basic code(camera, renderer etc.)
}
// create the geometry sphere stars (radius ~6.37e9 m, viewed from inside)
var geometry = new THREE.SphereBufferGeometry(6371000000, 36, 36)
// create the material, using a texture of startfield
var material = new THREE.MeshBasicMaterial()
material.map = THREE.ImageUtils.loadTexture('images/earthView/ESO_-_Milky_Way.jpg')
material.side = THREE.BackSide
// create the mesh based on geometry and material
var mesh = new THREE.Mesh(geometry, material)
mesh.position.set(0,0,-6371000)
scene.add(mesh)
/*
var geometry = new THREE.SphereGeometry(5000,10,10);
var material = new THREE.MeshBasicMaterial({color:"0xff0000"});
var mesh_test = new THREE.Mesh(geometry,material);
scene.add(mesh_test);*/
//earth sphere, radius 6,371 km (re-uses the variable names above)
var geometry = new THREE.SphereBufferGeometry(6371000, 36, 36)
var material = new THREE.MeshPhongMaterial()
var earthMesh = new THREE.Mesh(geometry, material)
earthMesh.position.set(0,0,-6371000)
earthMesh.rotation.set(Math.PI/2,Math.PI/2,0)
earthMesh.rotation.y -=22.87*Math.PI/180//rightturn ^
earthMesh.rotation.x +=49.02*Math.PI/180//rightturn ^
// NOTE(review): `helper` has no var/let — implicit global, as posted.
helper = new THREE.EdgesHelper( earthMesh );
helper.material.color.set( 0xffffff );
material.map = THREE.ImageUtils.loadTexture('images/earthView/earthmap1k.jpg')
material.bumpMap = THREE.ImageUtils.loadTexture('images/earthView/earthbump1k.jpg')
material.bumpScale = 100
material.specularMap = THREE.ImageUtils.loadTexture('images/earthView/earthspec1k.jpg')
scene.add(earthMesh);
scene.add( helper );
//atmosphere shell, 3,000 km larger than the earth sphere
var geometry = new THREE.SphereBufferGeometry(9371000, 36, 36)
// NOTE(review): `new` on a plain factory function — the returned
// material wins, but the `new` is unnecessary.
var material = new createAtmosphereMaterial()
material.uniforms.glowColor.value.set(0x00b3ff)
material.uniforms.coeficient.value = 0.02
material.uniforms.power.value = 2.5
material.side = THREE.DoubleSide
var earthAtmo = new THREE.Mesh(geometry, material)
earthAtmo.position.set(0,0,-6371000)
scene.add(earthAtmo);
/**
* from http://stemkoski.blogspot.fr/2013/07/shaders-in-threejs-glow-and- halo.html
* #return {[type]} [description]
*/
// Rim-glow material: intensity grows where the surface normal turns
// away from the view direction (additive, back-side, no depth write).
function createAtmosphereMaterial(){
var vertexShader = [
'varying vec3 vNormal;',
'void main(){',
' // compute intensity',
' vNormal = normalize( normalMatrix * normal );',
' // set gl_Position',
' gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );',
'}',
].join('\n')
var fragmentShader = [
'uniform float coeficient;',
'uniform float power;',
'uniform vec3 glowColor;',
'varying vec3 vNormal;',
'void main(){',
' float intensity = pow( coeficient - dot(vNormal, vec3(0.0, 0.0, 1.0)), power );',
' gl_FragColor = vec4( glowColor * intensity, 1.0 );',
'}',
].join('\n')
// create custom material from the shader code above
// that is within specially labeled script tags
var material = new THREE.ShaderMaterial({
uniforms: {
coeficient : {
type : "f",
value : 1.0
},
power : {
type : "f",
value : 2
},
glowColor : {
type : "c",
value : new THREE.Color('blue')
},
},
vertexShader : vertexShader,
fragmentShader : fragmentShader,
side : THREE.BackSide,
blending : THREE.AdditiveBlending,
transparent : true,
depthWrite : false,
});
return material
}
}
in previus question, I had problem with the renderer because i create the virtual earth in real scale (1px = 1m). I overcame this error by setting the logarithmicDepthBuffer: true when defining the renderer.
Now the problem is that the atmosphere (glow shader material) has a bug when panning or zooming in the webgl - container which is already been stated and here is an example to solve this problem.
The question is: how can i change my code to overcome this bug?(I suppose there is something to add in the render function but i just can't get it to work).
Hint1: this bug is only happening when setting the logarithmicDepthBuffer: true. Else i get a false rendering because of the large scale object i am using.
Image1:render option logarithmicDepthBuffer: false,no bug, only false rendering.
Image2:render option logarithmicDepthBuffer: true,no bug, if not zoom or pan.
Image3:render option logarithmicDepthBuffer: true, when zoom in the area of the applied shader material seams to became smaller.
Image4:render option logarithmicDepthBuffer: true,when pan the area of the applied shader material seams not to follow or understand the pan.
Hint2: the area that shader material is renderable seams to became bigger when zooming out and smaller when zooming in.
Update: As a see now the problem is taking place when i add the star sphere. If i dont add the star sphere then everything works correct.. Any thoughts why this is happening??

Resources