I am trying to volume render raw data using three.js. The raw data can be of any type. For rendering the data I have written this code:
dims is an array holding the size of the data along the three axes.
For one of my datasets, dims is [256, 256, 128] and data is a Uint8Array(8388608).
var texture = new THREE.DataTexture3D(data, dims[0], dims[1], dims[2]);
texture.format = eval(format[1]);
texture.type = eval(format[0]);
texture.minFilter = texture.magFilter = THREE.LinearFilter;
texture.unpackAlignment = 1;
material = new THREE.ShaderMaterial({
    uniforms: uniforms,
    vertexShader: shader.vertexShader,
    fragmentShader: shader.fragmentShader,
    side: THREE.BackSide,
    transparent: true,
    blending: THREE.CustomBlending,
    blendEquation: THREE.AddEquation,
    blendSrc: THREE.SrcAlphaFactor,
    blendDst: THREE.OneMinusSrcAlphaFactor
});
The format array comes from a function that checks the format of the data:
function findFormat(dtype) {
    console.log(dtype);
    let regex = /(u*)(int|float)(\d+)/ig;
    let format = regex.exec(dtype);
    if (format[3] == 8) {
        dtype = ["THREE.UnsignedByteType", "THREE.RedFormat"];
    }
    if (format[3] == 16) {
        dtype = ["THREE.UnsignedShort4444Type", "THREE.RGBAFormat"];
    }
    if (format[3] == 32) {
        dtype = ["THREE.UnsignedByteType", "THREE.RGBAFormat"];
    }
    return (dtype);
}
I am just checking whether the data type is 8, 16, or 32 bits.
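(Side note: below is a sketch of an eval-free variant of this helper that returns the THREE constants directly; the 16/32-bit mappings just mirror the choices above and may not suit every dataset.)
// Sketch of an eval-free variant (illustrative): return the actual THREE constants
// instead of strings, so texture.type/format can be assigned without eval().
function findFormatDirect(dtype) {
    var bits = /(u?)(int|float)(\d+)/i.exec(dtype)[3];
    if (bits == 8)  return { type: THREE.UnsignedByteType,      format: THREE.RedFormat };
    if (bits == 16) return { type: THREE.UnsignedShort4444Type, format: THREE.RGBAFormat };
    if (bits == 32) return { type: THREE.UnsignedByteType,      format: THREE.RGBAFormat };
}
// Usage:
// var fmt = findFormatDirect('uint8');
// texture.type = fmt.type;
// texture.format = fmt.format;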
This is the output I am getting.
Iso type output:
MIP type output:
I believe opacity is not being applied, so the whole rendered volume looks like an opaque object. The required output should look like these:
I don't understand how to perform alpha blending here.
I followed a procedural planet example from https://github.com/brunosimon/lab/blob/master/experiments/procedural-planet/index.html; the three.js version it uses is old.
The general idea of the code is simple: first generate a faceted sphere from six box geometries, then generate six textures from the gradient canvas, and finally generate the material from these generated textures.
The main code for generating the material:
function generate_material()
{
    var maps = generate_maps();
    //console.log(maps.textures);
    var materials = [];
    // Each face
    for( var i = 0; i < 6; i++ )
        materials.push( get_shader_material( maps.textures[ i ], maps.bumps[ i ] ) );
        //materials.push( new THREE.MeshBasicMaterial({map: makeCanvasTexture()}) );
    return materials;
}
function generate_maps ()
{
    var textures = [],
        bumps = [],
        materials = [],
        resolution = 1024;
    // Each face
    for(var i = 0; i < 6; i++)
    {
        var texture = new THREE.WebGLRenderTarget( resolution, resolution, { minFilter : THREE.LinearFilter, magFilter : THREE.LinearFilter, format : THREE.RGBFormat } ),
            texture_camera = new THREE.OrthographicCamera( - resolution / 2, resolution / 2, resolution / 2, - resolution / 2, -100, 100 ),
            texture_scene = new THREE.Scene(),
            geometry = new THREE.PlaneBufferGeometry( resolution, resolution ),
            material = new get_texture_shader_material( i, seed, options.clouds.distortion ),
            plane = new THREE.Mesh( geometry, material );
        materials.push( material );
        texture_camera.position.z = 10;
        plane.position.z = - 10;
        texture_scene.add( plane );
        renderer.render( texture_scene, texture_camera );
        /**/
        // Retrieve buffer
        var buffer = new Uint8Array( resolution * resolution * 4 ),
            gl = renderer.getContext();
        gl.readPixels( 0, 0, resolution, resolution, gl.RGBA, gl.UNSIGNED_BYTE, buffer );
        textures.push( texture );
        bumps.push( {
            image :
            {
                data : buffer,
                height : resolution,
                width : resolution
            }
        } );
    }
    // Return
    return {
        textures : textures,
        bumps : bumps,
        smaterials : materials,
    };
}
function get_shader_material ( texture_map, bump_map )
{ //
    var vertexShader = document.getElementById( 'planet-vertex-shader' ).textContent,
        fragmentShader = document.getElementById( 'planet-fragment-shader' ).textContent,
        uniforms = generate_uniforms( texture_map );
    if( textures.water ){
    }else{
        console.log('no water')
    }
    uniforms.tWaterGradient =
    {
        type : 't',
        value : textures.water.texture
    };
    uniforms.tGroundGradient =
    {
        type : 't',
        value : textures.ground.texture
    };
    uniforms.tIceGradient =
    {
        type : 't',
        value : textures.ice.texture
    };
    return new THREE.ShaderMaterial( {
        uniforms : uniforms,
        vertexShader : vertexShader,
        fragmentShader : fragmentShader,
        transparent : true
    } );
}
function get_texture_shader_material ( index, seed, clouds_distortion )
{
    var vertexShader = document.getElementById( 'planet-texture-vertex-shader' ).textContent,
        fragmentShader = document.getElementById( 'planet-texture-fragment-shader' ).textContent,
        uniforms =
        {
            index :
            {
                type : 'i',
                value : index
            },
            fCloudsDistortion :
            {
                type : 'f',
                value : clouds_distortion
            },
            v3Seed :
            {
                type : 'v3',
                value : seed
            }
        };
    return new THREE.ShaderMaterial( {
        uniforms : uniforms,
        vertexShader : vertexShader,
        fragmentShader : fragmentShader,
        transparent : true,
        depthWrite : false
    } );
}
I reused most of the code but ran into a problem with the material part. The code example I made can be seen here: https://jsfiddle.net/ufjqcht6/86/
three.module.js?3179:19902 WebGL: INVALID_VALUE: texImage2D: no canvas
texImage2D # three.module.js?3179:19902
GL ERROR :GL_INVALID_OPERATION : glGenerateMipmap: Can not generate mips ERROR GL_INVALID_OPERATION : glGenerateMipmap: Can not generate mips
RENDER WARNING: texture bound to texture unit 1 is not renderable. It might be non-power-of-2 or have incompatible texture filtering (maybe)?
RENDER WARNING: texture bound to texture unit 2 is not renderable. It might be non-power-of-2 or have incompatible texture filtering (maybe)?
The issues can only be seen in Chrome's developer console.
I debugged the code, but I can't identify whether the problem comes from the shader code or the JavaScript code, or how to solve it.
The problem is the way you are using Object.assign() in your code. For the second and third texture, you overwrite the style property, so it no longer has a definition for width and height. Gradient_Texture.init() will then assign undefined to the canvas's dimensions, which results in the respective WebGL errors.
Fixing the Object.assign() statements solves the issue: https://jsfiddle.net/L21jz07k/
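To illustrate the pitfall with a simplified example (not the exact code from the fiddle): Object.assign() merges shallowly, so a nested style object gets replaced as a whole rather than merged:
// Shallow merge: the whole nested `style` object is replaced, not merged.
var base = { style: { width: 1024, height: 1024 } };
var broken = Object.assign({}, base, { style: { color: 'red' } });
console.log(broken.style.width); // undefined – width and height are gone
// Merging the nested object explicitly keeps width and height:
var fixed = Object.assign({}, base, {
    style: Object.assign({}, base.style, { color: 'red' })
});
console.log(fixed.style.width); // 1024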
I am trying to apply a texture to my 3D model with three.js.
I tried a lot of approaches from the three.js examples, but for some reason nothing is working.
Here is how I apply the texture to my model:
//LOADING TEXTURE
var textureLoader = new THREE.TextureLoader();
var diffuse = textureLoader.load( "models/Carbon.png" );
diffuse.encoding = THREE.sRGBEncoding;
diffuse.wrapS = THREE.RepeatWrapping;
diffuse.wrapT = THREE.RepeatWrapping;
diffuse.repeat.x = 1;
diffuse.repeat.y = 1;
var normalMap = textureLoader.load( "models/Carbon_Normal.png" );
normalMap.wrapS = THREE.RepeatWrapping;
normalMap.wrapT = THREE.RepeatWrapping;
var material = new THREE.MeshPhysicalMaterial({
    roughness: 0.5,
    clearcoat: 1.0,
    clearcoatRoughness: 0.1,
    map: diffuse,
    normalMap: normalMap
});
// LOADING MODEL
var loader = new THREE.GLTFLoader();
loader.load("models/stairs.gltf", function(gltf){
    model = gltf.scene;
    model.traverse((newModel) => {
        if (newModel.isMesh){
            newModel.material = material;
            newModel.castShadow = true;
            newModel.receiveShadow = true;
        }
    });
    scene.add(model);
});
I appreciate any help!
When loading a texture for a glTF model, you have to set Texture.flipY to false to satisfy the uv convention of glTF.
diffuse.flipY = false;
normalMap.flipY = false;
Besides that, you always have to ensure that your model has texture coordinates. You can do this by checking whether the geometries of your meshes have a uv attribute. If this attribute is missing, I suggest you generate texture coordinates in a DCC tool like Blender.
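A quick runtime check could look like this (illustrative sketch, using the model variable from your question):
// Warn about meshes without texture coordinates; such meshes cannot display the map.
model.traverse(function (child) {
    if (child.isMesh && child.geometry.attributes.uv === undefined) {
        console.warn('Mesh "' + child.name + '" has no uv attribute.');
    }
});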
three.js R112
I want to adapt this shader:
https://aerotwist.com/tutorials/an-introduction-to-shaders-part-2/
to a standard Lambert or Phong material so that it works with all the lights in my scene.
My current state is that I extend the Lambert material with this code:
var attributes = {
    displacement: {
        type: 'f', // a float
        value: [] // an empty array
    }
};
var uniforms = {
    amplitude: {
        type: 'f', // a float
        value: 0
    }
};
var shaders = { mylambert : THREE.ShaderLib[ 'lambert' ] };
var materials = {};
materials.mylambert = function( parameters, myUniforms ) {
    var material = new THREE.ShaderMaterial( {
        vertexShader: $('#vertexLambert').text(),
        fragmentShader: shaders.mylambert.fragmentShader,
        uniforms: THREE.UniformsUtils.merge( [ shaders.mylambert.uniforms, myUniforms ] ),
        attributes : attributes,
        lights: true,
        shading: THREE.FlatShading
    } );
    material.setValues( parameters );
    return material;
};
var myProperties = {
    lights: true,
    fog: true,
    transparent: true
};
var myMaterial = new materials.mylambert( myProperties, uniforms );
I got this from this post:
extending lambert material, opacity not working
The vertexShader is basically the same as shaders.mylambert.vertexShader, but with the additional code from the shader example on top.
It works somehow: the vertices move, but the faces are not re-shaded when they change their shape, so they always have the same shading when I use, for example, a plane as the mesh.
In short:
I need a Lambert/Phong shader that moves the vertices up and down over time to simulate a low-poly water surface.
If this is still relevant, you can solve this issue in a much simpler way:
Have your model render with a Lambert, Phong, Standard or whatever lit material you like.
Create another Scene, Camera and a WebGLRenderTarget, create a plane and apply your ShaderMaterial to it. Position your camera so that the plane exactly fills the frame of that scene.
Render the other scene to the WebGLRenderTarget and apply it as a map to your original Lambert material this way:
let mat = new THREE.MeshLambertMaterial({
map: renderTarget.texture
})
Voila! You now have a fully lit ShaderMaterial as you like.
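A rough sketch of that setup (names like rtScene, rtCamera and myShaderMaterial are illustrative, and setRenderTarget() assumes a reasonably recent three.js version):
// Off-screen scene that renders the custom shader into a texture.
const renderTarget = new THREE.WebGLRenderTarget(1024, 1024);
const rtScene = new THREE.Scene();
const rtCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 10);
rtCamera.position.z = 1;
// Full-screen plane carrying your ShaderMaterial.
const rtPlane = new THREE.Mesh(new THREE.PlaneGeometry(2, 2), myShaderMaterial);
rtScene.add(rtPlane);
// In the render loop: render the shader into the target first...
renderer.setRenderTarget(renderTarget);
renderer.render(rtScene, rtCamera);
renderer.setRenderTarget(null);
// ...then use renderTarget.texture as the map of the Lambert material shown above.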
I would like to modify one object within a group of meshes using Perlin noise. Here is how I am doing this right now.
I am creating the objects and adding them to a THREE.Object3D:
spheres = new THREE.Object3D();
for ( var i = 0; i < 40; i ++ ) {
    var ball = new THREE.Mesh(new THREE.SphereGeometry(20,20,20), material);
    ball.position.x = Math.random() * 600 - 300;
    ball.position.y = Math.random() * 600 - 300;
    ball.position.z = Math.random() * 600 - 300;
    spheres.add(ball);
}
scene.add(spheres);
I am using a shader material...
material = new THREE.ShaderMaterial({
    uniforms: {
        time: {
            type: "f",
            value: 0.0
        },
        noisemax: {
            type: "f",
            value: 100.0
        }
    },
    transparent: true,
    vertexShader: document.getElementById( 'vertexShader' ).textContent,
    fragmentShader: document.getElementById( 'fragmentShader' ).textContent
});
The shader material works on all 40 balls, no problem. What I would like to do is change the shader, say, for spheres.children[0]. Is it possible to change the Perlin noise value (noisemax) for one object, or does it by nature affect the material for all objects using that material?
You have a couple of options.
The easy way is to create and use a separate ShaderMaterial for each item that differs, and you can set its uniform easily. e.g.
var firstMaterial = new THREE.ShaderMaterial({
uniforms: { noisemax: { type: 'f', value: 3}}
});
var secondMaterial = new THREE.ShaderMaterial({
uniforms: { noisemax: { type: 'f', value: 10}}
});
var firstBall = new THREE.Mesh(new THREE.SphereGeometry(20,20,20), firstMaterial);
var secondBall = new THREE.Mesh(new THREE.SphereGeometry(20,20,20), secondMaterial);
// or in your case
spheres.children[0].material = secondMaterial;
Alternatively, and probably preferably for your situation (at least from a performance standpoint), you could change noisemax to an attribute; that way you can have a separate value per object.
You will have to remember that attributes are per vertex, so you'll need to duplicate the value for all vertices belonging to each object. This will complicate things a little bit.
Edit: To reduce memory usage, you can use THREE.InstancedBufferGeometry with THREE.InstancedBufferAttribute.
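A rough sketch of the instanced approach (not tested against your setup; setAttribute() is addAttribute() on older three.js versions, and your vertex shader would need to declare and use the offset and noisemax attributes):
// One instanced draw call; each instance gets its own position offset and noisemax value.
var baseGeometry = new THREE.SphereBufferGeometry(20, 20, 20);
var geometry = new THREE.InstancedBufferGeometry().copy(baseGeometry);
var count = 40;
var offsets = new Float32Array(count * 3);
var noisemax = new Float32Array(count);
for (var i = 0; i < count; i++) {
    offsets[i * 3 + 0] = Math.random() * 600 - 300;
    offsets[i * 3 + 1] = Math.random() * 600 - 300;
    offsets[i * 3 + 2] = Math.random() * 600 - 300;
    noisemax[i] = (i === 0) ? 10.0 : 100.0; // a different value for the first sphere
}
geometry.setAttribute('offset', new THREE.InstancedBufferAttribute(offsets, 3));
geometry.setAttribute('noisemax', new THREE.InstancedBufferAttribute(noisemax, 1));
// `material` is the ShaderMaterial from the question.
var instancedSpheres = new THREE.Mesh(geometry, material);
instancedSpheres.frustumCulled = false; // offsets are applied in the shader
scene.add(instancedSpheres);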
In three.js I want to change the skybox textures dynamically. I have added two scenes here, so I attached one scene to my skybox, and I decided to remove the skybox before switching from one texture to another. The skybox textures are also used as an environment map that reflects on the object. When I change the texture by selecting from the given list, I check the texture with a switch case and add it, and the reflection on the object works. But if I switch skyBoxMaterial to the selected texture, the environment map changes successfully, but the reflection on the object does not change to the current environment map.
Following is my code:
function envmap(envmap)
{
switch(envmap)
{
case 'canary':
var urls = [
'textures/cube/canary/pos-x.png',
'textures/cube/canary/neg-x.png',
'textures/cube/canary/pos-y.png',
'textures/cube/canary/neg-y.png',
'textures/cube/canary/pos-z.png',
'textures/cube/canary/neg-z.png'
];
//var cubemap = THREE.ImageUtils.loadTextureCube(urls); // load textures
break;
case 'Park2':
var urls = [
'textures/cube/Park2/posx.jpg',
'textures/cube/Park2/negx.jpg',
'textures/cube/Park2/posy.jpg',
'textures/cube/Park2/negy.jpg',
'textures/cube/Park2/posz.jpg',
'textures/cube/Park2/negz.jpg'
];
//var cubemap = THREE.ImageUtils.loadTextureCube(urls); // load textures
break;
case 'Park3Med':
var urls = [
'textures/cube/Park3Med/px.jpg',
'textures/cube/Park3Med/nx.jpg',
'textures/cube/Park3Med/py.jpg',
'textures/cube/Park3Med/ny.jpg',
'textures/cube/Park3Med/pz.jpg',
'textures/cube/Park3Med/nz.jpg'
];
//var cubemap = THREE.ImageUtils.loadTextureCube(urls); // load textures
break;
case 'Bridge2':
var urls = [
'textures/cube/Bridge2/posx.jpg',
'textures/cube/Bridge2/negx.jpg',
'textures/cube/Bridge2/posy.jpg',
'textures/cube/Bridge2/negy.jpg',
'textures/cube/Bridge2/posz.jpg',
'textures/cube/Bridge2/negz.jpg'
];
//var cubemap = THREE.ImageUtils.loadTextureCube(urls); // load textures
break;
case 'pisa':
var urls = [
'textures/cube/pisa/px.png',
'textures/cube/pisa/nx.png',
'textures/cube/pisa/py.png',
'textures/cube/pisa/ny.png',
'textures/cube/pisa/pz.png',
'textures/cube/pisa/nz.png'
];
//var cubemap = THREE.ImageUtils.loadTextureCube(urls); // load textures
break;
default:
var urls = [
'textures/cube/canary/pos-x.png',
'textures/cube/canary/neg-x.png',
'textures/cube/canary/pos-y.png',
'textures/cube/canary/neg-y.png',
'textures/cube/canary/pos-z.png',
'textures/cube/canary/neg-z.png'
];
break;
}
var cubemap = THREE.ImageUtils.loadTextureCube(urls, undefined, render); // load textures
cubemap.needsUpdate = true;
cubemap.format = THREE.RGBFormat;
var shader = THREE.ShaderLib['cube']; // init cube shader from built-in lib
shader.uniforms['tCube'].value = cubemap; // apply textures to shader
// create shader material
var skyBoxMaterial = new THREE.ShaderMaterial( {
    fragmentShader: shader.fragmentShader,
    vertexShader: shader.vertexShader,
    uniforms: shader.uniforms,
    depthWrite: false,
    side: THREE.BackSide
});
alert(skybox);
if (skybox)
{
    sceneCube.remove(skybox);
}
skybox = new THREE.Mesh( new THREE.BoxGeometry( 1000, 1000, 1000 ), skyBoxMaterial );
sceneCube.add( skybox );
var reflectionMaterial = new THREE.MeshBasicMaterial({
    color: 0xcccccc,
    envMap: cubemap
});
var object = scene.getObjectByName ("myname", true);
object.traverse( function ( child ) {
    if ( child instanceof THREE.Mesh )
    {
        //child.geometry.computeFaceNormals();
        var geometry = child.geometry;
        //console.log(geometry);
        material = child.material;
        material.transparent = true;
        child.material.needsUpdate = true;
        child.material.map = reflectionMaterial;
        mesh = new THREE.Mesh(geometry, reflectionMaterial);
        scene.add(mesh);
    }
});
}
After a workaround I fixed it myself.
I used child.material = reflectionMaterial; instead of child.material.map = reflectionMaterial; and the reflection now works on the object's material.
So now, while switching from one environment to another, the reflection is applied to my object too,
and there is no need to add the new mesh below:
mesh = new THREE.Mesh(geometry, reflectionMaterial);
scene.add(mesh);
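A minimal sketch of the fix inside the traverse callback:
object.traverse( function ( child ) {
    if ( child instanceof THREE.Mesh )
    {
        child.material = reflectionMaterial; // assign the material itself, not material.map
        child.material.needsUpdate = true;
    }
});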