My question might seem basic to most of you, but I'm still learning three.js and I'm having difficulty understanding it.
So right now, I've created a slider using images (I'm iterating through each texture). Everything works fine, but I'd like to replace two of my images with videos. Right now, my code looks like this:
(...)
// Shared three.js handles for the slider: camera, scene, renderer,
// and a single texture loader reused for every slide.
const gl = {
  camera: new THREE.PerspectiveCamera(75, innerWidth / innerHeight, 0.01, 1000),
  scene: new THREE.Scene(),
  renderer: new THREE.WebGLRenderer(),
  loader: new THREE.TextureLoader()
};

// Slide textures, loaded eagerly from their hosted URLs.
const textures = [
  'https://i.ibb.co/myimage01.jpg',
  'https://i.ibb.co/myimage02.jpg',
  'https://i.ibb.co/myimage03.jpg',
  'https://i.ibb.co/myimage04.jpg'
].map((url) => gl.loader.load(url));
(...)
Reading the docs, I understand that I can't load a video using the texture loader, right? Is there a workaround? How can I replace my images with some .mp4 videos hosted on the web without having to rewrite all my code?
I've tried doing the following :
// ATTEMPT (does not work — see BUG note below).
// BUG(review): THREE.VideoTexture expects an HTMLVideoElement (a media
// element) as its first argument, not a URL string. Passing a string here
// yields a texture with no pixel data, which is why the slide renders as
// a black screen without any console errors.
var videoTexture01 = new THREE.VideoTexture(
'https://linktomyvideo.mp4'
);
videoTexture01.minFilter = THREE.LinearFilter;
videoTexture01.magFilter = THREE.LinearFilter;
// NOTE(review): THREE.RGBFormat was removed from three.js in r137+; on
// newer releases use THREE.RGBAFormat (the default) — confirm which
// three.js version this code targets.
videoTexture01.format = THREE.RGBFormat;
const gl = {
camera: new THREE.PerspectiveCamera(75, innerWidth / innerHeight, 0.01, 1000),
scene: new THREE.Scene(),
renderer: new THREE.WebGLRenderer(),
loader: new THREE.TextureLoader()
};
// First slide uses the (broken) video texture; the images still load fine.
const textures = [
videoTexture01,
gl.loader.load('https://i.ibb.co/myimage02.jpg'),
gl.loader.load('https://i.ibb.co/myimage03.jpg'),
gl.loader.load('https://i.ibb.co/myimage04.jpg')
];
but the video doesn't show up — I only get a black screen (the images continue to work fine, and I don't have any errors in the console).
My bad! Reading the docs, I've found the answer.
For those having the same problem, here is my workaround. I started by adding the video to my HTML file:
<!-- Hidden video element used as the source of a THREE.VideoTexture.
     FIX: added `muted` — browsers' autoplay policies block a programmatic
     video01.play() on an unmuted video with no prior user gesture, which
     would leave the texture black. -->
<video id="video01" loop muted crossOrigin="anonymous" playsinline style="display:none">
<source src="https://linktomyvideo.mp4"
type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
</video>
And then in my js :
// Grab the hidden <video> element and start playback.
// FIX 1: the original assigned to an undeclared `video01`, creating an
// implicit global — now a proper const.
// FIX 2: play() returns a promise that rejects when the browser blocks
// autoplay; handle it instead of leaving it floating.
const video01 = document.getElementById('video01');
video01.play().catch((err) => {
  console.error('Could not start video playback:', err);
});

// A VideoTexture wraps the element and refreshes its pixels every frame.
const videoTexture01 = new THREE.VideoTexture(video01);

const gl = {
  camera: new THREE.PerspectiveCamera(75, innerWidth / innerHeight, 0.01, 1000),
  scene: new THREE.Scene(),
  renderer: new THREE.WebGLRenderer(),
  loader: new THREE.TextureLoader()
};

// Slide textures: the first is the live video, the rest are static images.
const textures = [
  videoTexture01,
  gl.loader.load('https://i.ibb.co/myimage02.jpg'),
  gl.loader.load('https://i.ibb.co/myimage03.jpg'),
  gl.loader.load('https://i.ibb.co/myimage04.jpg')
];
Related
I'm using CubeTextureLoader for a 3D background of 2 images and it isn't working, even though the path to the images is fine and the images work fine in TextureLoader.
import * as THREE from "three"
import {OrbitControls} from "three/examples/jsm/controls/OrbitControls.js"
import * as dat from "dat.gui"
import nebula from './images/nebula.jpg'
import stars from './images/stars.jpg'
// WebGL renderer with a transparent drawing buffer, shadow mapping on,
// a solid clear color, and a full-window canvas attached to the page.
const renderer = new THREE.WebGLRenderer({ alpha: true });
renderer.shadowMap.enabled = true;
renderer.setClearColor(0x590d18, 1);
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
// Scene plus a perspective camera pulled up and back from the origin,
// driven by orbit controls.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(
  75,
  window.innerWidth / window.innerHeight,
  0.1,
  1000
);

const orbit = new OrbitControls(camera, renderer.domElement);
camera.position.set(-10, 30, 30);
orbit.update();

// Small axes gizmo at the origin for orientation while debugging.
const axesHelper = new THREE.AxesHelper(2);
scene.add(axesHelper);
// Ground: a white, double-sided plane rotated flat that receives shadows,
// plus a matching grid helper.
const planeGeometry = new THREE.PlaneGeometry(30, 30);
const planeMaterial = new THREE.MeshStandardMaterial({
  color: 0xffffff,
  side: THREE.DoubleSide
});
const plane = new THREE.Mesh(planeGeometry, planeMaterial);
plane.rotation.x = -0.5 * Math.PI;
plane.receiveShadow = true;
scene.add(plane);

const gridHelper = new THREE.GridHelper(30);
scene.add(gridHelper);
// Blue sphere that bounces in the animation loop and casts a shadow.
const sphereGeometry = new THREE.SphereGeometry(4, 50, 50);
const sphereMaterial = new THREE.MeshStandardMaterial({
  color: 0x0000ff,
  wireframe: false
});
const sphere = new THREE.Mesh(sphereGeometry, sphereMaterial);
sphere.position.set(-10, 4, 4);
sphere.castShadow = true;
scene.add(sphere);
// Narrow white spotlight high up to the side (with a helper so its cone
// is visible) and exponential fog over the whole scene.
const spotLight = new THREE.SpotLight(0xffffff);
spotLight.position.set(-100, 100, 0);
spotLight.castShadow = true;
spotLight.angle = 0.2;
scene.add(spotLight);

const sLightHelper = new THREE.SpotLightHelper(spotLight);
scene.add(sLightHelper);

scene.fog = new THREE.FogExp2(0xffffff, 0.02);
// Loading a single image as a flat background works:
//   const textureLoader = new THREE.TextureLoader()
//   scene.background = textureLoader.load(stars)   // THIS WORKS
// ...whereas the cube-map background below did not ("THIS DONT").
// FIX: the stray words "THIS DONT" trailing the constructor line were a
// syntax error; they now live in this comment. Note that CubeTextureLoader
// requires all six face images to be square (1:1) to display.
const cubeTextureLoader = new THREE.CubeTextureLoader();
scene.background = cubeTextureLoader.load([
  stars,
  stars,
  nebula,
  stars,
  stars,
  stars
]);
// dat.GUI control panel bound to the tweakable options below.
const gui = new dat.GUI();

const options = {
  sphereColor: '#ffea00',
  wireframe: false,
  speed: 0.01,
  angle: 0.2,
  penumbra: 0,
  intensity: 1
};

// Color and wireframe changes apply immediately; the numeric sliders are
// read every frame inside animate().
gui.addColor(options, 'sphereColor').onChange((value) => {
  sphere.material.color.set(value);
});
gui.add(options, 'wireframe').onChange((value) => {
  sphere.material.wireframe = value;
});
gui.add(options, 'speed', 0, 0.1);
gui.add(options, 'angle', 0, 1);
gui.add(options, 'penumbra', 0, 1);
gui.add(options, 'intensity', 0, 1);
let step = 0;

// Per-frame update: bounce the sphere, sync the spotlight with the GUI
// sliders, and render the scene.
// BUG FIX: the original body referenced `box`, which is never defined in
// this script, so animate() threw a ReferenceError on the first frame and
// renderer.render was never reached (hence the black screen regardless of
// the background). The two box.rotation lines have been removed.
function animate(time) {
  step += options.speed;
  sphere.position.y = 10 * Math.abs(Math.sin(step));
  spotLight.angle = options.angle;
  spotLight.penumbra = options.penumbra;
  spotLight.intensity = options.intensity;
  sLightHelper.update();
  renderer.render(scene, camera);
}
renderer.setAnimationLoop(animate);
I was expecting a 3D background, but instead I get the original black color.
I'm using CubeTextureLoader for a 3D background of 2 images and it isn't working, even though the path to the images is fine and the images work fine in TextureLoader.
I had the same problem a while back and it turns out all the faces of the CubeTextureLoader must be squares.
So, cropping the images to have a 1:1 dimension might work.
I am working with Three.js and I encounter strange behavior: when I manipulate the camera location (after the user's gestures), these objects disappear and come back after additional manipulations. Sometimes they don't appear at first and only get shown after relocating the camera.
The camera is defined that way:
camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 0.01, 100000);
The objects are defined that way:
// Asynchronously load a marker texture and, once the image arrives, add it
// to the scene as a billboard sprite.
new THREE.TextureLoader().load(imageUrl,
function(texture){
// Linear magnification; trilinear (mipmapped) minification.
texture.magFilter = THREE.LinearFilter;
texture.minFilter = THREE.LinearMipMapLinearFilter;
// NOTE(review): `useScreenCoordinates` was removed from SpriteMaterial in
// newer three.js releases — confirm the three.js version this targets.
var material = new THREE.SpriteMaterial({map: texture, useScreenCoordinates: true});
var marker = new THREE.Sprite(material);
scene.add(marker); })
Thanks!
I am experimenting with GLTF and Three.js, and I am having a devil of a time trying to get animations to work. My end goal is to be able to create keyframe animated meshes in Blender, export them to GLTF, and use them in Aframe-based WebVR scenes. At the moment, however, I'm just trying to get them to load and animate in a simple Three.js test harness page.
I'm trying to do a very basic test to get this working. I took Blender's default cube scene, removed the camera and the light, and created a keyframe animation to spin the cube 360 degrees around the Z axis. I then exported that cube to GLTF. First, I tried the GLTF export add on, and then I tried exporting it to Collada and using Cesium's tool to convert it to GLTF. Both versions of the GLTF file load and render the cube properly, but the cube does not animate.
I was able to use this same blend file and export to JSON using Three's own JSON export add on for Blender, and everything works fine there. So, I feel like I must be doing something stupid in my Javascript or there is something about GLTF I am missing.
Can anyone tell me what I'm doing wrong here? I'm getting to hair-yanking time here.
Here's the Javascript I'm trying to use for the GLTF version (specifically the binary version from Cesium's tool):
// Module-level handles shared between init3D(), loadScene() and render().
var scene = null;
var camera = null;
var renderer = null;
// Animation mixer for the loaded glTF model; stays null until the model
// finishes loading — render() skips updating it while null.
var mixer = null;
// Clock used to feed real elapsed time into the mixer each frame.
var clock = new THREE.Clock();
// Build the scene, camera, renderer and lights for the test harness.
function init3D() {
    scene = new THREE.Scene();
    camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 1000);

    renderer = new THREE.WebGLRenderer();
    renderer.setSize(window.innerWidth, window.innerHeight);
    document.body.appendChild(renderer.domElement);

    // Dim ambient fill plus a single point light up and to the left.
    scene.add(new THREE.AmbientLight(0x080818));
    var pointLight = new THREE.PointLight(0xffffff, 1, 100);
    pointLight.position.set(-5, 1, 5);
    scene.add(pointLight);

    // Pull the camera back and slightly up so the cube is in view.
    camera.position.set(0, 1.5, 5);
}
// Load the glTF scene, wire up its first animation clip, and start the
// render loop once the model is ready.
function loadScene() {
    // Instantiate a loader
    var loader = new THREE.GLTFLoader();
    // Load a glTF resource
    loader.load('gltf/SpinCube.glb',
        // onLoad
        function (gltf) {
            var model = gltf.scene;
            scene.add(model);
            // FIX: guard against a file exported without animations, which
            // would otherwise throw on gltf.animations[0].
            if (gltf.animations && gltf.animations.length > 0) {
                mixer = new THREE.AnimationMixer(model);
                mixer.clipAction(gltf.animations[0]).play();
            } else {
                console.warn('glTF file contains no animations');
            }
            render();
        },
        // onProgress — unused
        undefined,
        // FIX: surface loader failures instead of failing silently.
        function (error) {
            console.error('Error loading glTF resource:', error);
        });
}
// Animation loop: advance the mixer by the real elapsed time (when a
// model has finished loading) and draw the frame.
function render() {
    requestAnimationFrame(render);
    var delta = clock.getDelta();
    if (mixer !== null) {
        mixer.update(delta);
    }
    renderer.render(scene, camera);
}
// Bootstrap: build the scene, then kick off the async model load (which
// starts the render loop from its onLoad callback).
init3D();
loadScene();
The problem appeared to have been a bug in the version of Three.js's GLTF2Loader that I was using at the time. I pulled a copy of Three.js from the dev branch, and my animations showed correctly.
I am using Three JS to blend a video texture onto a canvas.
I'm trying to make it so that the background of the videotexture is transparent but what ends up happening is the video is only transparent on some computers and not all.
Below is a screenshot of what it looks like on a computer where it is not showing as transparent. (ironically this will appear transparent if your computer does not suffer this problem)
I am trying to figure out why this is. Here's what I've concluded:
This is not browser dependent as the problem occurs on different browsers.
This is not OS dependent. I've seen this problem sometimes on Mac and sometimes on Windows.
This is not monitor dependent because I switched monitors with my QA guy. My QA guy currently sees the transparent box. On my computer I don't. Switching monitors with my QA guy results in me using his monitor but not seeing the transparent box. He however still sees the transparent box despite using my monitor. Ergo, it isn't a monitor problem.
So the question is, what is happening here and what could be causing this transparency problem?
JS Fiddle code
// Build the container, the two cameras and scenes, and the renderer;
// kick off the async sky-dome load; then start the video and render loop.
function init() {
// create container, scenes and camera
// NOTE(review): THREE.ImageUtils is deprecated in modern three.js —
// TextureLoader with loader.crossOrigin is the current equivalent; this
// code appears to target an older release.
THREE.ImageUtils.crossOrigin = true;
container = document.createElement('div');
container.className = "ThreeJSCanvas";
container.id = "ThreeJSCanvas";
document.body.appendChild(container);
camera = new THREE.PerspectiveCamera(50, window.innerWidth / (window.innerHeight - 61), 1, 2000);
camera.position.z = 100;
cameraInterface = new THREE.PerspectiveCamera(50, window.innerWidth / (window.innerHeight - 61), 1, 2000);
// NOTE(review): this aliases both cameras to the SAME Vector3 instance,
// so any later move of one camera moves the other — confirm the sharing
// is intentional.
cameraInterface.position = camera.position;
cameraInterface.position.z = 100;
sceneSprites = new THREE.Scene();
sceneSky = new THREE.Scene();
//renderer
renderer3D = new THREE.WebGLRenderer({
antialias: true,
preserveDrawingBuffer: true,
devicePixelRatio: 1
});
// autoClear off: render() clears once manually, then layers the sky and
// sprite scenes on top of each other.
renderer3D.autoClear = false;
renderer3D.setSize(window.innerWidth, (window.innerHeight - 61));
container.appendChild(renderer3D.domElement);
// load background image
var loader = new THREE.OBJLoader();
loader.load('https://dl.dropboxusercontent.com/s/1cq5i4rio1iudwe/skyDome.obj', function (object) {
skyMesh = object;
var ss = THREE.ImageUtils.loadTexture('https://dl.dropboxusercontent.com/s/f7jeyl6cl03aelu/background.jpg');
// NOTE(review): despite its name, `texture` is a material. wrapS/wrapT/
// minFilter/magFilter are texture properties and are presumably ignored
// by MeshBasicMaterial — they look intended for `ss` instead; verify.
var texture = new THREE.MeshBasicMaterial({
map: ss,
wrapS: THREE.RepeatWrapping,
wrapT: THREE.RepeatWrapping,
transparent: true,
minFilter: THREE.LinearFilter,
magFilter: THREE.LinearFilter,
opacity: 0.7
});
skyMesh.position.y = -80;
skyMesh.children[0].material = texture;
sceneSky.add(skyMesh);
});
// The video plane does not depend on the OBJ above, so it is created
// immediately rather than inside the load callback.
createVideo();
animate()
}
// Build a plane in the sprite scene whose material samples the page's
// <video> element, then start playback.
function createVideo() {
    video = document.getElementById( 'video' );
    video.setAttribute('crossorigin', 'anonymous');

    // The texture pulls a fresh frame from the element on every render.
    var texture = new THREE.VideoTexture(video);
    texture.minFilter = THREE.LinearFilter;
    texture.magFilter = THREE.LinearFilter;
    texture.format = THREE.RGBAFormat;

    // Additive, transparent, no depth test: the video composites over
    // whatever the sky scene drew underneath it.
    var material = new THREE.MeshBasicMaterial({
        map: texture,
        color: 0xffffff,
        blending: THREE.AdditiveBlending,
        transparent: true,
        opacity: 1,
        depthTest: false
    });

    var plane = new THREE.PlaneBufferGeometry(125, 125);
    sceneSprites.add(new THREE.Mesh(plane, material));

    video.load();
    video.play();
}
// Drive the render loop off requestAnimationFrame.
function animate() {
    requestAnimationFrame(animate);
    render();
}
// Clear once, then layer the sky scene and the sprite scene on top of it
// (renderer3D.autoClear is false, so the second pass does not erase the
// first).
function render() {
    renderer3D.clear();
    renderer3D.render(sceneSky, camera);
    renderer3D.render(sceneSprites, cameraInterface);
}
// Entry point.
init()
EDIT: ADDED JS FIDDLE, edited code to reflect JS Fiddle
https://jsfiddle.net/ytmbL69q/
Guessing it could be a graphic card issue depending on the pc being used.
Three.js is a library that is used on top of WebGL for simplicity, plus a lot of other goodies.
That said, graphics cards play a huge role in WebGL and in how shaders display graphics; not all cards support everything, and not all are universal — maybe hence your issue. What you can do is first check your machine's graphics hardware (brand, model, etc.). Vendors generally provide documentation describing each card's level of GLSL support; alternatively, look at writing your own shaders to accommodate the differences.
"at this time I was not able to comment"
I am trying to create a simple rectangle with a .mp4 video as texture. As per three.js documentation(http://threejs.org/docs/#Reference/Textures/Texture) this should be straight forward.
When I put in the link of the video, all I get is a black colored box with no texture on it. I have tested the code by replacing the video with a .jpg image, and it works fine. Can someone please explain to me what I am doing wrong?
I have already seen the examples in which video is played by first linking it to a video element and then copy the frames on a canvas. I want to try the direct way as mentioned in the three.js documentation.
Think of video as a sequence of images. So to "play" this video on your 3D object - you'll have to pass every single frame of that sequence to your material and then update that material.
Good place to start is here: https://github.com/mrdoob/three.js/wiki/Updates
And here: http://stemkoski.github.io/Three.js/Video.html
Step 1:
Add a video to your HTML and "hide" it:
<!-- Hidden source video: muted + autoplay lets browsers start playback
     without a user gesture; the VideoTexture, not this element, is what
     gets displayed. -->
<video
  id="video"
  src="some-video.mp4"
  width="320"
  height="240"
  playsinline
  webkit-playsinline
  muted
  loop
  autoplay
  style="display: none;"></video>
Step 2:
// Look up the hidden <video> element (the markup's autoplay/muted
// attributes mean it is already playing) and wrap it in a texture.
const video = document.getElementById('video');
const videoTexture = new THREE.VideoTexture(video);

// Unlit material; toneMapped: false keeps the video colors unmodified.
const videoMaterial = new THREE.MeshBasicMaterial({
  map: videoTexture,
  side: THREE.FrontSide,
  toneMapped: false
});

// A 1x1 plane acts as the screen the video is projected onto.
const screen = new THREE.PlaneGeometry(1, 1);
const videoScreen = new THREE.Mesh(screen, videoMaterial);
scene.add(videoScreen);
In addition to Haos' answer, I needed to set videoTexture.needsUpdate = true; and videoMaterial.needsUpdate = true;. Also I've played the video on onloadeddata.
// Fetch the video element and begin playback as soon as the first frame
// of data is available.
const video = document.getElementById("video");
video.onloadeddata = () => {
  video.play();
};

// Texture backed by the element, flagged for an initial GPU upload.
const videoTexture = new THREE.VideoTexture(video);
videoTexture.needsUpdate = true;

// Unlit material; toneMapped: false leaves the video colors untouched.
const videoMaterial = new THREE.MeshBasicMaterial({
  map: videoTexture,
  side: THREE.FrontSide,
  toneMapped: false,
});
videoMaterial.needsUpdate = true;

// 10x10 plane used as the projection screen.
const screen = new THREE.PlaneGeometry(10, 10);
const videoScreen = new THREE.Mesh(screen, videoMaterial);
scene.add(videoScreen);