Event throws errors on shapeGeometry mesh with react-three-fiber - react-three-fiber

I was doing some experimenting with react-three-fiber, I took the sample code from react-three-fiber and added some code to create a plane with rounded edges.
When I add an event to the plane with the rounded edges and try to trigger the event the browser throws an error:
error — is there something I'm doing wrong that I'm not seeing?
import ReactDOM from 'react-dom'
import React, { useRef, useState } from 'react'
import { Canvas, useFrame } from '@react-three/fiber'
import * as THREE from 'three';
function RoundedRect(width,height,radius){
const roundedRectShape = new THREE.Shape();
roundedRectShape.moveTo( -(width/2), -(height/2) + radius );
roundedRectShape.lineTo( -(width/2), (height/2) - radius );
roundedRectShape.quadraticCurveTo( -(width/2), (height/2), -(width/2) + radius, (height/2) );
roundedRectShape.lineTo( (width/2) - radius, (height/2) );
roundedRectShape.quadraticCurveTo( (width/2), (height/2), (width/2), (height/2) - radius );
roundedRectShape.lineTo( (width/2), -(height/2) + radius );
roundedRectShape.quadraticCurveTo( (width/2), -(height/2), (width/2) - radius, -(height/2) );
roundedRectShape.lineTo( -(width/2) + radius, -(height/2) );
roundedRectShape.quadraticCurveTo( -(width/2), -(height/2), -(width/2) , -(height/2) + radius );
return roundedRectShape
}
function Temp(props){
// This reference will give us direct access to the mesh
const mesh = useRef()
// Set up state for the hovered and active state
const [hovered, setHover] = useState(false)
const [active, setActive] = useState(1)
// Subscribe this component to the render-loop, rotate the mesh every frame
useFrame((state, delta) => (mesh.current.rotation.y += delta))
// Return view, these are regular three.js elements expressed in JSX
return (
<mesh
{...props}
ref={mesh}
scale={active}
onClick={(event) => setActive(active+0.1)}
onPointerOver={(event) => setHover(true)}
onPointerOut={(event) => setHover(false)}
>
<shapeGeometry args={RoundedRect(1,1,0.2)}/>
<meshStandardMaterial color={hovered ? 'hotpink' : 'orange'} />
</mesh>
)
}
function Box(props) {
// This reference will give us direct access to the mesh
const mesh = useRef()
// Set up state for the hovered and active state
const [hovered, setHover] = useState(false)
const [active, setActive] = useState(1)
// Subscribe this component to the render-loop, rotate the mesh every frame
useFrame((state, delta) => (mesh.current.rotation.y += delta))
// Return view, these are regular three.js elements expressed in JSX
return (
<mesh
{...props}
ref={mesh}
scale={active}
onClick={(event) => setActive(active+0.1)}
onPointerOver={(event) => setHover(true)}
onPointerOut={(event) => setHover(false)}
>
<boxGeometry args={[1, 1, 1]} />
<meshStandardMaterial color={hovered ? 'hotpink' : 'orange'} />
</mesh>
)
}
ReactDOM.render(
<Canvas style={{height:'100%',width:'100%'}}>
<ambientLight />
<pointLight position={[10, 10, 10]} />
<Box position={[-1.2, 0, 0]} />
<Temp position={[1.2, 0, 0]} />
</Canvas>,
document.getElementById('root')
)

I found the solution: in the 'args' from the shapeGeometry I use args={RoundedRect(1,1,0.2)} and I need to use args={[RoundedRect(1,1,0.2)]} (see square brackets); the strange thing is that the code works fine without events.

Related

Why is shader not working on specific models

As a Matrix fan and newbie to three.js/r3f/react/shader, I've decided to try them out with a matrix-like shader.
I've gone through docs and found some sample codes and managed to combine them, here's what i've got: https://codesandbox.io/s/react-fox-matrix-shader-test-g3i4n?file=/src/App.js
import React, { Suspense, useRef } from "react";
import { Canvas, useLoader, useFrame, useThree } from "@react-three/fiber";
import { GLTFLoader } from "three/examples/jsm/loaders/GLTFLoader";
import fragmentShader from "./fragment.js";
import vertexShader from "./vertex.js";
import * as THREE from "three";
import { OrbitControls } from "@react-three/drei";
const clock = new THREE.Clock();
const uniforms = {
u_resolution: { value: { x: window.innerWidth, y: window.innerHeight } },
u_time: { value: 0.0 }
};
const matrixMaterial = new THREE.ShaderMaterial({
vertexShader: vertexShader,
fragmentShader: fragmentShader,
uniforms
});
const Model = () => {
const group = useRef();
const gltf = useLoader(GLTFLoader, "models/scene.gltf"); //Modify to arwing.glb and shader is not working
const model = gltf.scene;
model.traverse((m) => {
if (m instanceof THREE.Mesh) {
m.material = matrixMaterial;
m.material.side = THREE.DoubleSide;
}
});
useFrame(() => {
matrixMaterial.uniforms.u_time.value = clock.getElapsedTime();
group.current.rotation.y += 0.004;
});
return <primitive ref={group} object={model} />;
};
export default function App() {
const canvasRef = useRef();
return (
<>
<Canvas
ref={canvasRef}
style={{ background: "white" }}
onCreated={({ camera }) => {
camera.position.set(0, 20, 30);
}}
>
<directionalLight />
<Suspense fallback={null}>
<Model />
</Suspense>
<OrbitControls />
</Canvas>
</>
);
}
const vertexShader = `
varying vec2 vUv;
void main()
{
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
}
`;
export default vertexShader;
Yet not all models work if you switch the model on line 24. What might be the cause? I believe I'm missing some background knowledge on graphics or gltf/glb.
Any suggestions on how to debug such issue is appreciated.
loading the arwing model and changing the shader to just output a color works, so it seems to me the model does not have UV coordinates set up.
You will have to use some sort of 3d editing program to add the appropriate UV coordinates to the model.
I don't know if this helps, but sometimes a mesh can have multiple materials.
For example, if you want a box to have different textures on each side, you could have up to 6 materials for 1 mesh.
I would check if the material is an instance of an array and would apply the shader for each material
if (m.material instanceof Array) {
for (let i = 0; i < m.material.length; i ++) {
m.material[i] = matrixMaterial;
m.material[i].side = THREE.DoubleSide;
}
} else {
m.material = matrixMaterial;
m.material.side = THREE.DoubleSide;
}

How can I drag an object in x and z constrained in y in react-three/fiber with an orthographic camera which may be moved via orbitcontrols?

I would like to be able to drag an object across a plane (think of a piece on a chess board) in a canvas using React-three-fiber and an orthographic camera.
Here is an example (not mine) of that working with a fixed camera position: https://codepen.io/kaolay/pen/bqKjVz
But I would like to be able to move the camera too - so I have added Orbitcontrols which are disabled when the object is being dragged.
I have a code sandbox here with my attempt based many other's examples:
https://codesandbox.io/s/inspiring-franklin-2r3ri?file=/src/Obj.jsx
The main code is in two files, App.jsx with the canvas, camera, and orbitcontrols. And Obj.jsx with the mesh that gets dragged as well as the dragging logic inside of a use-gesture useDrag function.
App.jsx
import React, { useState } from "react";
import { Canvas } from "@react-three/fiber";
import Obj from "./Obj.jsx";
import { OrthographicCamera, OrbitControls } from "@react-three/drei";
import * as THREE from "three";
export default function App() {
const [isDragging, setIsDragging] = useState(false);
return (
<Canvas style={{ background: "white" }} shadows dpr={[1, 2]}>
<ambientLight intensity={0.5} />
<directionalLight
intensity={0.5}
castShadow
shadow-mapSize-height={512}
shadow-mapSize-width={512}
/>
<mesh rotation={[-Math.PI / 2, 0, 0]} receiveShadow>
<planeBufferGeometry attach="geometry" args={[10, 10]} receiveShadow />
<meshPhongMaterial
attach="material"
color="#ccc"
side={THREE.DoubleSide}
receiveShadow
/>
</mesh>
<Obj setIsDragging={setIsDragging} />
<OrthographicCamera makeDefault zoom={50} position={[0, 40, 200]} />
<OrbitControls minZoom={10} maxZoom={50} enabled={!isDragging} />
</Canvas>
);
}
Obj.jsx (with the offending code in the Use Drag function)
import React, { useState } from "react";
import { useDrag } from "@use-gesture/react";
import { animated, useSpring } from "@react-spring/three";
import { useThree } from "@react-three/fiber";
import * as THREE from "three";
function Obj({ setIsDragging }) {
const { camera } = useThree();
const [pos, setPos] = useState([0, 1, 0]);
const { size, viewport } = useThree();
const aspect = size.width / viewport.width;
const [spring, api] = useSpring(() => ({
// position: [0, 0, 0],
position: pos,
scale: 1,
rotation: [0, 0, 0],
config: { friction: 10 }
}));
const bind = useDrag(
({ active, delta, movement: [x, y], velocity, timeStamp, memo = 0 }) => {
if (active) {
//// THIS IS THE CODE THAT I KNOW IS NOT WORKING /////
let vDir = new THREE.Vector3();
let vPos = new THREE.Vector3(
(x / window.innerWidth) * 2 - 1,
-(y / window.innerHeight) * 2 + 1,
0.5
).unproject(camera);
vDir.copy(vPos).sub(camera.position).normalize();
let flDistance = -camera.position.z / vDir.z;
vPos = vPos.copy(camera.position).add(vDir.multiplyScalar(flDistance));
const arbitraryFactor = 1; // I suspect this has to reflect the distance from camera in all dims...
setPos([vPos.x * arbitraryFactor, 1.5, -vPos.y * arbitraryFactor]);
//// END /////
}
setIsDragging(active);
api.start({
// position: active ? [x / aspect, -y / aspect, 0] : [0, 0, 0],
position: pos,
scale: active ? 1.2 : 1,
rotation: [y / aspect, x / aspect, 0]
});
return timeStamp;
}
);
return (
<animated.mesh {...spring} {...bind()} castShadow>
<dodecahedronBufferGeometry
castShadow
attach="geometry"
args={[1.4, 0]}
/>
<meshNormalMaterial attach="material" />
</animated.mesh>
);
}
export default Obj;
A few references that have been helpful but have not got me there yet!
Mouse / Canvas X, Y to Three.js World X, Y, Z
https://codesandbox.io/s/react-three-fiber-gestures-forked-lpfv3?file=/src/App.js:1160-1247
https://codesandbox.io/embed/react-three-fiber-gestures-08d22?codemirror=1
https://codesandbox.io/s/r3f-lines-capture-1gkvp
https://github.com/pmndrs/react-three-fiber/discussions/641
And finally relinking to my code sandbox example again:
https://codesandbox.io/s/inspiring-franklin-2r3ri?file=/src/Obj.jsx:0-1848
I took a different approach after I realized that React-Three-Fiber passes event info into useDrag, which contains the coordinate and Ray information I needed.
https://codesandbox.io/s/musing-night-wso9v?file=/src/App.jsx
App.jsx
import React, { useState } from "react";
import { Canvas } from "@react-three/fiber";
import Obj from "./Obj.jsx";
import { OrthographicCamera, OrbitControls } from "@react-three/drei";
import * as THREE from "three";
export default function App() {
const [isDragging, setIsDragging] = useState(false);
const floorPlane = new THREE.Plane(new THREE.Vector3(0, 1, 0), 0);
return (
<Canvas style={{ background: "white" }} shadows dpr={[1, 2]}>
<ambientLight intensity={0.5} />
<directionalLight
intensity={0.5}
castShadow
shadow-mapSize-height={512}
shadow-mapSize-width={512}
/>
<mesh
rotation={[-Math.PI / 2, 0, 0]}
position={[0, -0.1, 0]}
receiveShadow
>
<planeBufferGeometry attach="geometry" args={[10, 10]} receiveShadow />
<meshPhongMaterial
attach="material"
color="#ccc"
side={THREE.DoubleSide}
receiveShadow
/>
</mesh>
<planeHelper args={[floorPlane, 5, "red"]} />
<gridHelper args={[100, 100]} />
<Obj setIsDragging={setIsDragging} floorPlane={floorPlane} />
<OrthographicCamera makeDefault zoom={50} position={[0, 40, 200]} />
<OrbitControls minZoom={10} maxZoom={50} enabled={!isDragging} />
</Canvas>
);
}
Obj.jsx
import React, { useState, useRef } from "react";
import { useDrag } from "@use-gesture/react";
import { animated, useSpring } from "@react-spring/three";
import { useThree } from "@react-three/fiber";
import * as THREE from "three";
function Obj({ setIsDragging, floorPlane }) {
const [pos, setPos] = useState([0, 1, 0]);
const { size, viewport } = useThree();
const aspect = size.width / viewport.width;
let planeIntersectPoint = new THREE.Vector3();
const dragObjectRef = useRef();
const [spring, api] = useSpring(() => ({
// position: [0, 0, 0],
position: pos,
scale: 1,
rotation: [0, 0, 0],
config: { friction: 10 }
}));
const bind = useDrag(
({ active, movement: [x, y], timeStamp, event }) => {
if (active) {
event.ray.intersectPlane(floorPlane, planeIntersectPoint);
setPos([planeIntersectPoint.x, 1.5, planeIntersectPoint.z]);
}
setIsDragging(active);
api.start({
// position: active ? [x / aspect, -y / aspect, 0] : [0, 0, 0],
position: pos,
scale: active ? 1.2 : 1,
rotation: [y / aspect, x / aspect, 0]
});
return timeStamp;
},
{ delay: true }
);
return (
<animated.mesh {...spring} {...bind()} castShadow>
<dodecahedronBufferGeometry
ref={dragObjectRef}
attach="geometry"
args={[1.4, 0]}
/>
<meshNormalMaterial attach="material" />
</animated.mesh>
);
}
export default Obj;
That should not have taken me as long as it did, I hope that this is helpful for someone else!

Pinning Html to fixed location r3f

I'm trying to pin a drei Html element to the top left corner. I'm struggling with getting it to stay in the same location when rotating or moving the camera. This is my current attempt:
const { mouse2D, mouse3D, normal, plane } = useMemo(() => {
return {
mouse2D: new THREE.Vector2(),
mouse3D: new THREE.Vector3(),
normal: new THREE.Vector3(),
plane: new THREE.Plane(),
};
}, []);
const { width, height } = useWindowDimensions();
useFrame(({camera, raycaster, size}) => {
mouse2D.set(((0.02 * width) / size.width) * 2 - 1, ((-0.02 * height) / size.height) * 2 + 1);
raycaster.setFromCamera(mouse2D, camera);
camera.getWorldDirection(normal).negate();
plane.setFromNormalAndCoplanarPoint(normal, mouse3D);
raycaster.ray.intersectPlane(plane, mouse3D);
});
return <Html position={[mouse3D.x, mouse3D.y, mouse3D.z]}>stuff</Html>;
The Html position updates correctly on window resize but it doesn't update on camera rotation or camera moving. Anyone got any idea how to solve this?
If anyone is interested this is my current solution:
const { viewport } = useThree();
const groupRef = useRef<THREE.Group>(null!);
useEffect(() => {
const groupRefCopy = groupRef.current;
camera.add(groupRefCopy);
return () => {
camera.remove(groupRefCopy);
};
}, [camera, groupRef.current]);
return (
<group ref={groupRef} position={[-viewport.width / 3.5, viewport.height / 3.8, -5]} rotation={[0, 0, 0]}>
<Html style={{ opacity: show ? 1 : 0, width: menuWidth, userSelect: "none", pointerEvents: show? "auto" : "none" }}>
{...}
</Html>
</group>
);
3.5 and 3.8 works very well for my scene. If anyone knows how to get these constants dynamically from 3JS, please comment!

Trying to render a custom triangle in react-three-fiber and mesh flickers in and out of view

I am trying to generate some custom geometry on the fly with R3F. However, when I attempt to generate a triangle using a geometry object with vert array, I get odd behavior. The triangle doesn't render until I right click or alt tab away from the window or sometimes scroll - otherwise it is invisible. It flickers almost like it's z-fighting, but it is the only poly in the scene. I am pretty sure that I am winding the face in correct counterclockwise order (if I reverse order, tri never renders at all, as expected.)
What am I doing wrong/how can I get R3F to render custom geometry properly?
Minimal example here, all relevant code is in App.js:
https://codesandbox.io/s/wizardly-neumann-o78ry?file=/src/App.js
TIA
import React, { Suspense } from "react";
import "./styles.css";
import * as THREE from "three";
import { Canvas, useLoader, useThree, useFrame } from "react-three-fiber";
import UVTestImg from "../assets/tex/uvTest.jpg";
import TestBox from "./TestBox";
const RoomVerts = [
new THREE.Vector3(-1, -1, 1),
new THREE.Vector3(1, -1, 1),
new THREE.Vector3(1, 1, 1)
];
const RoomFaces = [new THREE.Face3(0, 1, 2)];
function Room(props) {
const { gl } = useThree();
const meshRef = React.useRef();
const geoRef = React.useRef();
//const testMap = useLoader(THREE.TextureLoader, UVTestImg);
//testMap.wrapS = THREE.RepeatWrapping;
//testMap.wrapT = THREE.RepeatWrapping;
// useFrame(() => {
// console.log("polycount:", gl.info.render.triangles); //verifies only 1 poly
// });
//boxbuffergeometry works, geometry does not.
return (
<mesh ref={meshRef}>
{/* <boxBufferGeometry args={[1, 1, 1]} /> */}
<geometry
geoRef={geoRef}
attach="geometry"
vertices={RoomVerts}
faces={RoomFaces}
/>
<meshStandardMaterial attach="material" color={0xff0000} />
</mesh>
);
}
export default function App() {
return (
<Canvas
className="main"
shadowMap
camera={{ fov: 50, position: [0, 0, 3.55] }}
onCreated={({ gl }) => {
gl.setClearColor("black");
}}
>
<ambientLight intensity={0.8} />
<Suspense fallback={TestBox}>
<Room />
</Suspense>
</Canvas>
);
}

THREE.JS import a blender OBJ - add material

I have a simple square plate made in Blender.
I'm trying to add a simple texture to this object. I tried so many tutorials and code I found on the net, I just cant make it happen
my code is
<html>
<style>
body{
margin: 0;
overflow: hidden;
}
</style>
<canvas id="myCanvas"></canvas>
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/93/three.min.js"></script>
<script src="http://threejs.org/examples/js/loaders/MTLLoader.js"></script>
<script src="http://threejs.org/examples/js/loaders/OBJLoader.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<script src="UnpackDepthRGBAShader.js"></script>
<script src="ShadowMapViewer.js"></script>
<script>
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(45 ,window.innerWidth / window.innerHeight, 1, 1000);
camera.position.set(0, 5, 14);
var renderer = new THREE.WebGLRenderer({
antialias: true
});
renderer.setClearColor(0xededed);
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize(window.innerWidth, window.innerHeight);
document.body.appendChild(renderer.domElement);
var loader = new THREE.TextureLoader();
var texture = loader.load( "./2.jpg" );
// it's necessary to apply these settings in order to correctly display the texture on a shape geometry
//texture.wrapS = texture.wrapT = THREE.RepeatWrapping;
texture.repeat.set( 0.05, 0.05 );
var controls = new THREE.OrbitControls(camera, renderer.domElement);
controls.enableDamping = true;
controls.dampingFactor = 0.25;
controls.enableZoom = true;
var keyLight = new THREE.DirectionalLight(new THREE.Color('hsl(30, 100%, 75%)'), 1.0);
keyLight.position.set(-100, 0, 100);
var fillLight = new THREE.DirectionalLight(new THREE.Color('hsl(240, 100%, 75%)'), 0.75);
fillLight.position.set(100, 0, 100);
var backLight = new THREE.DirectionalLight(0xffffff, 1.0);
backLight.position.set(100, 0, -100).normalize();
var manager = new THREE.LoadingManager();
manager.onProgress = function ( item, loaded, total ) {
console.log( item, loaded, total );
};
var textureLoader = new THREE.TextureLoader( manager );
var textureOBJ = textureLoader.load( './1.jpg' );
var onProgress = function ( xhr ) {
if ( xhr.lengthComputable ) {
var percentComplete = xhr.loaded / xhr.total * 100;
console.log( Math.round(percentComplete, 2) + '% downloaded' );
}
};
var onError = function ( xhr ) {
console.log(xhr);
};
var roundedRectShape = new THREE.Shape();
( function roundedRect( ctx, x, y, width, height, radius ) {
ctx.moveTo( x, y + radius );
ctx.lineTo( x, y + height - radius );
ctx.quadraticCurveTo( x, y + height, x + radius, y + height );
ctx.lineTo( x + width - radius, y + height );
ctx.quadraticCurveTo( x + width, y + height, x + width, y + height - radius );
ctx.lineTo( x + width, y + radius );
ctx.quadraticCurveTo( x + width, y, x + width - radius, y );
ctx.lineTo( x + radius, y );
ctx.quadraticCurveTo( x, y, x, y + radius );
} )( roundedRectShape, 0, 0, 18.5, 18.5, 2 );
addShape( roundedRectShape, 0x008000, -1.41, 0.5, 1, Math.PI / -2, 0, 0, 0.1 );
scene.add(keyLight);
scene.add(fillLight);
scene.add(backLight);
var loader = new THREE.OBJLoader( manager );
loader.load( './3.obj', function ( object ) {
object.traverse( function ( child ) {
if ( child instanceof THREE.Mesh ) {
child.material.map = textureOBJ;
}
} );
object.position.y = -0.01;
object.position.x = 0;
object.position.z = 0;
scene.add( object );
}, onProgress, onError );
controls.update();
var animate = function () {
requestAnimationFrame( animate );
renderer.render(scene, camera);
};
animate();
function addShape( shape, color, x, y, z, rx, ry, rz, s ) {
var geometry = new THREE.ShapeBufferGeometry( shape );
var mesh = new THREE.Mesh( geometry, new THREE.MeshPhongMaterial( { side: THREE.DoubleSide, map: texture } ) );
mesh.position.set( x, y, z );
mesh.rotation.set( rx, ry, rz );
mesh.scale.set( s, s, s );
scene.add( mesh );
}
</script>
I'm trying to add this simple png as the texture. Can anyone help?
plate - example -> http://37.59.53.90:8080/test1.html
I've managed to create a cup with texture -> http://37.59.53.90:8080/test.html
It looks like you're using THREE.TextureLoader incorrectly.
The texture loader's load() method takes two arguments;
- the URL of the image to be loaded, and
- a callback
The callback is invoked when the texture has loaded and is ready for use.
You should restructure your code so that the texture returned in the load() methods callback can be applied to your object.
To illustrate this, a quick solution to get textures loading and displaying on your object might look like this:
var loader = new THREE.OBJLoader( manager );
// Use a callback, in a similar way to your THREE.OBJLoader
new THREE.TextureLoader( manager ).load( './1.jpg' , function(texture) {
loader.load( './3.obj', function ( object ) {
object.traverse( function ( child ) {
if ( child instanceof THREE.Mesh ) {
// Assign the loaded texture from the enclosing THREE.TextureLoader to the child material
child.material.map = texture;
}
});
object.position.y = -0.01;
object.position.x = 0;
object.position.z = 0;
scene.add( object );
}, onProgress, onError );
})

Resources