I have animations working in p5 by swapping still objects in and out, but this will require a lot of stills and saving every frame to the computer, which is a real hassle. I need to use p5.js, as I built the whole program in it; I just need to be able to do all the calculations in three.js and turn the rendered models and textures into p5 vertices and texture images. I asked OpenAI and it said something like this would work.
// Load the glTF model into Three.js
var loader = new THREE.GLTFLoader();
loader.load("model.gltf", function (gltf) {
// Get the Three.js scene, camera, and renderer
var scene = gltf.scene;
var camera = new THREE.PerspectiveCamera(
60,
window.innerWidth / window.innerHeight,
0.1,
1000
);
camera.position.z = 5;
var renderer = new THREE.WebGLRenderer();
renderer.setSize(window.innerWidth, window.innerHeight);
// Append the Three.js renderer to the DOM
document.body.appendChild(renderer.domElement);
// Get the skeleton and animations from the glTF model
var skeleton = new THREE.SkeletonHelper(scene);
var mixer = new THREE.AnimationMixer(scene);
gltf.animations.forEach(function (animation) {
mixer.clipAction(animation).play();
});
// Render the model using p5.js's WebGL renderer
// NOTE(review): defining global-mode setup()/draw() inside this async load
// callback is too late — p5 normally starts before the model has loaded,
// so these functions are likely never registered; verify in your setup.
function setup() {
createCanvas(window.innerWidth, window.innerHeight, WEBGL);
}
function draw() {
background(200);
translate(-width / 2, -height / 2);
rotateX(frameCount * 0.01);
rotateY(frameCount * 0.02);
scale(50);
// Update the animation
mixer.update(1 / 60);
// Get the vertices and textures from the Three.js mesh
// NOTE(review): BufferGeometry has no .vertices property — positions live
// in geometry.attributes.position — so `vertices` is undefined here and
// the .map() call below throws.
var vertices = scene.children[0].geometry.vertices;
// NOTE(review): material.map is a THREE.Texture, not a UV array;
// texture coordinates live in geometry.attributes.uv.
var textures = scene.children[0].material.map;
// Convert the vertices into p5.js vertex objects
var p5Vertices = vertices.map(function (vertex) {
return createVector(vertex.x, vertex.y, vertex.z);
});
// Create a texture from the Three.js texture
// NOTE(review): p5's createImage(width, height) takes dimensions, not a
// URL; loadImage would be the appropriate (asynchronous) call.
var p5Texture = createImage(textures.image.src);
// Render the character using p5.js's WebGL renderer
beginShape(TRIANGLES);
for (var i = 0; i < vertices.length; i += 3) {
texture(p5Texture);
vertex(
p5Vertices[i].x,
p5Vertices[i].y,
p5Vertices[i].z,
// NOTE(review): indexing a Texture object like textures[i].u is invalid.
textures[i].u,
textures[i].v
);
vertex(
p5Vertices[i + 1].x,
p5Vertices[i + 1].y,
p5Vertices[i + 1].z,
textures[i + 1].u,
textures[i + 1].v
);
vertex(
p5Vertices[i + 2].x,
p5Vertices[i + 2].y,
p5Vertices[i + 2].z,
textures[i + 2].u,
textures[i + 2].v
);
}
endShape();
}
});
But I tried it and it just makes a black screen. I think it is close to what I need to do. I like the p5 simplicity and stuff I just need the models and textures from three.js is there any way to do this?
I tried putting the code that OpenAI gave me into the p5.js editor. I was expecting a model to be displayed, but I got a black screen. I think there must be a way to do this, since three.js can bake the image and the vertices if the web canvas uses the same settings and everything else also matches. Then I could bake all the frames of the animation at run time after the first run and store them. Edit: thanks to Paul Wheeler, the animation is working now. I wanted to bake the texture with just some ambient light, so I did: //make a camera we are going to have to bake the texture once.
// Perspective camera used for the one-time texture bake (75° FOV, window aspect).
const camera = new THREE.PerspectiveCamera(75,window.innerWidth / window.innerHeight,0.1,1000);
camera.position.z = 35;
// Ambient-only lighting so the baked image has flat, direction-independent shading.
const ambientLight = new THREE.AmbientLight(0xffffff, 0.5);
const scene = new THREE.Scene();
scene.add(ambientLight);
//now we can render with some light once to get the image
//this function will convert our image to a loadable image for p5js
// Converts a Three.js texture source into a p5.Image and stores it on mesh.image.
// bitmap: the texture's source object (assumes its .data holds the raw image — TODO confirm)
// mesh:   the object the loaded p5.Image is attached to
// p:      the p5 instance used to load the image
async function loadTexturesToMesh(bitmap, mesh,p) {
bitmap=bitmap.data;
var width=bitmap.width,
height=bitmap.height;
const imageBitmap = await createImageBitmap(bitmap);
const image = await loadImageFromBitmap(imageBitmap);
image.width=width;
image.height=height;
// NOTE(review): the "#ext=.png" fragment is presumably so p5.loadImage
// recognizes the blob URL as a PNG — verify against p5's loader behavior.
image.src+="#ext=.png";
// Asynchronous: mesh.image is only set once p5 finishes loading the URL.
p.loadImage(image.src,(image)=>mesh.image=image);
}
// Converts an ImageBitmap into a fully-decoded HTMLImageElement.
// Browser-only: relies on Image and URL.createObjectURL.
// Returns a Promise that resolves with the loaded Image, or rejects if
// encoding or decoding fails.
// NOTE: the object URL is intentionally NOT revoked here — callers
// (loadTexturesToMesh) re-fetch image.src later via p.loadImage.
function loadImageFromBitmap(bitmap) {
  // Convert ImageBitmap -> Blob, then load the Blob into an Image object.
  return bitmapToBlob(bitmap).then(
    blob =>
      new Promise((resolve, reject) => {
        const image = new Image();
        image.onload = () => resolve(image);
        // Bug fix: the original promise never settled on a decode failure,
        // leaving awaiting callers hung forever.
        image.onerror = reject;
        // Bug fix: URL.createObjectURL takes a single argument; the
        // original's `{type:"image/png"}` second argument was silently
        // ignored — the MIME type belongs on the Blob (see bitmapToBlob).
        image.src = URL.createObjectURL(blob);
      })
  );
}
// Encodes an ImageBitmap as a PNG Blob by drawing it onto an offscreen
// canvas. Browser-only: relies on document and the 2D canvas API.
// Returns a Promise that resolves with the Blob, or rejects if the canvas
// cannot produce one (e.g. tainted canvas or zero-sized bitmap).
function bitmapToBlob(bitmap) {
  return new Promise((resolve, reject) => {
    const canvas = document.createElement('canvas');
    canvas.width = bitmap.width;
    canvas.height = bitmap.height;
    canvas.getContext('2d').drawImage(bitmap, 0, 0);
    // Bug fix: toBlob invokes the callback with null on failure; the
    // original resolved that null and callers hung or crashed later.
    // Also request 'image/png' explicitly so the Blob carries the MIME
    // type the rest of the pipeline expects.
    canvas.toBlob(
      blob => (blob ? resolve(blob) : reject(new Error('canvas.toBlob produced no data'))),
      'image/png'
    );
  });
}
Then we can use
// Kick off the async texture conversion once per material — the
// loadingTextures flag guards against restarting it every frame.
if(!obj.material.map.loadingTextures)loadTexturesToMesh(obj.material.map.source,obj,p)
obj.material.map.loadingTextures=true;
// Once loadTexturesToMesh has attached the p5.Image, use it as the texture.
if(obj.image){
p.texture(obj.image);
}
in the render function.
and this gltf => {
var model = gltf.scene;
scene.add(model);
scene.add(ambientLight);
renderer.render(model,camera);
to when it loads the glTF, to bake the image.
It bakes fine, but the image doesn't wrap the model at all. I don't know what I am doing wrong here.
Euclid said "There is no royal road to geometry," but then he didn't have StackOverflow. Curiosity got the better of me, so here you go...
GLTFLoader's .load
function invokes the callback with an object that has the following properties:
animations — Array&lt;THREE.AnimationClip&gt;
scene — THREE.Group
scenes — Array&lt;THREE.Group&gt;
cameras — Array&lt;THREE.Camera&gt;
asset — Object

The main one we care about is scene
which is a Group
. A Group is a kind of Object3D
that just contains child objects in its children
property. Each of those children is itself some kind of Object3D
. In my testing I only encountered Object3D
(which seems just like Group
to me) and Mesh
, which is the actual 3D model. You cannot just assume that you only care about the first child of the scene Group like that garbage OpenAI vomited at you. Instead you need to recursively process the tree of objects and render the Meshes.
Meshes contain a BufferGeometry
object in their geometry
property, and that contains several critical components:
index — a list of vertex indices that make up the mesh
attributes.position — the x, y, and z coordinates of the vertices referenced by entries in index
attributes.normal — the x, y, and z components of normal vectors for each vertex (normals are used to determine how much a face should be illuminated by a light source).

Armed with this knowledge we can simply iterate over the index list and draw vertices that are part of a TRIANGLES
shape, similar to what OpenAI suggested.
However, before we do that there are a few considerations to keep in mind:
- Vertex attribute data is easiest to read with the fromBufferAttribute function on Vector3.
- Each object carries its transform in its matrixWorld property, which happily appears to be compatible with p5.js's applyMatrix.
- A geometry may have a groups property which is used to draw the mesh in multiple chunks; the code really ought to respect that, which it doesn't currently.

References:
/* Make the sketch canvas fill the window with no scrollbars. */
html, body {
margin: 0;
padding: 0;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.6.0/p5.js"></script>
<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>
<script type="importmap">
{
"imports": {
"three": "https://unpkg.com/[email protected]/build/three.module.js",
"three/addons/": "https://unpkg.com/[email protected]/examples/jsm/"
}
}
</script>
<script type="module">
import { Vector3 } from 'three';
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';
// p5 instance-mode sketch that loads a glTF with Three.js's GLTFLoader and
// renders its meshes using p5's immediate-mode geometry API.
function sketch(p) {
  p.setup = () => {
    p.createCanvas(p.windowWidth, p.windowHeight, p.WEBGL);
    p.background(100);
  };

  // Loading screen: trace the mouse until the model arrives, then this
  // draw handler is replaced inside the load callback below.
  p.draw = () => {
    p.circle(p.mouseX, p.mouseY, 20);
  };

  // Recursively walks the Three.js object tree and draws each Mesh.
  function renderObject(obj) {
    switch (obj.type) {
      case 'Object3D':
      case 'Group':
        for (const child of obj.children) {
          renderObject(child);
        }
        break;
      case 'Mesh': {
        p.push();
        // matrixWorld holds the object-to-world transform; its elements
        // array is accepted directly by p5's applyMatrix.
        p.applyMatrix(obj.matrixWorld.elements);
        p.beginShape(p.TRIANGLES);
        const pos = new Vector3();
        const norm = new Vector3();
        // NOTE(review): assumes indexed geometry — geometry.index is null
        // for non-indexed BufferGeometry; confirm for other models.
        for (const ix of obj.geometry.index.array) {
          pos.fromBufferAttribute(obj.geometry.attributes.position, ix);
          norm.fromBufferAttribute(obj.geometry.attributes.normal, ix);
          p.normal(norm.x, norm.y, norm.z);
          p.vertex(pos.x, pos.y, pos.z);
        }
        p.endShape();
        p.pop();
        break;
      }
      default:
        // Bug fix: the original template literal was missing the `$`
        // (`{obj.type}`), so the message never showed the actual type.
        throw new Error(`Unsupported Object Type: ${obj.type}`);
    }
  }

  const loader = new GLTFLoader();
  loader.load(
    'https://www.paulwheeler.us/files/BoxAnimated.gltf',
    gltf => {
      // Model is ready: swap in the real render loop.
      p.draw = () => {
        p.background(0);
        p.orbitControl(8, 4, 0.1);
        p.scale(100);
        p.noStroke();
        p.normalMaterial();
        renderObject(gltf.scene);
      };
    }
  );
}
// Run the sketch in p5 instance mode (no global setup()/draw()).
new p5(sketch);
// Attribution for the sample model (required by its CC-BY 4.0 license).
console.log(`
The file BoxAnimated.gltf provided by [Cesium](http://cesiumjs.org/) for glTF testing.
This model is licensed under a [Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/)
`);
</script>
I know you were also interested in animation, but I think I've given you a lot to go on. Read more Three.js API documentation and you should be able to figure it out.
Things OpenAI got wrong:
- Including the WebGLRenderer
and PerspectiveCamera
stuff: since you aren't rendering with Three.js, you're rendering with p5.js, none of it is needed.
- Assuming that scene
is a single child Group
containing a Mesh.
- Accessing vertices
on the Mesh
geometry is incorrect (perhaps this is from an outdated version of Three.js or a different type of geometry).
- There is no need to convert Vector3
objects into p5.Vector
objects with createVector
when all you are going to do is access their x
, y
, and z
components.

OpenAI should be considered harmful for programming and all other forms of creative knowledge work. Most of the time it will give you crap that looks right but doesn't pass muster, and when it works it is just tantamount to either cheating or stealing someone else's work. When you succeed using OpenAI you just end up failing in the long run because you didn't gain the knowledge for yourself and are therefore building on a house of cards. If you are diligent you will find it doesn't actually take years to get to where you want to go. There truly is no royal road to being a good computer programmer.
I went back and figured out how to get animation working. The important bits are:
Create an AnimationMixer
instance for the target object.
let animator = new AnimationMixer(gltf.scene);
Tell the AnimationMixer
to play the desired animation (Note: some GLTF files may have multiple animations for different actions or effects, here we just assume there is at least one and play the first one).
let animation = gltf.animations[0];
animator.clipAction(animation).play();
Update the AnimationMixer each frame:
animator.update(p.deltaTime / 1000);
Because the animation updates translations and rotations, we need to have the scene recompute the transformation matrices:
gltf.scene.updateMatrixWorld();
Here's a version of the snippet with animation working:
/* Make the sketch canvas fill the window with no scrollbars. */
html, body {
margin: 0;
padding: 0;
overflow: hidden;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.6.0/p5.js"></script>
<script async src="https://unpkg.com/[email protected]/dist/es-module-shims.js"></script>
<script type="importmap">
{
"imports": {
"three": "https://unpkg.com/[email protected]/build/three.module.js",
"three/addons/": "https://unpkg.com/[email protected]/examples/jsm/"
}
}
</script>
<script type="module">
import { AnimationMixer, Vector3 } from 'three';
import { GLTFLoader } from 'three/addons/loaders/GLTFLoader.js';
// p5 instance-mode sketch that loads a glTF with Three.js, drives its
// keyframe animation with an AnimationMixer, and renders the meshes with
// p5's immediate-mode geometry API each frame.
function sketch(p) {
  p.setup = () => {
    p.createCanvas(p.windowWidth, p.windowHeight, p.WEBGL);
    p.background(100);
  };

  // Loading screen: trace the mouse until the model arrives, then this
  // draw handler is replaced inside the load callback below.
  p.draw = () => {
    p.circle(p.mouseX, p.mouseY, 20);
  };

  // Recursively walks the Three.js object tree and draws each Mesh.
  function renderObject(obj) {
    switch (obj.type) {
      case 'Object3D':
      case 'Group':
        for (const child of obj.children) {
          renderObject(child);
        }
        break;
      case 'Mesh': {
        p.push();
        // matrixWorld holds the object-to-world transform; its elements
        // array is accepted directly by p5's applyMatrix.
        p.applyMatrix(obj.matrixWorld.elements);
        p.beginShape(p.TRIANGLES);
        const pos = new Vector3();
        const norm = new Vector3();
        // NOTE(review): assumes indexed geometry — geometry.index is null
        // for non-indexed BufferGeometry; confirm for other models.
        for (const ix of obj.geometry.index.array) {
          pos.fromBufferAttribute(obj.geometry.attributes.position, ix);
          norm.fromBufferAttribute(obj.geometry.attributes.normal, ix);
          p.normal(norm.x, norm.y, norm.z);
          p.vertex(pos.x, pos.y, pos.z);
        }
        p.endShape();
        p.pop();
        break;
      }
      default:
        // Bug fix: the original template literal was missing the `$`
        // (`{obj.type}`), so the message never showed the actual type.
        throw new Error(`Unsupported Object Type: ${obj.type}`);
    }
  }

  const loader = new GLTFLoader();
  loader.load(
    'https://www.paulwheeler.us/files/BoxAnimated.gltf',
    gltf => {
      let animator = new AnimationMixer(gltf.scene);
      console.log(`There are ${gltf.animations.length} animations. Auto-playing the first one.`);
      let animation = gltf.animations[0];
      animator.clipAction(animation).play();
      // Model is ready: swap in the real render loop.
      p.draw = () => {
        p.background(0);
        p.orbitControl(8, 4, 0.1);
        p.scale(100);
        p.noStroke();
        p.normalMaterial();
        // Advance the animation by the elapsed frame time (ms -> s)...
        animator.update(p.deltaTime / 1000);
        // ...and recompute world matrices so renderObject sees the new pose.
        gltf.scene.updateMatrixWorld();
        renderObject(gltf.scene);
      };
    }
  );
}
// Run the sketch in p5 instance mode (no global setup()/draw()).
new p5(sketch);
// Attribution for the sample model (required by its CC-BY 4.0 license).
console.log(`
The file BoxAnimated.gltf provided by [Cesium](http://cesiumjs.org/) for glTF testing.
This model is licensed under a [Creative Commons Attribution 4.0 International License](http://creativecommons.org/licenses/by/4.0/)
`);
</script>