// Turn on your camera and smile!
/*
 * 👋 Hello! This is an ml5.js example made and shared with ❤️.
 * Learn more about the ml5.js project: https://ml5js.org/
 * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
 * This example demonstrates face tracking on live video through ml5.faceMesh.
 * The original code is from https://editor.p5js.org/ml5/sketches/lCurUW1TT
 */

let faceMesh;
let video;
let faces = []; //this array will hold 2D face mesh positions
β
// these options define specifics to ml5
let options = { maxFaces: 1, refineLandmarks: false, flipHorizontal: false };
β
function preload() {
// Load the faceMesh model
// This is a pre-trained model that looks at an image and returns mesh positions if it detects an human face
faceMesh = ml5.faceMesh(options);
}
β
function setup() {
createCanvas(640, 480);
// Create the webcam video and hide it
video = createCapture(VIDEO);
video.size(640, 480);
video.hide();
// Start detecting faces from the webcam video
// gotFaces function (see below) will be run on every frame faces are detected
faceMesh.detectStart(video, gotFaces);
fill(0, 255, 0); //make mesh dots green
noStroke();
}
β
function draw() {
// Draw the webcam video
image(video, 0, 0, width, height);
β
//Draw each face detected
for (let i = 0; i < faces.length; i++) {
let face = faces[i];
// Draw all the tracked face points
for (let j = 0; j < face.keypoints.length; j++) {
let keypoint = face.keypoints[j];
circle(keypoint.x, keypoint.y, 3);
}
}
}
β
// Callback function for when faceMesh outputs data
function gotFaces(results) {
// Save the output to the faces variable
faces = results;
}
β