MediaPipe-FaceMetrics-2025
by Golan Levin
Instructions: Enable your webcam and put your face in view.
License: CC Attribution-NonCommercial-ShareAlike
// p5.js interface to Google MediaPipe Landmark Tracking.
// Tracks 478 points on the face and calculates 52 face metrics.
// See https://mediapipe-studio.webapps.google.com/home
// Uses p5.js v.1.11.3 + MediaPipe v.0.10.21
// By Golan Levin, version of 2/26/2025
// Huge thanks to Orr Kislev, who made it work in p5's global mode!
// Based on: https://editor.p5js.org/golan/sketches/0yyu6uEwM
//
// If you'd also like to use microphone input, this sketch
// includes: https://cdn.jsdelivr.net/npm/p5.sound@0.1.0
// See this example to understand how to access sound:
// https://openprocessing.org/sketch/2189445
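//
// For reference, a minimal mic-level sketch might look like this
// (an illustrative addition, not part of this program; it assumes
// p5.sound is loaded, and browsers may require a user gesture such
// as a click before audio input starts):
//
//   let mic;
//   function setup() {
//     createCanvas(400, 400);
//     mic = new p5.AudioIn();
//     mic.start();
//   }
//   function draw() {
//     background(220);
//     let level = mic.getLevel(); // smoothed amplitude, 0..1
//     circle(width / 2, height / 2, level * width);
//   }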
// Don't change the names of these global variables.
let myFaceLandmarker;
let faceLandmarks;
let myCapture;
let lastVideoTime = -1;
let FaceLandmarker; // class handle, assigned in preload()
let FilesetResolver; // assigned in preload()
// Tracking works best with just one or two faces.
const trackingConfig = {
  doAcquireFaceMetrics: true,
  cpuOrGpuString: "GPU", /* "GPU" or "CPU" */
  maxNumFaces: 1,
};
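//------------------------------------------
// MediaPipe's FaceLandmarker exports connector sets for the eyes, brows,
// irises, lips, and face oval, but none for the nose. The pairs below are
// our own addition: assumed indices tracing the nose-bridge midline of the
// canonical 478-point mesh. Adjust them to taste.
const FACELANDMARKER_NOSE = [
  { start: 168, end: 6 },
  { start: 6, end: 197 },
  { start: 197, end: 195 },
  { start: 195, end: 5 },
  { start: 5, end: 4 },
  { start: 4, end: 1 },
  { start: 1, end: 19 },
  { start: 19, end: 94 },
  { start: 94, end: 2 },
];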
//------------------------------------------
// Note that p5.js does not await an async preload(); setup() and draw()
// may begin before the model is ready, which is why predictWebcam()
// checks that myFaceLandmarker exists before using it.
async function preload() {
  const mediapipe_module = await import('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/vision_bundle.js');
  FaceLandmarker = mediapipe_module.FaceLandmarker;
  FilesetResolver = mediapipe_module.FilesetResolver;
  const vision = await FilesetResolver.forVisionTasks(
    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.21/wasm"
  );
  // Face Landmark Tracking:
  // https://codepen.io/mediapipe-preview/pen/OJBVQJm
  // https://developers.google.com/mediapipe/solutions/vision/face_landmarker
  myFaceLandmarker = await FaceLandmarker.createFromOptions(vision, {
    numFaces: trackingConfig.maxNumFaces,
    runningMode: "VIDEO",
    outputFaceBlendshapes: trackingConfig.doAcquireFaceMetrics,
    baseOptions: {
      delegate: trackingConfig.cpuOrGpuString,
      modelAssetPath:
        "https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task",
    },
  });
}
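// (An aside, not used here: to analyze a still image instead of a video
// stream, one would create the landmarker with runningMode: "IMAGE" and
// call myFaceLandmarker.detect() on the image once.)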
//------------------------------------------
function predictWebcam() {
  let startTimeMs = performance.now();
  // Only run the (costly) detector when the video has a new frame.
  if (lastVideoTime !== myCapture.elt.currentTime) {
    if (myFaceLandmarker) {
      faceLandmarks = myFaceLandmarker.detectForVideo(myCapture.elt, startTimeMs);
    }
    lastVideoTime = myCapture.elt.currentTime;
  }
  // Note: draw() already calls this once per frame, so we don't also
  // self-schedule via requestAnimationFrame; doing both would spawn a
  // growing pile of duplicate callback chains.
}
//------------------------------------------
function setup() {
  createCanvas(800, 600);
  myCapture = createCapture(VIDEO);
  myCapture.size(320, 240);
  myCapture.hide(); // the video is drawn manually in drawVideoBackground()
}
//------------------------------------------
function draw() {
  background("white");
  predictWebcam();
  drawVideoBackground();
  drawFacePoints();
  drawFaceMetrics();
  drawDiagnosticInfo();
}
//------------------------------------------
function drawVideoBackground() {
  push();
  // Mirror the video horizontally so it reads like a mirror,
  // and draw it translucently behind the graphics.
  translate(width, 0);
  scale(-1, 1);
  tint(255, 255, 255, 72);
  image(myCapture, 0, 0, width, height);
  tint(255);
  pop();
}
//------------------------------------------
// Draws the 478 tracked points of each face.
function drawFacePoints() {
  if (faceLandmarks && faceLandmarks.faceLandmarks) {
    const nFaces = faceLandmarks.faceLandmarks.length;
    for (let f = 0; f < nFaces; f++) {
      let aFace = faceLandmarks.faceLandmarks[f];
      if (aFace) {
        let nFaceLandmarks = aFace.length;
        noFill();
        stroke("black");
        strokeWeight(1.0);
        for (let i = 0; i < nFaceLandmarks; i++) {
          // Landmarks are normalized (0..1); mirror x to match the video.
          let px = map(aFace[i].x, 0, 1, width, 0);
          let py = map(aFace[i].y, 0, 1, 0, height);
          circle(px, py, 1);
        }
        noFill();
        stroke("black");
        strokeWeight(2.0);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_RIGHT_EYE);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_RIGHT_EYEBROW);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_LEFT_EYE);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_LEFT_EYEBROW);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_FACE_OVAL);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_LIPS);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_RIGHT_IRIS);
        drawConnectors(aFace, FaceLandmarker.FACE_LANDMARKS_LEFT_IRIS);
        drawConnectors(aFace, FACELANDMARKER_NOSE); // custom set; MediaPipe provides none
      }
    }
  }
}
function drawFaceMetrics() {
  if (trackingConfig.doAcquireFaceMetrics) {
    if (faceLandmarks && faceLandmarks.faceBlendshapes) {
      const nFaces = faceLandmarks.faceLandmarks.length;
      for (let f = 0; f < nFaces; f++) {
        let aFaceMetrics = faceLandmarks.faceBlendshapes[f];
        if (aFaceMetrics) {
          fill("black");
          textSize(10.5);
          let tx = 50;      // x position of metric labels
          let ty = 40;      // y position of the first row
          let dy = 11;      // row spacing
          let vx0 = tx - 5; // bar origin (bars grow leftward)
          let vx1 = 5;      // bar maximum extent
          let nMetrics = aFaceMetrics.categories.length;
          // Start at 1: category 0 is the "_neutral" placeholder shape.
          for (let i = 1; i < nMetrics; i++) {
            let metricName = aFaceMetrics.categories[i].categoryName;
            noStroke();
            text(metricName, tx, ty);
            let metricValue = aFaceMetrics.categories[i].score;
            let vx = map(metricValue, 0, 1, vx0, vx1);
            stroke(0, 0, 0);
            strokeWeight(2.0);
            line(vx0, ty - 2, vx, ty - 2);
            stroke(0, 0, 0, 20); // faint full-range bar underneath
            line(vx0, ty - 2, vx1, ty - 2);
            ty += dy;
          }
        }
      }
    }
  }
}
//------------------------------------------
function drawConnectors(landmarks, connectorSet) {
  if (landmarks) {
    let nConnectors = connectorSet.length;
    for (let i = 0; i < nConnectors; i++) {
      let index0 = connectorSet[i].start;
      let index1 = connectorSet[i].end;
      // Mirror x (width..0) to match the mirrored video underlay.
      let x0 = map(landmarks[index0].x, 0, 1, width, 0);
      let y0 = map(landmarks[index0].y, 0, 1, 0, height);
      let x1 = map(landmarks[index1].x, 0, 1, width, 0);
      let y1 = map(landmarks[index1].y, 0, 1, 0, height);
      line(x0, y0, x1, y1);
    }
  }
}
//------------------------------------------
function drawDiagnosticInfo() {
  noStroke();
  fill("black");
  textSize(12);
  text("FPS: " + int(frameRate()), 50, 27);
}
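//------------------------------------------
// A small convenience helper (an addition, not part of the original
// sketch): look up one blendshape score by name, e.g. "jawOpen" or
// "mouthSmileLeft", following MediaPipe's blendshape category names.
// Returns 0 until tracking data arrives.
function getMetricValue(metricName) {
  if (faceLandmarks && faceLandmarks.faceBlendshapes &&
      faceLandmarks.faceBlendshapes.length > 0) {
    let categories = faceLandmarks.faceBlendshapes[0].categories;
    for (let i = 0; i < categories.length; i++) {
      if (categories[i].categoryName === metricName) {
        return categories[i].score;
      }
    }
  }
  return 0;
}
// Example use in draw():
//   circle(width / 2, height / 2, 100 * getMetricValue("jawOpen"));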