Oh, that naughty sketch! Please let us know what the issue is below.
Apply Template
Applying this template will reset your sketch and remove all your changes. Are you sure you would like to continue?
Report Sketch
Report Comment
Please confirm that you would like to report the comment below.
We will review your submission and take any actions necessary per our Community Guidelines. In addition to reporting this comment, you can also block the user to prevent any future interactions.
Please report comments only when necessary. Unnecessary or abusive use of this tool may result in your own account being suspended.
Are you sure you want to delete your sketch?
Any files uploaded will be deleted as well.
Forks of this sketch will become forks of "MediaPipe-FaceMetrics-2025".
Delete Comment?
This will also delete all the replies to this comment.
Delete this tab? Any code in it will be deleted as well.
Select a collection to submit your sketch
We Need Your Support
Since 2008, OpenProcessing has provided tools for creative coders to learn, create, and share over a million open source projects in a friendly environment.
Niche websites like ours need your continued support for future development and maintenance, while keeping it an ad-free platform that respects your data and privacy!
Please consider subscribing below to show your support with a "Plus" badge on your profile and get access to many other features!
Enable your webcam and put your face & hand in view.
A fork of MediaPipe-FaceMetrics-2025 by Golan Levin
CC Attribution NonCommercial ShareAlike
Face+Hand-2025
Levin
xxxxxxxxxx
// p5.js interface to Google MediaPipe Landmark Tracking (Work-in-Progress)
// Allows tracking of landmarks on hands, bodies, and/or faces.
// See https://mediapipe-studio.webapps.google.com/home
// Uses p5.js v.1.11.3 + MediaPipe v.0.10.21
// By Golan Levin, version of 2/27/2025
//
// Demonstrate that I can access:
// - face points (clown nose)
// - hand points (thumb plum)
// - face metrics (jaw openness)

// Don't change the names of these global variables.
let myHandLandmarker;   // MediaPipe HandLandmarker, created in preload()
let myPoseLandmarker;   // MediaPipe PoseLandmarker, created in preload()
let myFaceLandmarker;   // MediaPipe FaceLandmarker, created in preload()
let handLandmarks;      // latest hand-tracking results (set in predictWebcam)
let poseLandmarks;      // latest pose-tracking results (set in predictWebcam)
let faceLandmarks;      // latest face-tracking results (set in predictWebcam)
let myCapture;          // p5 webcam capture element, created in setup()
let lastVideoTime = -1; // currentTime of the last video frame we processed

// For landmarks you want, set to true; set false the ones you don't.
// Works best with just one or two sets of landmarks.
const trackingConfig = {
  doAcquireHandLandmarks: true,
  doAcquirePoseLandmarks: false, // was written "!true"; spelled out for clarity
  doAcquireFaceLandmarks: true,
  doAcquireFaceMetrics: true,
  poseModelLiteOrFull: "full", /* "lite" (3MB) or "full" (6MB) */
  cpuOrGpuString: "GPU", /* "GPU" or "CPU" */
  maxNumHands: 1,
  maxNumPoses: 1,
  maxNumFaces: 1,
};
//------------------------------------------
// Load the MediaPipe tasks-vision module and create whichever landmarkers
// are enabled in trackingConfig. Runs before setup() (p5.js preload hook).
//
// NOTE(review): HandLandmarker, PoseLandmarker, FaceLandmarker, and
// FilesetResolver are assigned without declaration, i.e. as implicit
// globals. This appears deliberate: PoseLandmarker and FaceLandmarker are
// read later (drawPosePoints/drawFacePoints) for their static connector
// tables, so they must remain globally visible. Left as-is.
async function preload() {
  const mediapipe_module = await import('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/vision_bundle.js');
  HandLandmarker = mediapipe_module.HandLandmarker;
  PoseLandmarker = mediapipe_module.PoseLandmarker;
  FaceLandmarker = mediapipe_module.FaceLandmarker;
  FilesetResolver = mediapipe_module.FilesetResolver;
  const vision = await FilesetResolver.forVisionTasks(
    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.21/wasm"
  );

  // Hand Landmark Tracking:
  // https://codepen.io/mediapipe-preview/pen/gOKBGPN
  // https://mediapipe-studio.webapps.google.com/studio/demo/hand_landmarker
  if (trackingConfig.doAcquireHandLandmarks){
    myHandLandmarker = await HandLandmarker.createFromOptions(vision, {
      numHands: trackingConfig.maxNumHands,
      runningMode: "VIDEO",
      baseOptions: {
        delegate: trackingConfig.cpuOrGpuString,
        modelAssetPath:
          "https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task",
      },
    });
  }

  // Pose (Body) Landmark Tracking:
  // https://codepen.io/mediapipe-preview/pen/abRLMxN
  // https://developers.google.com/mediapipe/solutions/vision/pose_landmarker
  if (trackingConfig.doAcquirePoseLandmarks){
    const poseModelLite = "https://storage.googleapis.com/mediapipe-models/pose_landmarker/pose_landmarker_lite/float16/1/pose_landmarker_lite.task";
    const poseModelFull = "https://storage.googleapis.com/mediapipe-models/pose_landmarker/pose_landmarker_full/float16/1/pose_landmarker_full.task";
    // Pick the model variant once (strict comparison; no throwaway reassignment).
    const poseModel = (trackingConfig.poseModelLiteOrFull === "full") ? poseModelFull : poseModelLite;
    myPoseLandmarker = await PoseLandmarker.createFromOptions(vision, {
      numPoses: trackingConfig.maxNumPoses,
      runningMode: "VIDEO",
      baseOptions: {
        modelAssetPath: poseModel,
        delegate: trackingConfig.cpuOrGpuString,
      },
    });
  }

  // Face Landmark Tracking:
  // https://codepen.io/mediapipe-preview/pen/OJBVQJm
  // https://developers.google.com/mediapipe/solutions/vision/face_landmarker
  if (trackingConfig.doAcquireFaceLandmarks){
    myFaceLandmarker = await FaceLandmarker.createFromOptions(vision, {
      numFaces: trackingConfig.maxNumFaces,
      runningMode: "VIDEO",
      outputFaceBlendshapes: trackingConfig.doAcquireFaceMetrics,
      baseOptions: {
        delegate: trackingConfig.cpuOrGpuString,
        modelAssetPath:
          "https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task",
      },
    });
  }
}
//------------------------------------------
// Run the enabled MediaPipe landmarkers on the current webcam frame and
// store the results in the handLandmarks/poseLandmarks/faceLandmarks
// globals. Skips detection if the video has not advanced since last call.
//
// Called once per frame from draw(). BUGFIX: the previous version ALSO
// re-scheduled itself with window.requestAnimationFrame; combined with the
// per-frame call from draw(), every frame spawned one additional
// animation-frame chain, so the number of scheduled detection callbacks
// grew without bound. The self-scheduling is removed; draw() drives this.
async function predictWebcam() {
  let startTimeMs = performance.now();
  if (lastVideoTime !== myCapture.elt.currentTime) {
    if (trackingConfig.doAcquireHandLandmarks && myHandLandmarker) {
      handLandmarks = myHandLandmarker.detectForVideo(myCapture.elt, startTimeMs);
    }
    if (trackingConfig.doAcquirePoseLandmarks && myPoseLandmarker) {
      poseLandmarks = myPoseLandmarker.detectForVideo(myCapture.elt, startTimeMs);
    }
    if (trackingConfig.doAcquireFaceLandmarks && myFaceLandmarker) {
      faceLandmarks = myFaceLandmarker.detectForVideo(myCapture.elt, startTimeMs);
    }
    lastVideoTime = myCapture.elt.currentTime;
  }
}
//------------------------------------------
// p5.js entry point: create the canvas and a hidden, low-resolution
// webcam capture (the feed is scaled up when drawn in drawVideoBackground).
function setup() {
  createCanvas(640, 480);
  const cam = createCapture(VIDEO);
  cam.size(160, 120); // small feed: cheaper to track, scaled up at draw time
  cam.hide();         // hide the raw DOM element; we render it to the canvas
  myCapture = cam;
}
// Main per-frame loop: refresh the tracking data, then run the render
// passes back-to-front (video first; later passes draw on top).
function draw() {
  background("white");
  predictWebcam();
  const renderPasses = [
    drawVideoBackground,
    drawHandPoints,
    drawPosePoints,
    drawFacePoints,
    drawFaceMetrics,
    drawDiagnosticInfo,
    drawClownNose,
    drawThumbPlum,
    drawJawOpenness,
  ];
  for (const pass of renderPasses) {
    pass();
  }
}
//------------------------------------------
// Draw a vertical black bar whose height is proportional to the "jawOpen"
// blendshape score (0..1) of the first tracked face. The bar grows upward
// from the bottom edge of the canvas.
function drawJawOpenness(){
  if (trackingConfig.doAcquireFaceLandmarks &&
      trackingConfig.doAcquireFaceMetrics){
    if (faceLandmarks && faceLandmarks.faceBlendshapes) {
      let aFaceMetrics = faceLandmarks.faceBlendshapes[0];
      if (aFaceMetrics){
        // Look the metric up by name rather than by a hard-coded index
        // (previously categories[25]), which is brittle across model versions.
        const jawOpen = aFaceMetrics.categories.find(
          (c) => c.categoryName === "jawOpen");
        if (jawOpen){
          let mouthOpenness01 = jawOpen.score;
          let mouthBarHeight = map(mouthOpenness01,0,1, 0,height);
          fill('black');
          rect(150,height, 40,0-mouthBarHeight); // negative height: grow upward
        }
      }
    }
  }
}
//------------------------------------------
// Draw a red "clown nose" circle on the nose of the first tracked face.
// (Removed a leftover debug print(nFaces) that wrote to the console on
// every single frame.)
function drawClownNose(){
  if (trackingConfig.doAcquireFaceLandmarks) {
    if (faceLandmarks && faceLandmarks.faceLandmarks) {
      const nFaces = faceLandmarks.faceLandmarks.length;
      if (nFaces > 0) {
        let aFace = faceLandmarks.faceLandmarks[0];
        let noseIndex = 1; // face-mesh landmark used here as the nose tip
        let nosePt = aFace[noseIndex];
        // x is mirrored to match the mirrored video background.
        let nx = map(nosePt.x, 0,1, width,0);
        let ny = map(nosePt.y, 0,1, 0,height);
        fill('red');
        noStroke();
        circle(nx,ny, 40);
      }
    }
  }
}
//------------------------------------------
// "Stick in thumb; pull out plum": draw a purple dot on the thumb tip of
// the first tracked hand. (THUMB_TIP is a landmark index defined elsewhere.)
function drawThumbPlum(){
  if (!trackingConfig.doAcquireHandLandmarks) return;
  if (!(handLandmarks && handLandmarks.landmarks)) return;
  if (handLandmarks.landmarks.length === 0) return;
  const firstHand = handLandmarks.landmarks[0];
  const tip = firstHand[THUMB_TIP];
  // x is mirrored to match the mirrored video background.
  const px = map(tip.x, 0,1, width,0);
  const py = map(tip.y, 0,1, 0,height);
  fill('purple');
  noStroke();
  circle(px, py, 40);
}
//------------------------------------------
// Draw the webcam feed mirrored horizontally and faded, filling the canvas.
// The flip makes the sketch behave like a mirror; the low alpha keeps the
// tracking overlays legible on top of the video.
function drawVideoBackground() {
  push();
  translate(width, 0);          // move origin to the right edge...
  scale(-1, 1);                 // ...and flip x, so the image draws mirrored
  tint(255, 255, 255, 72);      // ghost the video (alpha 72/255)
  image(myCapture, 0, 0, width, height);
  tint(255);                    // restore full opacity for later image() calls
  pop();
}
//------------------------------------------
// HANDS: 21 2D landmarks per hand, up to maxNumHands at once
// Two passes per frame: (1) black polylines along each finger,
// (2) a small red circle at every joint.
function drawHandPoints() {
  if (!trackingConfig.doAcquireHandLandmarks) return;
  if (!(handLandmarks && handLandmarks.landmarks)) return;
  const hands = handLandmarks.landmarks;
  if (hands.length === 0) return;

  // Pass 1: skeleton lines through the joints of each finger.
  noFill();
  stroke("black");
  strokeWeight(2.0);
  const fingerGroups = [
    HANDLANDMARKER_PALM,
    HANDLANDMARKER_THUMB,
    HANDLANDMARKER_INDEX_FINGER,
    HANDLANDMARKER_MIDDLE_FINGER,
    HANDLANDMARKER_RING_FINGER,
    HANDLANDMARKER_PINKY,
  ];
  for (const joints of hands) {
    for (const group of fingerGroups) {
      drawConnectors(joints, group);
    }
  }

  // Pass 2: a dot at each of the 21 joints, mirrored to match the video.
  strokeWeight(1.0);
  stroke("black");
  fill("red");
  for (const joints of hands) {
    for (let i = 0; i <= 20; i++) {
      const px = map(joints[i].x, 0, 1, width, 0);
      const py = map(joints[i].y, 0, 1, 0, height);
      circle(px, py, 9);
    }
  }
}
//------------------------------------------
// 33 joints of the body. See landmarks.js for the list.
// Draws the body skeleton by connecting the pose joints with dark-blue lines.
function drawPosePoints(){
  if (!trackingConfig.doAcquirePoseLandmarks) return;
  if (!(poseLandmarks && poseLandmarks.landmarks)) return;
  const poses = poseLandmarks.landmarks;
  if (poses.length === 0) return;
  noFill();
  stroke("darkblue");
  strokeWeight(2.0);
  for (const joints of poses) {
    drawConnectors(joints, PoseLandmarker.POSE_CONNECTIONS);
  }
}
//------------------------------------------
// Tracks 478 points on the face.
// For each detected face: a tiny dot at every landmark, then heavier
// outlines for the named facial features (eyes, brows, oval, lips, irises).
function drawFacePoints() {
  if (!trackingConfig.doAcquireFaceLandmarks) return;
  if (!(faceLandmarks && faceLandmarks.faceLandmarks)) return;
  for (const aFace of faceLandmarks.faceLandmarks) {
    if (!aFace) continue;

    // Dot at every landmark, mirrored to match the video background.
    noFill();
    stroke("black");
    strokeWeight(1.0);
    for (const pt of aFace) {
      const px = map(pt.x, 0, 1, width, 0);
      const py = map(pt.y, 0, 1, 0, height);
      circle(px, py, 1);
    }

    // Heavier strokes for the feature contours MediaPipe provides.
    noFill();
    stroke("black");
    strokeWeight(2.0);
    const featureContours = [
      FaceLandmarker.FACE_LANDMARKS_RIGHT_EYE,
      FaceLandmarker.FACE_LANDMARKS_RIGHT_EYEBROW,
      FaceLandmarker.FACE_LANDMARKS_LEFT_EYE,
      FaceLandmarker.FACE_LANDMARKS_LEFT_EYEBROW,
      FaceLandmarker.FACE_LANDMARKS_FACE_OVAL,
      FaceLandmarker.FACE_LANDMARKS_LIPS,
      FaceLandmarker.FACE_LANDMARKS_RIGHT_IRIS,
      FaceLandmarker.FACE_LANDMARKS_LEFT_IRIS,
    ];
    for (const contour of featureContours) {
      drawConnectors(aFace, contour);
    }
    drawConnectors(aFace, FACELANDMARKER_NOSE); // Google offers no nose
  }
}
// Render each face's blendshape metrics as a labeled list of small
// horizontal bars: the solid segment shows the score, the faint segment
// shows the full 0..1 track. Bars grow leftward from the label column.
function drawFaceMetrics(){
  if (!(trackingConfig.doAcquireFaceLandmarks &&
        trackingConfig.doAcquireFaceMetrics)) return;
  if (!(faceLandmarks && faceLandmarks.faceBlendshapes)) return;
  const nFaces = faceLandmarks.faceLandmarks.length;
  for (let f = 0; f < nFaces; f++) {
    const aFaceMetrics = faceLandmarks.faceBlendshapes[f];
    if (!aFaceMetrics) continue;
    fill('black');
    textSize(7);
    const tx = 40;        // label x position
    const dy = 8.5;       // vertical spacing between rows
    const vx0 = tx - 5;   // bar start (score = 0)
    const vx1 = tx - 35;  // bar end (score = 1); left of vx0, so bars grow left
    let ty = 40;          // y of the current row
    // Start at index 1: the first blendshape category is intentionally skipped.
    for (let i = 1; i < aFaceMetrics.categories.length; i++) {
      const { categoryName, score } = aFaceMetrics.categories[i];
      noStroke();
      text(categoryName, tx, ty);
      const vx = map(score, 0, 1, vx0, vx1);
      stroke(0, 0, 0);
      strokeWeight(2.0);
      line(vx0, ty - 2, vx, ty - 2);  // solid segment: the score
      stroke(0, 0, 0, 20);
      line(vx0, ty - 2, vx1, ty - 2); // faint segment: full-scale track
      ty += dy;
    }
  }
}
//------------------------------------------
// Draw one line segment per {start, end} landmark-index pair in
// connectorSet, mapping normalized (0..1) landmark coordinates onto the
// canvas with x mirrored (to match the mirrored video background).
function drawConnectors(landmarks, connectorSet) {
  if (!landmarks) return;
  for (const { start, end } of connectorSet) {
    const a = landmarks[start];
    const b = landmarks[end];
    const x0 = map(a.x, 0,1, width,0);
    const y0 = map(a.y, 0,1, 0,height);
    const x1 = map(b.x, 0,1, width,0);
    const y1 = map(b.y, 0,1, 0,height);
    line(x0,y0, x1,y1);
  }
}
//------------------------------------------
// Overlay the current frame rate (as an integer) near the top-left corner.
function drawDiagnosticInfo() {
  noStroke();
  fill("black");
  textSize(12);
  const fps = int(frameRate());
  text(`FPS: ${fps}`, 40, 30);
}
See More Shortcuts
Please verify your email to comment
Verify Email
%c * Tone.js v15.0.2 *
background: #000; color: #fff
🌸 p5.js says: No webcam found on this device (http://p5js.org/reference/p5/createCapture)
Object