Skip to content

Commit

Permalink
top of head landmark
Browse files Browse the repository at this point in the history
  • Loading branch information
mayarajan3 committed Aug 29, 2024
1 parent 3a93d5b commit 1201035
Show file tree
Hide file tree
Showing 3 changed files with 73 additions and 6 deletions.
42 changes: 37 additions & 5 deletions extensions/src/poseFace/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import { Extension, Environment, untilExternalGlobalVariableLoaded, validGenericExtension, RuntimeEvent } from "$common";
import BlockUtility from "$root/packages/scratch-vm/src/engine/block-utility";
import { legacyFullSupport, info } from "./legacy";
import { getLandmarkModel } from "./landmarkHelper";
import { type Results, type FaceMesh } from "@mediapipe/face_mesh";

const { legacyExtension, legacyDefinition } = legacyFullSupport.for<PoseFace>();

Expand Down Expand Up @@ -104,17 +106,26 @@ export default class PoseFace extends Extension<Details, Blocks> {
emotions = info.menus.EMOTION.items
all_emotions = info.menus.EMOTION_ALL.items

// MediaPipe FaceMesh model instance; assigned once in init(). -- see landmarkHelper.getLandmarkModel
landmarkDetector: FaceMesh;
// Latest face-landmark results cached by processResults(); undefined until the first frame is processed.
landmarkResults: Results;

/**
 * Callback registered with the MediaPipe FaceMesh model: caches the most
 * recent landmark results so reporter/command blocks can read them later.
 * @param results the landmark results emitted by FaceMesh for one frame
 */
private processResults(results: Results) {
this.landmarkResults = results;
}
/**
 * One-time setup for the PoseFace extension (acts in place of a child-class
 * constructor): loads the MediaPipe landmark model, then — when the runtime
 * exposes IO devices — hooks project start and kicks off the frame loop.
 * @param env the Scratch environment provided to the extension
 */
async init(env: Environment) {
  this.landmarkDetector = await getLandmarkModel((results) => this.processResults(results));
  // Without IO devices there is no video source, so skip the processing loop.
  if (!this.runtime.ioDevices) return;
  this.runtime.on(RuntimeEvent.ProjectStart, this.projectStarted.bind(this));
  this._loop();
}



projectStarted() {
this.setTransparency(this.globalVideoTransparency);
this.toggleVideo(this.globalVideoState);
Expand All @@ -130,16 +141,31 @@ export default class PoseFace extends Extension<Details, Blocks> {
return { x: x - (this.DIMENSIONS[0] / 2), y: (this.DIMENSIONS[1] / 2) - y };
}

/**
* Converts the coordinates from the MediaPipe face estimate to Scratch coordinates
* @param x
* @param y
* @returns enum
*/
convertMediaPipeCoordsToScratch(x, y) {
return this.convertCoordsToScratch({ x: this.DIMENSIONS[0] * x, y: this.DIMENSIONS[1] * y });
}

async _loop() {
while (true) {
const frame = this.runtime.ioDevices.video.getFrame({
format: 'image-data',
dimensions: this.DIMENSIONS
});

const canvas = this.runtime.ioDevices.video.getFrame({
format: 'canvas'
});

const time = +new Date();
if (frame) {
this.affdexState = await this.estimateAffdexOnImage(frame);
await this.landmarkDetector.send({ image: canvas });
// TODO: Once indicators are implemented, indicate the state of the extension based on this.affdexState
}
const estimateThrottleTimeout = (+new Date() - time) / 4;
Expand Down Expand Up @@ -192,11 +218,17 @@ export default class PoseFace extends Extension<Details, Blocks> {
* @returns None
*/
/**
 * Moves the sprite to the requested face part. Parts 0-33 come from the
 * Affdex feature-point estimate; part 34 ("top of head") comes from the
 * MediaPipe FaceMesh landmarks (landmark index 10 of the first face).
 * Silently does nothing when the relevant model has no results yet.
 * @param part index of the face part (menu values "0".."34"; `<` coerces)
 * @param util block utility carrying the sprite target to move
 * @returns None
 */
goToPart(part, util) {
  if (part < 34) {
    // Affdex-based parts: require a completed Affdex estimate.
    if (!this.affdexState || !this.affdexState.featurePoints) return;
    const featurePoint = this.affdexState.featurePoints[part];
    const { x, y } = this.convertCoordsToScratch(featurePoint);
    (util.target as any).setXY(x, y, false);
  } else {
    // MediaPipe-based parts: require landmark results with at least one face.
    if (!this.landmarkResults || !this.landmarkResults.multiFaceLandmarks ||
        !this.landmarkResults.multiFaceLandmarks[0]) return;
    const landmark = this.landmarkResults.multiFaceLandmarks[0][10];
    const { x, y } = this.convertMediaPipeCoordsToScratch(landmark.x, landmark.y);
    (util.target as any).setXY(x, y, false);
  }
  // NOTE: the previous version re-ran the Affdex lookup unconditionally after
  // the branch, which crashed for part >= 34 (featurePoints[34] is undefined);
  // that duplicated tail has been removed.
}

/**
Expand Down
31 changes: 31 additions & 0 deletions extensions/src/poseFace/landmarkHelper.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import { ResultsListener, type FaceMesh } from '@mediapipe/face_mesh';
import '@tensorflow/tfjs-core';
// Register WebGL backend.
import { untilExternalGlobalVariableLoaded } from "$common";
import '@tensorflow/tfjs-backend-webgl';

export const getLandmarkModel = async (onFrame: ResultsListener) => {

const packageURL = "https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh";
const packageClassName = "FaceMesh";

const Class = await untilExternalGlobalVariableLoaded<typeof FaceMesh>(packageURL, packageClassName);

const faceMesh = new Class({
locateFile: (file) => {
return `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}`;
},
});

// Initialize the mediaPipe model according to the documentation
faceMesh.setOptions({
maxNumFaces: 1,
refineLandmarks: true,
minDetectionConfidence: 0.5,
minTrackingConfidence: 0.5
});

faceMesh.onResults(onFrame);
await faceMesh.initialize();
return faceMesh;
}
6 changes: 5 additions & 1 deletion extensions/src/poseFace/legacy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -257,6 +257,10 @@ export const info = {
{
"text": "right lower eyelid",
"value": "33"
},
{
"text": "top of head",
"value": "34"
}
],
"acceptReporters": false
Expand Down Expand Up @@ -450,4 +454,4 @@ export const info = {
}
} as const;
export const legacyFullSupport = legacy(info);
export const legacyIncrementalSupport = legacy(info, {"incrementalDevelopment":true});
export const legacyIncrementalSupport = legacy(info, { "incrementalDevelopment": true });

0 comments on commit 1201035

Please sign in to comment.