While working on a project using WebRTC, I found code that applies a filter using MediaPipe face_mesh. The code is as follows:
import { FaceMesh } from "@mediapipe/face_mesh";
import React, { useRef, useEffect } from "react";
import * as Facemesh from "@mediapipe/face_mesh"; // FACEMESH_* connection constants
import * as cam from "@mediapipe/camera_utils";
import { drawConnectors } from "@mediapipe/drawing_utils";
import Webcam from "react-webcam";
import "../stream/StreamComponent.css";
function Filter() {
  const webcamRef = useRef(null);
  const canvasRef = useRef(null);
  // drawConnectors is imported from @mediapipe/drawing_utils
  // instead of being read off the global window object
  const connect = drawConnectors;
  function onResults(results) {
    const videoWidth = webcamRef.current.video.videoWidth;
    const videoHeight = webcamRef.current.video.videoHeight;

    // Match the canvas size to the incoming video frame
    canvasRef.current.width = videoWidth;
    canvasRef.current.height = videoHeight;

    const canvasElement = canvasRef.current;
    const canvasCtx = canvasElement.getContext("2d");
    canvasCtx.save();
    canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
    canvasCtx.drawImage(
      results.image,
      0,
      0,
      canvasElement.width,
      canvasElement.height
    );
    if (results.multiFaceLandmarks) {
      for (const landmarks of results.multiFaceLandmarks) {
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_TESSELATION, {
          color: "#C0C0C070",
          lineWidth: 1,
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_RIGHT_EYE, {
          color: "#FF3030",
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_RIGHT_EYEBROW, {
          color: "#FF3030",
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_LEFT_EYE, {
          color: "#30FF30",
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_LEFT_EYEBROW, {
          color: "#30FF30",
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_FACE_OVAL, {
          color: "#E0E0E0",
        });
        connect(canvasCtx, landmarks, Facemesh.FACEMESH_LIPS, {
          color: "#E0E0E0",
        });
      }
    }
    canvasCtx.restore();
  }
  useEffect(() => {
    const faceMesh = new FaceMesh({
      locateFile: (file) => {
        return `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}`;
      },
    });
    faceMesh.setOptions({
      maxNumFaces: 1,
      minDetectionConfidence: 0.5,
      minTrackingConfidence: 0.5,
    });
    faceMesh.onResults(onResults);

    if (webcamRef.current !== null) {
      // Feed each webcam frame to FaceMesh
      const camera = new cam.Camera(webcamRef.current.video, {
        onFrame: async () => {
          await faceMesh.send({ image: webcamRef.current.video });
        },
        width: 640,
        height: 480,
      });
      camera.start();
    }
  }, []);
  return (
    <center>
      <div className="Filter">
        <Webcam
          ref={webcamRef}
          style={{
            position: "absolute",
            marginLeft: "auto",
            marginRight: "auto",
            left: 0,
            right: 0,
            textAlign: "center",
            zIndex: 9,
            width: 640,
            height: 480,
          }}
        />
        <canvas
          ref={canvasRef}
          className="output_canvas"
          style={{
            position: "absolute",
            marginLeft: "auto",
            marginRight: "auto",
            left: 0,
            right: 0,
            textAlign: "center",
            zIndex: 9,
            width: 640,
            height: 480,
          }}
        ></canvas>
      </div>
    </center>
  );
}
export default Filter;
Now I'm trying to send the filtered screen through OpenVidu.
The OpenVidu component that renders the default camera stream is as follows:
import React, { Component } from "react";
import "./StreamComponent.css";

export default class OvVideoComponent extends Component {
  constructor(props) {
    super(props);
    this.videoRef = React.createRef();
  }

  componentDidMount() {
    if (this.props && this.props.user.streamManager && !!this.videoRef) {
      console.log("PROPS: ", this.props);
      this.props.user.getStreamManager().addVideoElement(this.videoRef.current);
    }
  }

  componentDidUpdate(prevProps) {
    if (prevProps && !!this.videoRef) {
      this.props.user.getStreamManager().addVideoElement(this.videoRef.current);
    }
  }

  render() {
    return (
      <video
        autoPlay={true}
        id={"video-" + this.props.user.getStreamManager().stream.streamId}
        ref={this.videoRef}
        muted={this.props.mutedSound}
      />
    );
  }
}
I think I need to feed a canvas object, or a stream from canvas.captureStream(), into **props.user.getStreamManager().addVideoElement()**, but I don't know how to apply it.
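Here is a rough sketch of the direction I'm imagining, assuming openvidu-browser lets me pass a MediaStreamTrack as the videoSource of initPublisher (the frame rate and variable names are just placeholders, and I haven't verified any of this):

import { OpenVidu } from "openvidu-browser";

const OV = new OpenVidu();

// Capture the filtered canvas (the one onResults() draws into) as a MediaStream.
// 30 fps is an arbitrary placeholder value.
const canvasStream = canvasRef.current.captureStream(30);
const filteredTrack = canvasStream.getVideoTracks()[0];

// Publish the canvas track instead of the raw webcam track.
const publisher = OV.initPublisher(undefined, {
  videoSource: filteredTrack, // filtered frames from the canvas
  audioSource: undefined,     // keep the default microphone
  publishAudio: true,
  publishVideo: true,
});

// After joining a session elsewhere in the app:
// session.publish(publisher);

Alternatively, if the plain webcam stream is already being published, maybe publisher.replaceTrack(filteredTrack) could swap the video track in place, but I don't know how either approach connects to addVideoElement().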
It's my first time using OpenVidu, so I'm facing a lot of difficulties.
I'm sorry if my explanation isn't clear enough.