refactor code detection + classification

This commit is contained in:
mphstar 2025-02-22 15:16:09 +07:00
parent 8bbd0906d7
commit a3a2bd255b
8 changed files with 257 additions and 130 deletions

View File

@@ -1,29 +1,22 @@
import { lazy, Suspense } from "react";
import { Suspense } from "react";
import { Route, Routes } from "react-router-dom";
import Kamus from "./pages/Kamus";
const Home = lazy(() => import("@/pages/Home"));
import myRoute from "./routes/routes";
const App = () => {
return (
<Routes>
<Route path="/">
<Route
index
element={
<Suspense fallback={<div>Loading...</div>}>
<Home />
</Suspense>
}
/>
<Route
path="/kamus"
element={
<Suspense fallback={<div>Loading...</div>}>
<Kamus />
</Suspense>
}
/>
{myRoute.map((route, index) => (
<Route
key={index}
path={route.path}
element={
<Suspense fallback={<div>Loading...</div>}>
<route.component />
</Suspense>
}
/>
))}
</Route>
</Routes>
);

View File

@ -17,7 +17,7 @@ const HeaderPage = () => {
/>
<div className="form-control">
<h1 className="font-semibold">K-SULI</h1>
<p className="text-gray-600">Kedai Susu Tuli</p>
<p className="text-primary">Kedai Susu Tuli</p>
</div>
</div>
<ul
@ -44,7 +44,7 @@ const HeaderPage = () => {
isActive={navStore.navSelected === "kamus"}
/>
<NavLink
href="/"
href="/kuis"
name="Kuis"
isActive={navStore.navSelected === "kuis"}
/>

View File

@@ -0,0 +1,58 @@
import ConvertResult from "@/utils/ConvertResult";
import * as tf from "@tensorflow/tfjs";
/**
 * Thin wrapper around a TensorFlow.js layers model that classifies a
 * pre-processed hand-landmark feature vector into a SIBI letter.
 */
class DetectionHelper {
  // Lazily-loaded classifier; stays undefined until loadModel resolves
  // (or forever, if loading fails).
  model: tf.LayersModel | undefined;

  constructor() {
    // Fire-and-forget: callers must tolerate the model still being
    // undefined (makePrediction guards for this).
    this.loadModel();
  }

  /**
   * Loads the classifier from the app's public model directory.
   * Failures are deliberately swallowed (best-effort load) — the model
   * simply stays undefined and makePrediction reports the problem.
   */
  loadModel = async () => {
    try {
      this.model = await tf.loadLayersModel("/model/model.json");
    } catch (error) {
      // console.error("Error loading model:", error);
    }
  };

  /**
   * Runs the classifier on one feature vector.
   *
   * @param finalResult flat numeric feature vector for a single sample.
   * @returns `{ abjad, acc }` — predicted letter plus its confidence as a
   *   percentage string — or `undefined` when the model is not loaded yet.
   */
  makePrediction = async (finalResult: any) => {
    // Guard BEFORE allocating tensors: the original created `input`
    // first and leaked it on this early return (tfjs tensors are not
    // garbage-collected).
    if (!this.model) {
      console.error("Model is not initialized.");
      return;
    }

    const input = tf.tensor2d([finalResult]);
    const prediction = this.model.predict(input) as tf.Tensor;
    const scores = prediction.dataSync();

    // Argmax over the class scores.
    let maxIndex = 0;
    let maxValue = scores[0];
    for (let i = 1; i < scores.length; i++) {
      if (scores[i] > maxValue) {
        maxValue = scores[i];
        maxIndex = i;
      }
    }
    const percentageValue = (maxValue * 100).toFixed(2) + "%";

    // Release tensor memory explicitly.
    input.dispose();
    prediction.dispose();

    return {
      abjad: ConvertResult(maxIndex),
      acc: percentageValue
    }
  };
}
export default DetectionHelper;

View File

@@ -0,0 +1,93 @@
import calcLandmarkList from "@/utils/CalculateLandmark";
import preProcessLandmark from "@/utils/PreProcessLandmark";
import { FilesetResolver, HandLandmarker } from "@mediapipe/tasks-vision";
import { RefObject } from "react";
/**
 * Drives MediaPipe hand-landmark detection over a <video> element and
 * continuously publishes the latest pre-processed right-hand landmark
 * vector for the consumer to poll via getResult().
 */
class MediapipeHelper {
  handLandmarker: HandLandmarker | undefined;
  videoRef: React.RefObject<HTMLVideoElement>;

  // Explicit type: with only the empty-literal initializer, strict TS
  // infers `finalResult: never[]`, which rejects the later assignment
  // of the processed landmark array.
  private result: { handPresence: boolean; finalResult: number[] } = {
    handPresence: false,
    finalResult: [],
  };

  /** Returns the most recent detection snapshot (polled each frame by the consumer). */
  getResult = () => {
    return this.result;
  };

  constructor(video: RefObject<HTMLVideoElement>) {
    this.videoRef = video;
    // Fire-and-forget: the loop starts once the model has downloaded.
    this.initializeHandDetection();
  }

  /**
   * Downloads the MediaPipe vision WASM bundle plus the hand-landmarker
   * model, then starts the per-frame detection loop.
   */
  initializeHandDetection = async () => {
    try {
      const vision = await FilesetResolver.forVisionTasks(
        "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm"
      );
      this.handLandmarker = await HandLandmarker.createFromOptions(vision, {
        baseOptions: {
          modelAssetPath: `https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task`,
        },
        numHands: 2,
        runningMode: "VIDEO",
      });
      this.detectHands();
    } catch (error) {
      console.error("Error initializing hand detection:", error);
    }
  };

  /**
   * One iteration of the detection loop; always reschedules itself with
   * requestAnimationFrame so detection survives transient unready states.
   */
  detectHands = async () => {
    if (this.videoRef.current === null) {
      // Video element not mounted yet — retry next frame. (A bare
      // return here would kill the loop permanently if the first call
      // races the mount.)
      requestAnimationFrame(this.detectHands);
      return;
    }
    // readyState >= 2 means at least the current frame's data is available.
    if (this.videoRef.current.readyState >= 2) {
      if (!this.handLandmarker) {
        console.error("HandLandmarker is not initialized.");
        return;
      }
      const detections = this.handLandmarker.detectForVideo(
        this.videoRef.current,
        performance.now()
      );
      // Default to "no hand" for this frame; overwritten below on a hit.
      this.result = {
        handPresence: false,
        finalResult: [],
      };
      if (detections.landmarks) {
        if (detections.handednesses.length > 0) {
          // Only a right hand is classified; anything else stays "no hand".
          if (detections.handednesses[0][0].displayName === "Right") {
            const landm = detections.landmarks[0].map((landmark) => landmark);
            const calt = calcLandmarkList(this.videoRef.current, landm);
            const finalResult = preProcessLandmark(calt);
            this.result = {
              handPresence: true,
              finalResult: finalResult,
            };
          }
        }
      }
    }
    requestAnimationFrame(this.detectHands);
  };
}
export default MediapipeHelper;

View File

@@ -1,12 +1,9 @@
import LayoutPage from "@/components/templates/LayoutPage";
import { useEffect, useRef, useState } from "react";
import { FaCircleCheck } from "react-icons/fa6";
import * as tf from "@tensorflow/tfjs";
import { FilesetResolver, HandLandmarker } from "@mediapipe/tasks-vision";
import calcLandmarkList from "@/utils/CalculateLandmark";
import preProcessLandmark from "@/utils/PreProcessLandmark";
import ConvertResult from "@/utils/ConvertResult";
import useNavbarStore from "@/stores/NavbarStore";
import MediapipeHelper from "@/helper/MediapipeHelper";
import DetectionHelper from "@/helper/DetectionHelper";
type PredictResult = {
abjad: String;
@ -23,11 +20,31 @@ const Home = () => {
acc: "",
});
let model: tf.LayersModel;
let handLandmarker: HandLandmarker;
const [handPresence, setHandPresence] = useState(false);
const onHandDetected = async () => {
const result = mediapipeHelper.getResult();
if (result.handPresence) {
// console.log("Hand Detected");
setHandPresence(true);
const predict = await detectionHelper.makePrediction(result.finalResult);
console.log(predict);
if (predict) {
setResultPredict((prevState) => ({
...prevState,
...predict,
}));
}
} else {
setHandPresence(false);
}
requestAnimationFrame(onHandDetected);
};
const startWebcam = async () => {
try {
const stream = await navigator.mediaDevices.getUserMedia({
@ -38,120 +55,36 @@ const Home = () => {
videoRef.current.srcObject = stream;
}
// setLoadCamera(true);
await initializeHandDetection();
mediapipeHelper = new MediapipeHelper(videoRef);
detectionHelper = new DetectionHelper();
onHandDetected();
} catch (error) {
console.error("Error accessing webcam:", error);
}
};
const loadModel = async () => {
setLoadCamera(false);
try {
const lm = await tf.loadLayersModel("/model/model.json");
model = lm;
const emptyInput = tf.tensor2d([[0, 0]]);
model.predict(emptyInput) as tf.Tensor;
setLoadCamera(true);
} catch (error) {
// console.error("Error loading model:", error);
}
};
const initializeHandDetection = async () => {
try {
const vision = await FilesetResolver.forVisionTasks(
"https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@latest/wasm"
);
handLandmarker = await HandLandmarker.createFromOptions(vision, {
baseOptions: {
modelAssetPath: `https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task`,
},
numHands: 2,
runningMode: "VIDEO",
});
detectHands();
} catch (error) {
console.error("Error initializing hand detection:", error);
}
};
const makePrediction = async (finalResult: any) => {
const input = tf.tensor2d([finalResult]);
// Melakukan prediksi
const prediction = model.predict(input) as tf.Tensor;
const result = prediction.dataSync();
const maxEntry = Object.entries(result).reduce((max, entry) => {
const [, value] = entry;
return value > max[1] ? entry : max;
});
// maxEntry sekarang berisi [key, value] dengan nilai terbesar
const [maxKey, maxValue] = maxEntry;
const percentageValue = (maxValue * 100).toFixed(2) + "%";
setResultPredict({
abjad: ConvertResult(parseInt(maxKey)),
acc: percentageValue,
});
// Hapus tensor
input.dispose();
prediction.dispose();
};
const detectHands = async () => {
if (videoRef.current && videoRef.current.readyState >= 2) {
const detections = handLandmarker.detectForVideo(
videoRef.current,
performance.now()
);
setHandPresence(detections.handedness.length > 0);
// Assuming detections.landmarks is an array of landmark objects
if (detections.landmarks) {
if (detections.handednesses.length > 0) {
console.log(detections);
if (detections.handednesses[0][0].displayName === "Right") {
const landm = detections.landmarks[0].map((landmark) => landmark);
const calt = calcLandmarkList(videoRef.current, landm);
const finalResult = preProcessLandmark(calt);
makePrediction(finalResult);
} else {
setHandPresence(false);
}
}
}
}
requestAnimationFrame(detectHands);
};
const store = useNavbarStore();
let mediapipeHelper: MediapipeHelper;
let detectionHelper: DetectionHelper;
useEffect(() => {
store.setNavSelected("home");
loadModel();
startWebcam();
setLoadCamera(true);
return () => {
if (handLandmarker) {
handLandmarker.close();
}
};
}, []);

24
src/pages/Kuis.tsx Normal file
View File

@@ -0,0 +1,24 @@
import LayoutPage from "@/components/templates/LayoutPage";
import useNavbarStore from "@/stores/NavbarStore";
import { useEffect } from "react";
/** Quiz page — currently a titled placeholder grid awaiting quiz cards. */
const Kuis = () => {
  const navStore = useNavbarStore();

  // Mark the "kuis" tab as active once on mount.
  useEffect(() => {
    navStore.setNavSelected("kuis");
  }, []);

  return (
    <LayoutPage>
      <div className="flex flex-col flex-1 py-4">
        <h1 className="font-semibold">Kuis SIBI</h1>
        <div className="grid grid-cols-2 md:grid-cols-4 gap-6 mt-6"></div>
      </div>
    </LayoutPage>
  );
};
export default Kuis;

View File

@@ -0,0 +1,26 @@
import { lazy } from "react";
// Lazily-loaded page components so each route is code-split into its
// own chunk and only fetched on first navigation.
const Home = lazy(() => import("@/pages/Home"));
const Kamus = lazy(() => import("@/pages/Kamus"));
const Kuis = lazy(() => import("@/pages/Kuis"));

/** Application route table consumed by the <Routes> mapping in App. */
const myRoute = [
  { title: "Home", path: "/", component: Home },
  { title: "Kamus", path: "/kamus", component: Kamus },
  { title: "Kuis", path: "/kuis", component: Kuis },
];
export default myRoute;

View File

@@ -16,7 +16,7 @@ export default {
themes: [
{
mytheme: {
primary: "#fbbf24",
primary: "#FF6884",
secondary: "#00b44a",
accent: "#0099db",
neutral: "#080f0e",