<aside> 📌

Replace [Assignment name][School][Name] with your own, and add the assignment tags as explained by your teacher.

</aside>

3️⃣ Controlling the Pico with a Web AI Model
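
The page below recognizes hand gestures with MediaPipe Hands and sends a one-character signal to the Pico over Web Bluetooth (the Nordic UART service). The gesture-to-signal mapping implemented in the code is:

- One open hand (3 or more fingers up) → "1"
- One closed hand → "2"
- Two open hands → "3"
- Two closed hands → "4"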

<!DOCTYPE html>
<html lang="ko">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Hand Gesture BLE Transmission</title>
  <style>
    body {
      margin: 0;
      background: #000;
      color: #0f0;
      font-family: Arial, sans-serif;
      overflow: hidden;
    }
    video, canvas {
      position: absolute;
      top: 0; left: 0;
      width: 100vw;
      height: 100vh;
      object-fit: cover;
      z-index: 0;
    }
    #gestureDisplay {
      position: absolute;
      top: 20px;
      left: 50%;
      transform: translateX(-50%);
      z-index: 10;
      font-size: 3em;
      background: rgba(0, 0, 0, 0.7);
      padding: 10px 30px;
      border-radius: 20px;
      color: #0f0;
    }
    #connectButton {
      position: absolute;
      top: 20px;
      right: 20px;
      z-index: 10;
      font-size: 1em;
      padding: 10px 20px;
    }
  </style>
</head>
<body>

<div id="gestureDisplay">손바닥 감지 중...</div>
<button id="connectButton">BLE 연결</button>
<video id="video" autoplay muted playsinline></video>
<canvas id="canvas"></canvas>

<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/hands/hands.js>"></script>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js>"></script>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js>"></script>

<script>
  const videoElement = document.getElementById('video');
  const canvasElement = document.getElementById('canvas');
  const canvasCtx = canvasElement.getContext('2d');
  const gestureDisplay = document.getElementById('gestureDisplay');
  let lastSignal = null;

  // BLE connection state (Nordic UART Service UUIDs)
  let device, server, writeCharacteristic;
  const SERVICE_UUID = '6e400001-b5a3-f393-e0a9-e50e24dcca9e';
  const TX_CHAR_UUID = '6e400002-b5a3-f393-e0a9-e50e24dcca9e'; // NUS RX characteristic; the browser writes to it

  document.getElementById('connectButton').addEventListener('click', async () => {
    try {
      device = await navigator.bluetooth.requestDevice({
        filters: [{ services: [SERVICE_UUID] }]
      });
      server = await device.gatt.connect();
      const service = await server.getPrimaryService(SERVICE_UUID);
      writeCharacteristic = await service.getCharacteristic(TX_CHAR_UUID);
      alert('BLE connected');
    } catch (e) {
      alert('BLE connection failed: ' + e);
    }
  });

  async function sendBLESignal(signal) {
    if (writeCharacteristic) {
      const encoded = new TextEncoder().encode(signal);
      await writeCharacteristic.writeValue(encoded);
    }
  }

  // Count extended fingers from MediaPipe's 21 hand landmarks.
  function countFingers(landmarks) {
    const tips = [8, 12, 16, 20]; // fingertip indices: index, middle, ring, pinky
    const mcp = [5, 9, 13, 17];   // matching knuckle (MCP) indices
    let count = 0;
    // Thumb: tip to the left of the IP joint (assumes one hand orientation).
    if (landmarks[4].x < landmarks[3].x) count++;
    for (let i = 0; i < tips.length; i++) {
      // A finger is up when its tip is above the knuckle (y grows downward).
      if (landmarks[tips[i]].y < landmarks[mcp[i]].y) count++;
    }
    return count;
  }

  function interpretGesture(results) {
    const hands = results.multiHandLandmarks;
    if (!hands || hands.length === 0) {
      gestureDisplay.textContent = 'Detecting hands...';
      return;
    }

    const fingerCounts = hands.map(hand => countFingers(hand));
    let signal = null;

    if (hands.length === 1) {
      signal = (fingerCounts[0] >= 3) ? "1" : "2";
    } else if (hands.length === 2) {
      if (fingerCounts[0] >= 3 && fingerCounts[1] >= 3) {
        signal = "3";
      } else if (fingerCounts[0] < 3 && fingerCounts[1] < 3) {
        signal = "4";
      }
    }

    if (signal && signal !== lastSignal) {
      gestureDisplay.textContent = `Signal: ${signal}`;
      console.log("Sending signal:", signal);
      sendBLESignal(signal);
      lastSignal = signal;
    }
  }

  const hands = new Hands({
    locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/hands/${file}`
  });
  hands.setOptions({ maxNumHands: 2, modelComplexity: 1, minDetectionConfidence: 0.8, minTrackingConfidence: 0.8 });
  hands.onResults((results) => {
    canvasElement.width = videoElement.videoWidth;
    canvasElement.height = videoElement.videoHeight;
    canvasCtx.save();
    canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
    canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
    if (results.multiHandLandmarks) {
      for (const landmarks of results.multiHandLandmarks) {
        drawConnectors(canvasCtx, landmarks, HAND_CONNECTIONS, { color: '#0f0', lineWidth: 3 });
        drawLandmarks(canvasCtx, landmarks, { color: '#ff0', lineWidth: 2 });
      }
    }
    interpretGesture(results);
    canvasCtx.restore();
  });

  const camera = new Camera(videoElement, {
    onFrame: async () => { await hands.send({ image: videoElement }); },
    width: 640,
    height: 480
  });

  camera.start();
</script>

</body>
</html>
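
The page connects only once; if the Pico resets or moves out of range the link silently drops. Below is a minimal reconnect sketch, assuming it is called right after the connect button handler succeeds and reusing the page's device, server, SERVICE_UUID, TX_CHAR_UUID, and writeCharacteristic variables. The helper name and retry policy are assumptions, not part of the original page.

// Hypothetical add-on: call watchDisconnect() once after a successful connect.
function watchDisconnect() {
  // 'gattserverdisconnected' is the standard Web Bluetooth disconnect event.
  device.addEventListener('gattserverdisconnected', async () => {
    writeCharacteristic = null; // sendBLESignal() skips writes while disconnected
    try {
      server = await device.gatt.connect();
      const service = await server.getPrimaryService(SERVICE_UUID);
      writeCharacteristic = await service.getCharacteristic(TX_CHAR_UUID);
      console.log('BLE reconnected');
    } catch (e) {
      console.log('Reconnect failed: ' + e); // press the connect button again
    }
  });
}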

2️⃣ Loading MediaPipe on the Web

1️⃣ Building Your Own Website with CodePen

Test website (gesture recognition only; no BLE transmission)

<!DOCTYPE html>
<html lang="ko">
<head>
  <meta charset="UTF-8">
  <title>Hand Gesture Signal Recognition</title>
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <style>
    body {
      margin: 0;
      background: #000;
      color: #0f0;
      font-family: Arial, sans-serif;
      overflow: hidden;
    }
    video, canvas {
      position: absolute;
      top: 0; left: 0;
      width: 100vw;
      height: 100vh;
      object-fit: cover;
      z-index: 0;
    }
    #gestureDisplay {
      position: absolute;
      top: 20px;
      left: 50%;
      transform: translateX(-50%);
      z-index: 10;
      font-size: 3em;
      background: rgba(0, 0, 0, 0.7);
      padding: 10px 30px;
      border-radius: 20px;
      color: #0f0;
    }
  </style>
</head>
<body>

<div id="gestureDisplay">손바닥 감지 중...</div>
<video id="video" autoplay muted playsinline></video>
<canvas id="canvas"></canvas>

<!-- MediaPipe -->
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/hands/hands.js>"></script>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js>"></script>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js>"></script>

<script>
  const videoElement = document.getElementById('video');
  const canvasElement = document.getElementById('canvas');
  const canvasCtx = canvasElement.getContext('2d');
  const gestureDisplay = document.getElementById('gestureDisplay');

  let lastSignal = null;

  // Count extended fingers from MediaPipe's 21 hand landmarks.
  function countFingers(landmarks) {
    const tips = [8, 12, 16, 20]; // fingertip indices: index, middle, ring, pinky
    const mcp = [5, 9, 13, 17];   // matching knuckle (MCP) indices
    let count = 0;

    // Thumb: tip to the left of the IP joint (assumes one hand orientation).
    const thumbOpen = landmarks[4].x < landmarks[3].x;
    if (thumbOpen) count++;

    // A finger is up when its tip is above the knuckle (y grows downward).
    for (let i = 0; i < tips.length; i++) {
      if (landmarks[tips[i]].y < landmarks[mcp[i]].y) count++;
    }

    return count;
  }

  function interpretGesture(results) {
    const hands = results.multiHandLandmarks;
    if (!hands || hands.length === 0) {
      gestureDisplay.textContent = 'Detecting hands...';
      return;
    }

    const fingerCounts = hands.map(hand => countFingers(hand));
    let signal = null;

    if (hands.length === 1) {
      signal = (fingerCounts[0] >= 3) ? "1" : "2";
    } else if (hands.length === 2) {
      if (fingerCounts[0] >= 3 && fingerCounts[1] >= 3) {
        signal = "3";
      } else if (fingerCounts[0] < 3 && fingerCounts[1] < 3) {
        signal = "4";
      }
    }

    if (signal) {
      gestureDisplay.textContent = `Signal: ${signal}`;
      if (signal !== lastSignal) {
        console.log(signal);
        lastSignal = signal;
      }
    }
  }

  const hands = new Hands({
    locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/hands/${file}`
  });

  hands.setOptions({
    maxNumHands: 2,
    modelComplexity: 1,
    minDetectionConfidence: 0.8,
    minTrackingConfidence: 0.8
  });

  hands.onResults((results) => {
    canvasElement.width = videoElement.videoWidth;
    canvasElement.height = videoElement.videoHeight;

    canvasCtx.save();
    canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
    canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);

    if (results.multiHandLandmarks) {
      for (const landmarks of results.multiHandLandmarks) {
        drawConnectors(canvasCtx, landmarks, HAND_CONNECTIONS, { color: '#0f0', lineWidth: 3 });
        drawLandmarks(canvasCtx, landmarks, { color: '#ff0', lineWidth: 2 });
      }
    }

    interpretGesture(results);
    canvasCtx.restore();
  });

  const camera = new Camera(videoElement, {
    onFrame: async () => {
      await hands.send({ image: videoElement });
    },
    width: 640,
    height: 480
  });

  camera.start();
</script>

</body>
</html>
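
The thumb check above compares raw x coordinates (landmarks[4].x < landmarks[3].x), so its result flips for the opposite hand or a mirrored camera. Below is a sketch of a handedness-aware variant using the multiHandedness labels that MediaPipe Hands returns alongside the landmarks; the function name and the per-hand comparison direction are illustrative assumptions.

// Hypothetical variant: orient the thumb test by MediaPipe's handedness label.
// results.multiHandedness[i].label is 'Left' or 'Right' for hand i.
function countFingersOriented(landmarks, label) {
  const tips = [8, 12, 16, 20];
  const mcp = [5, 9, 13, 17];
  let count = 0;
  // Assumption: the camera image is not mirrored before hands.send().
  const thumbOpen = (label === 'Right')
    ? landmarks[4].x < landmarks[3].x
    : landmarks[4].x > landmarks[3].x;
  if (thumbOpen) count++;
  for (let i = 0; i < tips.length; i++) {
    if (landmarks[tips[i]].y < landmarks[mcp[i]].y) count++;
  }
  return count;
}

// Usage inside interpretGesture():
//   const fingerCounts = hands.map((hand, i) =>
//     countFingersOriented(hand, results.multiHandedness[i].label));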
