The goal of this project is to develop technology that lets people who cannot freely use their hands or arms, such as patients with quadriplegia or other physical disabilities, control mobility aids and wheelchairs on their own. To this end, we implemented a system that combines webcam-based head-movement recognition with a micro:bit and Bluetooth communication, so that the user can perform operations such as moving forward, backward, left, and right simply by moving their head. The broader aim is to study control schemes tailored to each user's physical condition. The full controller page is listed below; a sketch of the micro:bit-side receiver follows the page source.
Reference: Face Tracking App 2.0 | LOFI Robot
<!DOCTYPE html>
<html lang="ko">
<head>
<meta charset="UTF-8">
<title>Face Tracking + Micro:bit Controller</title>
<style>
body {
font-family: Arial, sans-serif;
display: flex;
flex-direction: column;
align-items: center;
background: linear-gradient(135deg,#dfe9f3,#ffffff);
min-height: 100vh;
padding: 2rem;
color: #222;
}
h1 { font-size: 1.8rem; font-weight: 700; margin-bottom: 0.5rem; }
#status { margin-bottom:1rem; padding:.5rem 1rem; border-radius:1rem; background:rgba(255,255,255,0.7); font-weight:600; }
video { margin-top: 10px; border-radius:10px; width:400px; border:2px solid #00eaff; }
#gaugeContainer { margin-top:20px; width:420px; display:flex; justify-content:space-between; }
.gauge { width:180px; height:180px; border-radius:50%; border:8px solid #333; position:relative; }
.needle { width:4px; height:70px; background:red; position:absolute; bottom:50%; left:50%; transform-origin:bottom center; }
.label { text-align:center; margin-top:5px; color:#0ff; font-size:20px; }
.valueBox { margin-top:10px; font-size:22px; text-align:center; color:yellow; }
#direction { margin-top:20px; font-size:28px; color:lime; font-weight:bold; }
#connectButton, #disconnectButton { margin:.4rem; padding:.7rem 1.6rem; border:none; border-radius:1rem; font-weight:600; font-size:1rem; color:#fff; cursor:pointer; }
#connectButton { background:#4caf50; } #disconnectButton { background:#f44336; }
#messages { margin-bottom:1rem; padding:.5rem 1rem; border-radius:1rem; background:rgba(255,255,255,0.7); }
</style>
</head>
<body>
<h1>마이크로비트 얼굴 컨트롤러</h1>
<div id="status">미연결</div>
<div id="messages">
<div id="sent">HTML → micro:bit: –</div>
<div id="received">micro:bit → HTML: –</div>
</div>
<div>
<button id="connectButton">🔗 Connect</button>
<button id="disconnectButton" disabled>🔒 Disconnect</button>
</div>
<video id="webcam" autoplay playsinline></video>
<div id="gaugeContainer">
<div>
<div class="gauge"><div id="yawNeedle" class="needle"></div></div>
<div class="label">Yaw</div>
<div id="yawValue" class="valueBox">Yaw: 0°</div>
</div>
<div>
<div class="gauge"><div id="pitchNeedle" class="needle"></div></div>
<div class="label">Pitch</div>
<div id="pitchValue" class="valueBox">Pitch: 0°</div>
</div>
</div>
<div id="direction">대기</div>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh>"></script>
<script src="<https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils>"></script>
<script>
/* ===== micro:bit Bluetooth (Web Bluetooth UART) connection ===== */
const UART_SERVICE_UUID = '6e400001-b5a3-f393-e0a9-e50e24dcca9e';
let device, txChar, rxChar;
let isConnected = false;
let sending = false;
let sendQueue = [];
const statusEl = document.getElementById('status');
const sentEl = document.getElementById('sent');
const receivedEl = document.getElementById('received');
const btnConnect = document.getElementById('connectButton');
const btnDisc = document.getElementById('disconnectButton');
function logStatus(msg){ statusEl.textContent = msg; }
function logSent(cmd){
sentEl.textContent = `HTML → micro:bit: ${cmd}`;
sentEl.classList.remove('sent-active');
void sentEl.offsetWidth;
sentEl.classList.add('sent-active');
}
async function safeSend(cmd){
sendQueue.push(cmd);
if (sending) return;
sending = true;
while (sendQueue.length > 0 && isConnected && txChar){
const next = sendQueue.shift();
const data = new TextEncoder().encode(next + '\n');
try {
if(txChar.properties.writeWithoutResponse) await txChar.writeValueWithoutResponse(data);
else await txChar.writeValue(data);
logSent(next);
await new Promise(r=>setTimeout(r,30));
} catch(e){
logStatus('⚠️ 전송 오류: '+e.message+' (재시도)');
await new Promise(r=>setTimeout(r,100));
}
}
sending=false;
}
btnConnect.addEventListener('click', async ()=>{
try{
logStatus('🔍 디바이스 검색 중…');
device = await navigator.bluetooth.requestDevice({
filters:[{namePrefix:'BBC micro:bit'}],
optionalServices:[UART_SERVICE_UUID]
});
const server = await device.gatt.connect();
logStatus('서비스 연결 중…');
const svc = await server.getPrimaryService(UART_SERVICE_UUID);
const chars = await svc.getCharacteristics();
txChar = rxChar = null;
chars.forEach(ch=>{
const p = ch.properties;
if((p.write||p.writeWithoutResponse)&&!txChar) txChar=ch;
if((p.notify||p.indicate)&&!rxChar) rxChar=ch;
});
if(!txChar || !rxChar) throw new Error('UART characteristics not found');
rxChar.addEventListener('characteristicvaluechanged', e=>{
const v = new TextDecoder().decode(e.target.value).trim();
receivedEl.textContent = `micro:bit → HTML: ${v}`;
});
await rxChar.startNotifications();
isConnected = true;
btnConnect.disabled = true;
btnDisc.disabled = false;
logStatus('✅ 연결 완료');
} catch(e){ logStatus('❌ 연결 실패: '+e.message); }
});
btnDisc.addEventListener('click', ()=>{
if(device?.gatt.connected) device.gatt.disconnect();
isConnected = false;
btnConnect.disabled = false;
btnDisc.disabled = true;
logStatus('🔌 연결 해제됨');
});
/* ===== FaceMesh head-pose tracking and drive-command decision ===== */
const video = document.getElementById("webcam");
const yawNeedle = document.getElementById("yawNeedle");
const pitchNeedle = document.getElementById("pitchNeedle");
const yawValueBox = document.getElementById("yawValue");
const pitchValueBox = document.getElementById("pitchValue");
const directionBox = document.getElementById("direction");
let camera = null;
// Estimate yaw from how far the nose tip sits from the horizontal center of the face,
// and pitch from how the nose tip divides the forehead-to-chin span.
// Landmark indices follow the MediaPipe FaceMesh topology:
// 1 = nose tip, 234/454 = left/right face contour, 10 = forehead, 152 = chin.
function calculateAngles(landmarks){
const noseTip = landmarks[1];
const leftFace = landmarks[234];
const rightFace = landmarks[454];
const forehead = landmarks[10];
const chin = landmarks[152];
const faceCenterX = (leftFace.x + rightFace.x)/2;
const yaw = (noseTip.x - faceCenterX) * 300; // 0° when facing the camera; 300 is an empirical scale factor
const pitch = ((forehead.y - noseTip.y) - (noseTip.y - chin.y)) * 150; // ~0 when level, positive when the head tilts back; 150 is an empirical scale
return { yaw, pitch };
}
function updateGauge(yaw, pitch){
yaw = Math.max(-90, Math.min(90, yaw));
pitch = Math.max(-90, Math.min(90, pitch));
yawNeedle.style.transform = `rotate(${yaw}deg)`;
pitchNeedle.style.transform = `rotate(${pitch}deg)`;
yawValueBox.textContent = `Yaw: ${yaw.toFixed(2)}°`;
pitchValueBox.textContent = `Pitch: ${pitch.toFixed(2)}°`;
// Map head pose to a drive command ('w' forward, 'a' left turn, 'd' right turn); inside the ±20° dead zone no command is sent
let cmd = '';
if(pitch > 20) { cmd = 'w'; directionBox.textContent = '직진'; }
else if(yaw > 20) { cmd = 'd'; directionBox.textContent = '우회전'; }
else if(yaw < -20) { cmd = 'a'; directionBox.textContent = '좌회전'; }
else { directionBox.textContent = '대기'; }
if(cmd) safeSend(cmd);
}
async function startFaceTracking(){
const faceMesh = new FaceMesh({ locateFile:file=>`https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}` });
faceMesh.setOptions({ maxNumFaces:1, refineLandmarks:true, minDetectionConfidence:0.6, minTrackingConfidence:0.6 });
faceMesh.onResults(results=>{
if(results.multiFaceLandmarks?.length>0){
const { yaw, pitch } = calculateAngles(results.multiFaceLandmarks[0]);
updateGauge(yaw, pitch);
}
});
camera = new Camera(video, { onFrame: async ()=>await faceMesh.send({ image:video }), width:640, height:480 });
camera.start();
}
// Prompt for camera permission and show the live preview, then start MediaPipe face tracking on the same <video> element.
navigator.mediaDevices.getUserMedia({ video:true })
.then(stream=>{ video.srcObject=stream; startFaceTracking(); })
.catch(err=>alert("카메라 권한 필요!"));
</script>
</body>
</html>
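
The page above covers only the browser side of the system. For reference, the following is a minimal sketch of a micro:bit-side receiver in MakeCode JavaScript. It is not the project's original firmware: it assumes the MakeCode Bluetooth extension with the UART service enabled, and it drives a motor driver through pins P0–P2, a hypothetical wiring chosen only to keep the example self-contained.

// micro:bit receiver — a minimal MakeCode JavaScript sketch (assumed wiring, not the original firmware).
// Fail-safe: drive no motors until a command arrives, and stop on disconnect.
function stopMotors() {
    pins.digitalWritePin(DigitalPin.P0, 0)
    pins.digitalWritePin(DigitalPin.P1, 0)
    pins.digitalWritePin(DigitalPin.P2, 0)
}

// Show connection state on the LED matrix.
bluetooth.onBluetoothConnected(function () {
    basic.showString("C")
})
bluetooth.onBluetoothDisconnected(function () {
    stopMotors()
    basic.showString("D")
})

// Handle one newline-terminated command from the web page ('w', 'a' or 'd').
bluetooth.onUartDataReceived(serial.delimiters(Delimiters.NewLine), function () {
    const cmd = bluetooth.uartReadUntil(serial.delimiters(Delimiters.NewLine))
    stopMotors()
    if (cmd == "w") {               // forward
        basic.showArrow(ArrowNames.North)
        pins.digitalWritePin(DigitalPin.P0, 1)
    } else if (cmd == "a") {        // left turn
        basic.showArrow(ArrowNames.West)
        pins.digitalWritePin(DigitalPin.P1, 1)
    } else if (cmd == "d") {        // right turn
        basic.showArrow(ArrowNames.East)
        pins.digitalWritePin(DigitalPin.P2, 1)
    }
})

// Start the Bluetooth UART service that the web page connects to.
bluetooth.startUartService()
basic.showString("BT")

Because the web page stops sending commands once the head returns to the dead zone, the last motor state would stay latched in this sketch; a real receiver should add a watchdog that stops the motors when no command has arrived for a few hundred milliseconds.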


