Show green dot when a face is detected
Amalreji111 committed Oct 30, 2024
1 parent 1a093b4 commit c5219f6
Showing 3 changed files with 97 additions and 73 deletions.
23 changes: 21 additions & 2 deletions src/app-front/greeter-demo/GreeterDemo.tsx
@@ -34,6 +34,10 @@ import { useAvailableVoices } from '../../ui/useAvailableVoices';
 import useFaceDetection from './hooks/faceDetection';
 import useScreenAttention from './hooks/screenAttention';
 // width: 100%;
+interface StatusDotProps {
+  status: 'online' | 'offline';
+}
+
 const Container = styled.div`
   height: 100%;
   background: linear-gradient(180deg, #5046E5 0%, #3832A0 50%, #000000 100%); /* Gradient flows from top to bottom, dark at footer */
@@ -227,6 +231,19 @@ const StyledText = styled.span`
   text-align: center;
   padding: 0 10px;
 `;
+const StatusDot = styled.div<StatusDotProps>`
+  width: 20px;
+  height: 20px;
+  border-radius: 50%;
+  background-color: ${({ status }) => (status === 'online' ? 'green' : 'red')};
+`;
+interface StatusIndicatorProps {
+  isOnline: boolean;
+}
+
+const StatusIndicator: React.FC<StatusIndicatorProps> = ({ isOnline }) => (
+  <StatusDot status={isOnline ? 'online' : 'offline'} />
+);
 
 const TypingOverlay = memo(
   ({ text, typingSpeed = 50 }: { text: string; typingSpeed?: number }) => {
@@ -310,7 +327,6 @@ const IntelligageScreen: React.FC = memo(() => {
 
   // const lookingAtScreen=useMemo(()=>isLookingAtScreen,[isLookingAtScreen])
 
-  // console.log("attentionState,",attentionState)
   // useEffect(() => {
   //   if (attentionState.hasGreeted === false) {
   //     ChatStates.addChatMessage({ chat, text: "Hi" });
@@ -321,6 +337,7 @@ const IntelligageScreen: React.FC = memo(() => {
     if (!ttsEnabled) {
       Ttss.enableTts();
     }
+
     // speak({
     //   text:"Hey there!",
 
@@ -337,6 +354,8 @@ const IntelligageScreen: React.FC = memo(() => {
       </WaveAnimation>
 
       <Content style={{ position: "relative" }}>
+        <StatusIndicator isOnline={isLookingAtScreen} />
+
         <ImageContainer >
           {/* <AssistantImage src={girlImage} alt="AI Assistant" /> */}
           {/* <AssistantImage> */}
@@ -354,7 +373,7 @@ const IntelligageScreen: React.FC = memo(() => {
             playsInline
             muted
             style={{
-              position: 'fixed',
+              position: 'absolute',
               top: 0,
               left: 0,
               width: '1px',
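Note on the position: 'fixed' → position: 'absolute' change: the camera <video> stays mounted at 1x1 px instead of being hidden with display: none, since some browsers throttle or stop decoding fully hidden video elements, which would starve the face detector of frames. A minimal sketch of this hidden-feed pattern, separate from the commit (HiddenCameraFeed is an illustrative name, not from this repo):

import React, { RefObject } from 'react';

// Keep the camera stream decoding without showing a visible preview.
// The element must stay rendered: display: none can pause decoding in some browsers.
const HiddenCameraFeed = ({ videoRef }: { videoRef: RefObject<HTMLVideoElement> }) => (
  <video
    ref={videoRef}
    playsInline // avoid fullscreen takeover on iOS Safari
    muted // muted playback satisfies browser autoplay policies
    style={{
      position: 'absolute', // scrolls with the layout instead of pinning to the viewport
      top: 0,
      left: 0,
      width: '1px',
      height: '1px',
      opacity: 0,
    }}
  />
);

export default HiddenCameraFeed;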
32 changes: 25 additions & 7 deletions src/app-front/greeter-demo/hooks/faceDetection.tsx
@@ -8,6 +8,8 @@ const useFaceDetection = () => {
   const [faceDetected, setFaceDetected] = useState<boolean>(false);
   const [isLookingAtScreen, setIsLookingAtScreen] = useState<boolean>(false);
   const [faceAttributes, setFaceAttributes] = useState<any>(null);
+  const [stableFaceDetected, setStableFaceDetected] = useState<boolean>(false); // New debounced state
+  const debounceTimeoutRef = useRef<NodeJS.Timeout | null>(null); // Reference for debounce timeout
 
   // Constants for gaze detection
   const ROTATION_THRESHOLD = 15;
@@ -21,19 +23,19 @@ const useFaceDetection = () => {
     const eyeVector = {
       x: rightEye.x - leftEye.x,
       y: rightEye.y - leftEye.y,
-      z: 0
+      z: 0,
     };
 
     const noseVector = {
       x: nose.x - leftEye.x,
       y: nose.y - leftEye.y,
-      z: 0
+      z: 0,
     };
 
     const normalVector = {
       x: eyeVector.y * noseVector.z - eyeVector.z * noseVector.y,
       y: eyeVector.z * noseVector.x - eyeVector.x * noseVector.z,
-      z: eyeVector.x * noseVector.y - eyeVector.y * noseVector.x
+      z: eyeVector.x * noseVector.y - eyeVector.y * noseVector.x,
     };
 
     const dotProduct = normalVector.x * NORMAL_VECTOR.x +
@@ -100,7 +102,7 @@ const useFaceDetection = () => {
       .then((stream) => {
         if (videoRef.current) {
           videoRef.current.srcObject = stream;
-          videoRef.current.play(); // Explicitly call play()
+          videoRef.current.play();
           videoRef.current.onloadeddata = () => {
             processVideo();
           };
@@ -118,7 +120,6 @@ const useFaceDetection = () => {
     const video = videoRef.current;
 
     if (video) {
-      // Create an offscreen canvas for processing
       const canvas = document.createElement('canvas');
       canvas.width = video.videoWidth;
       canvas.height = video.videoHeight;
@@ -168,12 +169,29 @@ const useFaceDetection = () => {
     };
   }, []);
 
+  // Debounce effect for faceDetected
+  useEffect(() => {
+    if (debounceTimeoutRef.current) {
+      clearTimeout(debounceTimeoutRef.current);
+    }
+
+    debounceTimeoutRef.current = setTimeout(() => {
+      setStableFaceDetected(faceDetected);
+    }, 500); // Adjust debounce duration as needed
+
+    return () => {
+      if (debounceTimeoutRef.current) {
+        clearTimeout(debounceTimeoutRef.current);
+      }
+    };
+  }, [faceDetected]);
+
   return {
     videoRef,
     isLoading,
     error,
-    faceDetected,
-    isLookingAtScreen,
+    faceDetected, // raw, per-frame detection
+    isLookingAtScreen: stableFaceDetected, // debounced face presence, for a stable status dot
     faceAttributes,
   };
 };
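Two notes on this hook. Because eyeVector and noseVector both carry z: 0, the cross product normalVector has zero x and y components, so the gaze test effectively reduces to the 2D cross product eyeVector.x * noseVector.y - eyeVector.y * noseVector.x scaled by NORMAL_VECTOR's z component (the x and y terms of the dot product vanish). Also, after this change the returned isLookingAtScreen is wired to the debounced face-presence value, not the per-frame gaze estimate; the raw gaze state is no longer exposed. The debounce itself generalizes into a small reusable hook; a sketch under the same 500 ms assumption (useDebouncedValue is a hypothetical name, not in this repo):

import { useEffect, useRef, useState } from 'react';

// Generic form of the debounce effect above: the returned value only settles
// after delayMs without changes, smoothing per-frame detector flicker into a
// stable on/off signal.
function useDebouncedValue<T>(value: T, delayMs = 500): T {
  const [debounced, setDebounced] = useState(value);
  const timeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  useEffect(() => {
    if (timeoutRef.current) clearTimeout(timeoutRef.current);
    timeoutRef.current = setTimeout(() => setDebounced(value), delayMs);
    return () => {
      if (timeoutRef.current) clearTimeout(timeoutRef.current);
    };
  }, [value, delayMs]);

  return debounced;
}

// Usage, mirroring the hook above:
//   const stableFaceDetected = useDebouncedValue(faceDetected, 500);
export default useDebouncedValue;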
115 changes: 51 additions & 64 deletions src/app-front/greeter-demo/hooks/screenAttention.tsx
@@ -8,72 +8,59 @@ interface AttentionState {
   canGreetAgain: boolean;
 }
 
-const useScreenAttention = (isLookingAtScreen: boolean) => {
-  const [attentionState, setAttentionState] = useState<AttentionState>({
-    hasGreeted: false,
-    lastLookAwayTime: null,
-    canGreetAgain: true
-  });
-  const { chat, messages } = useCurrentChat();
-
-  const MINIMUM_LOOK_AWAY_TIME = 5000; // 5 seconds
-  const timeoutRef = useRef<NodeJS.Timeout | null>(null);
-
-  const speakToUser = () => {
-    // Your TTS function here
-    ChatStates.addChatMessage({ chat, text: "Hi" });
-
-    console.log("Hello! How can I help you today?");
-  };
-
-  useEffect(() => {
-    if (isLookingAtScreen) {
-      // Clear any pending timeout when user looks back
-      if (timeoutRef.current) {
-        clearTimeout(timeoutRef.current);
-        timeoutRef.current = null;
-      }
-
-      // Check if enough time has passed since last look away
-      const canGreet = attentionState.lastLookAwayTime
-        ? Date.now() - attentionState.lastLookAwayTime > MINIMUM_LOOK_AWAY_TIME
+const useScreenAttention = (isLookingAtScreen: boolean) => {
+  const { chat } = useCurrentChat();
+  const attentionStateRef = useRef<AttentionState>({
+    hasGreeted: false,
+    lastLookAwayTime: null,
+    canGreetAgain: true
+  });
+
+  const MINIMUM_LOOK_AWAY_TIME = 5000; // 5 seconds
+  const timeoutRef = useRef<NodeJS.Timeout | null>(null);
+
+  const speakToUser = () => {
+    ChatStates.addChatMessage({ chat, text: "Hi" });
+    console.log("Hello! How can I help you today?");
+  };
+
+  useEffect(() => {
+    const attentionState = attentionStateRef.current;
+
+    if (isLookingAtScreen) {
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current);
+        timeoutRef.current = null;
+      }
+
+      const canGreet =
+        attentionState.lastLookAwayTime
+          ? Date.now() - attentionState.lastLookAwayTime > MINIMUM_LOOK_AWAY_TIME
          : true;
 
-      // Only greet if we haven't greeted and can greet again
-      if (!attentionState.hasGreeted && attentionState.canGreetAgain && canGreet) {
-        speakToUser();
-        setAttentionState(prev => ({
-          ...prev,
-          hasGreeted: true,
-          canGreetAgain: false
-        }));
-      }
-    } else {
-      // User looked away
-      setAttentionState(prev => ({
-        ...prev,
-        lastLookAwayTime: Date.now()
-      }));
-
-      // Start timeout to allow greeting again
-      timeoutRef.current = setTimeout(() => {
-        setAttentionState(prev => ({
-          hasGreeted: false,
-          lastLookAwayTime: null,
-          canGreetAgain: true
-        }));
-      }, MINIMUM_LOOK_AWAY_TIME);
-    }
-
-    // Cleanup
-    return () => {
-      if (timeoutRef.current) {
-        clearTimeout(timeoutRef.current);
-      }
-    };
-  }, [isLookingAtScreen]);
-
-  return attentionState;
-};
+      if (!attentionState.hasGreeted && attentionState.canGreetAgain && canGreet) {
+        speakToUser();
+        attentionState.hasGreeted = true;
+        attentionState.canGreetAgain = false;
+      }
+    } else {
+      attentionState.lastLookAwayTime = Date.now();
+
+      timeoutRef.current = setTimeout(() => {
+        attentionState.hasGreeted = false;
+        attentionState.lastLookAwayTime = null;
+        attentionState.canGreetAgain = true;
+      }, MINIMUM_LOOK_AWAY_TIME);
+    }
+
+    return () => {
+      if (timeoutRef.current) {
+        clearTimeout(timeoutRef.current);
+      }
+    };
+  }, [isLookingAtScreen]);
+
+  return attentionStateRef.current;
+};
 
 export default useScreenAttention;
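The switch from useState to a ref also fixes a latent stale-closure issue: the old effect read attentionState from its closure while listing only isLookingAtScreen as a dependency, so it could act on outdated flags. Mutations through attentionStateRef.current always see current values; the trade-off is that ref writes never trigger a re-render, which is fine here because the greeting happens inside the effect and nothing in the UI redraws from these flags. A minimal sketch of that distinction (illustrative only, not from this repo):

import { useEffect, useRef, useState } from 'react';

// Refs vs. state inside an effect: ref reads/writes are always current and
// cause no re-render; state updates re-render, but direct reads in a stale
// closure see the value captured when the effect was created.
function useRefVsStateDemo(trigger: boolean) {
  const countRef = useRef(0);
  const [count, setCount] = useState(0);

  useEffect(() => {
    countRef.current += 1; // safe without extra dependencies
    setCount((prev) => prev + 1); // functional update avoids the stale closure
  }, [trigger]);

  return { liveCount: countRef.current, renderedCount: count };
}

export default useRefVsStateDemo;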
