diff --git a/client/src/components/AudioOutput.jsx b/client/src/components/AudioOutput.jsx
index ec77cec..5676de8 100644
--- a/client/src/components/AudioOutput.jsx
+++ b/client/src/components/AudioOutput.jsx
@@ -1,10 +1,16 @@
import React, { useEffect, useRef } from "react";
-function AudioOutput({ currentAudioMessage, onFinishedPlaying }) {
+function AudioOutput({ currentAudioMessage, onFinishedPlaying, stopAudio }) {
const audioRef = useRef(null);
const urlRef = useRef(null);
const checkPlaybackIntervalRef = useRef(null);
+ useEffect(() => {
+ audioRef.current && audioRef.current.pause();
+
+ console.log("Stopping audio...");
+ }, [stopAudio]);
+
useEffect(() => {
// Initialize the audio element if it does not exist
if (!audioRef.current) {
diff --git a/client/src/components/ConversationControls.jsx b/client/src/components/ConversationControls.jsx
index e7afbf9..ae0fabd 100644
--- a/client/src/components/ConversationControls.jsx
+++ b/client/src/components/ConversationControls.jsx
@@ -1,10 +1,24 @@
import React from "react";
-function ConversationControls({ isPaused, onPauseResume, onSkipForward }) {
+function ConversationControls({
+ isPaused,
+ onPauseResume,
+ onSkipForward,
+ onRaiseHandOrNevermind,
+ onSubmit,
+ isRaisedHand,
+ humanInterjection,
+}) {
return (
{foods.map((food, index) => (
diff --git a/client/src/components/HumanInput.jsx b/client/src/components/HumanInput.jsx
new file mode 100644
index 0000000..2a8af1e
--- /dev/null
+++ b/client/src/components/HumanInput.jsx
@@ -0,0 +1,19 @@
+import React from "react";
+
+function HumanInput({ onAddNewTopic }) {
+ function handleOnInput(e) {
+ onAddNewTopic(e.target.value);
+ }
+
+ return (
+
+
+
+ );
+}
+
+export default HumanInput;
diff --git a/client/src/components/Output.jsx b/client/src/components/Output.jsx
index bb443f6..0d73d99 100644
--- a/client/src/components/Output.jsx
+++ b/client/src/components/Output.jsx
@@ -2,49 +2,123 @@ import React, { useState, useEffect } from "react";
import TextOutput from "./TextOutput";
import AudioOutput from "./AudioOutput";
-function Output({ textMessages, audioMessages, isActiveOverlay }) {
+function Output({
+ textMessages,
+ audioMessages,
+ isActiveOverlay,
+ isRaisedHand,
+ onIsReady,
+ onHumanInterjection,
+ humanInterjection,
+ skipForward,
+ interjectionReplyRecieved,
+ onResetInterjectionReply,
+}) {
const [currentMessageIndex, setCurrentMessageIndex] = useState(0);
const [currentTextMessage, setCurrentTextMessage] = useState(null);
const [currentAudioMessage, setCurrentAudioMessage] = useState(null);
+ const [stopAudio, setStopAudio] = useState(false);
useEffect(() => {
- tryFindTextAndAudio();
- }, [currentMessageIndex, textMessages, audioMessages]);
+ if (interjectionReplyRecieved) {
+ console.log(
+ "Should be about dancing: ",
+ textMessages[textMessages.length - 1].text
+ );
- function tryFindTextAndAudio() {
+ setCurrentTextMessage(textMessages[textMessages.length - 1]);
+ setCurrentAudioMessage(audioMessages[audioMessages.length - 1]);
+
+ onResetInterjectionReply();
+ }
+ }, [interjectionReplyRecieved]);
+
+ useEffect(() => {
+ if (currentTextMessage && currentAudioMessage) {
+ proceedToNextMessage();
+ }
+ }, [skipForward]);
+
+  // useEffect for checking for raised hand when changing message index (in between food talking)
+ useEffect(() => {
+ // Check to see if the hand is raised
+ if (isRaisedHand) {
+ setStopAudio(!stopAudio);
+ onHumanInterjection(true);
+ } else {
+ findTextAndAudio();
+ }
+ }, [currentMessageIndex]);
+
+ // useEffect for nevermind when adding new input
+ useEffect(() => {
+ if (
+ !isRaisedHand &&
+ currentTextMessage === null &&
+ currentAudioMessage === null &&
+ !interjectionReplyRecieved
+ ) {
+ onHumanInterjection(false);
+ findTextAndAudio();
+ }
+ }, [isRaisedHand]);
+
+ useEffect(() => {
+ console.log("Amount of text messages :", textMessages.length);
+ console.log("Amount of voice messages :", audioMessages.length);
+
+ console.log("Text messages:", textMessages);
+ console.log("Audio messages:", audioMessages);
+
+ findTextAndAudio();
+ }, [textMessages, audioMessages]);
+
+ function findTextAndAudio() {
const textMessage = textMessages[currentMessageIndex];
- const audioMessage = audioMessages.find(
- (a) => a.message_index === currentMessageIndex
- );
+ const audioMessage = audioMessages.find((a) => a.id === textMessage.id);
if (
textMessage &&
audioMessage &&
!currentTextMessage &&
- !currentAudioMessage
+ !currentAudioMessage &&
+ !isRaisedHand &&
+ !interjectionReplyRecieved
) {
console.log("Both found!");
+      // Set isReady to true in the parent component to render the controls (for skipping forward etc.)
+ onIsReady();
+
setCurrentTextMessage((prev) => textMessage);
setCurrentAudioMessage((prev) => audioMessage);
}
}
- function handleOnFinishedPlaying() {
- setCurrentTextMessage((prev) => null);
- setCurrentAudioMessage((prev) => null);
+ function proceedToNextMessage() {
+ setCurrentTextMessage(() => null);
+ setCurrentAudioMessage(() => null);
+
+ console.log("Current index: ", currentMessageIndex);
setCurrentMessageIndex((prev) => prev + 1);
}
+ function handleOnFinishedPlaying() {
+ proceedToNextMessage();
+ }
+
return (
<>
-
+ {!humanInterjection && (
+
+ )}
>
);
diff --git a/client/src/components/TextOutput.jsx b/client/src/components/TextOutput.jsx
index 4563ff2..f5632f8 100644
--- a/client/src/components/TextOutput.jsx
+++ b/client/src/components/TextOutput.jsx
@@ -1,6 +1,6 @@
import React, { useState, useEffect } from "react";
-function TextOutput({ currentTextMessage }) {
+function TextOutput({ currentTextMessage, currentAudioMessage }) {
const [currentSnippetIndex, setCurrentSnippetIndex] = useState(0);
const [currentSnippet, setCurrentSnippet] = useState("");
@@ -10,7 +10,7 @@ function TextOutput({ currentTextMessage }) {
}, [currentTextMessage]);
useEffect(() => {
- if (currentTextMessage && currentTextMessage.text) {
+ if (currentTextMessage && currentTextMessage.text && currentAudioMessage) {
const text = currentTextMessage.text;
// Split the text into sentences, ignoring periods followed by a number
const sentences = text.split(/(?<=[.!?])(?=\s+(?![0-9]))/);
@@ -23,7 +23,12 @@ function TextOutput({ currentTextMessage }) {
}, calculateDisplayTime(currentSnippet) * 1000);
return () => clearInterval(interval);
}
- }, [currentTextMessage, currentSnippetIndex]);
+ }, [
+ currentTextMessage,
+ currentSnippetIndex,
+ currentAudioMessage,
+ currentSnippet,
+ ]);
// Calculate the display time based on the number of characters in the snippet
const calculateDisplayTime = (text) => {