From 8e3c97f365768c5d6a355aec23f2abcdd1843df3 Mon Sep 17 00:00:00 2001
From: albin-karlsson <55614148+albin-karlsson@users.noreply.github.com>
Date: Thu, 25 Apr 2024 16:59:08 +0200
Subject: [PATCH 1/4] Add functionality for raising hand
---
.../src/components/ConversationControls.jsx | 11 +++++-
client/src/components/Council.jsx | 23 +++++++++++-
client/src/components/HumanInput.jsx | 15 ++++++++
client/src/components/Output.jsx | 36 +++++++++++++++++--
client/src/components/TextOutput.jsx | 11 ++++--
5 files changed, 89 insertions(+), 7 deletions(-)
create mode 100644 client/src/components/HumanInput.jsx
diff --git a/client/src/components/ConversationControls.jsx b/client/src/components/ConversationControls.jsx
index e7afbf9..1d157d3 100644
--- a/client/src/components/ConversationControls.jsx
+++ b/client/src/components/ConversationControls.jsx
@@ -1,10 +1,19 @@
import React from "react";
-function ConversationControls({ isPaused, onPauseResume, onSkipForward }) {
+function ConversationControls({
+ isPaused,
+ onPauseResume,
+ onSkipForward,
+ onRaiseHandOrNevermind,
+ isRaisedHand,
+}) {
return (
{foods.map((food, index) => (
diff --git a/client/src/components/HumanInput.jsx b/client/src/components/HumanInput.jsx
new file mode 100644
index 0000000..4763592
--- /dev/null
+++ b/client/src/components/HumanInput.jsx
@@ -0,0 +1,15 @@
+import React from "react";
+
+function HumanInput() {
+ return (
+
+
+
+ );
+}
+
+export default HumanInput;
diff --git a/client/src/components/Output.jsx b/client/src/components/Output.jsx
index bb443f6..caac28b 100644
--- a/client/src/components/Output.jsx
+++ b/client/src/components/Output.jsx
@@ -2,14 +2,44 @@ import React, { useState, useEffect } from "react";
import TextOutput from "./TextOutput";
import AudioOutput from "./AudioOutput";
-function Output({ textMessages, audioMessages, isActiveOverlay }) {
+function Output({
+ textMessages,
+ audioMessages,
+ isActiveOverlay,
+ isRaisedHand,
+ onIsReady,
+}) {
const [currentMessageIndex, setCurrentMessageIndex] = useState(0);
const [currentTextMessage, setCurrentTextMessage] = useState(null);
const [currentAudioMessage, setCurrentAudioMessage] = useState(null);
+ // useEffect for raising the hand (or saying never mind) while a food is talking
+ useEffect(() => {
+ if (!isRaisedHand) {
+ tryFindTextAndAudio();
+ } else {
+ console.log("Human interjection time!");
+ }
+ }, [currentMessageIndex]);
+
+ // useEffect for resuming playback after never mind or after new input has been added
+ useEffect(() => {
+ if (
+ !isRaisedHand &&
+ currentTextMessage === null &&
+ currentAudioMessage === null
+ ) {
+ tryFindTextAndAudio();
+ }
+ }, [isRaisedHand]);
+
useEffect(() => {
tryFindTextAndAudio();
- }, [currentMessageIndex, textMessages, audioMessages]);
+ }, [textMessages, audioMessages]);
+
+ useEffect(() => {
+ console.log("Hand raised: ", isRaisedHand);
+ }, [isRaisedHand]);
function tryFindTextAndAudio() {
const textMessage = textMessages[currentMessageIndex];
@@ -25,6 +55,8 @@ function Output({ textMessages, audioMessages, isActiveOverlay }) {
) {
console.log("Both found!");
+ onIsReady();
+
setCurrentTextMessage((prev) => textMessage);
setCurrentAudioMessage((prev) => audioMessage);
}
diff --git a/client/src/components/TextOutput.jsx b/client/src/components/TextOutput.jsx
index 4563ff2..f5632f8 100644
--- a/client/src/components/TextOutput.jsx
+++ b/client/src/components/TextOutput.jsx
@@ -1,6 +1,6 @@
import React, { useState, useEffect } from "react";
-function TextOutput({ currentTextMessage }) {
+function TextOutput({ currentTextMessage, currentAudioMessage }) {
const [currentSnippetIndex, setCurrentSnippetIndex] = useState(0);
const [currentSnippet, setCurrentSnippet] = useState("");
@@ -10,7 +10,7 @@ function TextOutput({ currentTextMessage }) {
}, [currentTextMessage]);
useEffect(() => {
- if (currentTextMessage && currentTextMessage.text) {
+ if (currentTextMessage && currentTextMessage.text && currentAudioMessage) {
const text = currentTextMessage.text;
// Split the text into sentences, ignoring periods followed by a number
const sentences = text.split(/(?<=[.!?])(?=\s+(?![0-9]))/);
@@ -23,7 +23,12 @@ function TextOutput({ currentTextMessage }) {
}, calculateDisplayTime(currentSnippet) * 1000);
return () => clearInterval(interval);
}
- }, [currentTextMessage, currentSnippetIndex]);
+ }, [
+ currentTextMessage,
+ currentSnippetIndex,
+ currentAudioMessage,
+ currentSnippet,
+ ]);
// Calculate the display time based on the number of characters in the snippet
const calculateDisplayTime = (text) => {
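
A minimal sketch of the raise-hand wiring this patch introduces: the flag lives in Council, the toggle handler is passed down, and ConversationControls only reports clicks. The prop and handler names (isRaisedHand, onRaiseHandOrNevermind, handleOnRaiseHandOrNevermind) come from the diff; the button markup and labels are assumptions, since the components' JSX is not shown above.

import React, { useState } from "react";

// Presentational control: reports clicks, renders whichever label applies.
function ConversationControls({ isRaisedHand, onRaiseHandOrNevermind }) {
  return (
    <button onClick={onRaiseHandOrNevermind}>
      {isRaisedHand ? "Never mind" : "Raise hand"}
    </button>
  );
}

// Owner of the flag: toggles it and passes it back down.
function CouncilSketch() {
  const [isRaisedHand, setIsRaisedHand] = useState(false);

  function handleOnRaiseHandOrNevermind() {
    setIsRaisedHand((prev) => !prev);
  }

  return (
    <ConversationControls
      isRaisedHand={isRaisedHand}
      onRaiseHandOrNevermind={handleOnRaiseHandOrNevermind}
    />
  );
}

export default CouncilSketch;
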
From 16a8ef94e9367d289a5b63f7cbb16425d9bd6a50 Mon Sep 17 00:00:00 2001
From: albin-karlsson <55614148+albin-karlsson@users.noreply.github.com>
Date: Thu, 25 Apr 2024 19:39:23 +0200
Subject: [PATCH 2/4] Implement raise hand
---
client/src/components/AudioOutput.jsx | 6 ++-
.../src/components/ConversationControls.jsx | 7 +++-
client/src/components/Council.jsx | 17 ++++++++-
client/src/components/HumanInput.jsx | 3 +-
client/src/components/Output.jsx | 38 +++++++++++++------
5 files changed, 54 insertions(+), 17 deletions(-)
diff --git a/client/src/components/AudioOutput.jsx b/client/src/components/AudioOutput.jsx
index ec77cec..6180d4d 100644
--- a/client/src/components/AudioOutput.jsx
+++ b/client/src/components/AudioOutput.jsx
@@ -1,10 +1,14 @@
import React, { useEffect, useRef } from "react";
-function AudioOutput({ currentAudioMessage, onFinishedPlaying }) {
+function AudioOutput({ currentAudioMessage, onFinishedPlaying, stopAudio }) {
const audioRef = useRef(null);
const urlRef = useRef(null);
const checkPlaybackIntervalRef = useRef(null);
+ useEffect(() => {
+ audioRef.current && audioRef.current.pause();
+ }, [stopAudio]);
+
useEffect(() => {
// Initialize the audio element if it does not exist
if (!audioRef.current) {
diff --git a/client/src/components/ConversationControls.jsx b/client/src/components/ConversationControls.jsx
index 1d157d3..3b73c00 100644
--- a/client/src/components/ConversationControls.jsx
+++ b/client/src/components/ConversationControls.jsx
@@ -6,11 +6,14 @@ function ConversationControls({
onSkipForward,
onRaiseHandOrNevermind,
isRaisedHand,
+ humanInterjection,
}) {
return (
-
-
+ {/* */}
+ {!humanInterjection && (
+
+ )}
diff --git a/client/src/components/Council.jsx b/client/src/components/Council.jsx
index 980dbbf..8b0fb1f 100644
--- a/client/src/components/Council.jsx
+++ b/client/src/components/Council.jsx
@@ -18,6 +18,8 @@ function Council({ options }) {
const [audioMessages, setAudioMessages] = useState([]); // To store multiple ArrayBuffers
const [isReady, setIsReady] = useState(false);
const [isRaisedHand, setIsRaisedHand] = useState(false);
+ const [humanInterjection, setHumanInterjection] = useState(false);
+ const [skipForward, setSkipForward] = useState(false);
const socketRef = useRef(null); // Using useRef to persist socket instance
@@ -71,11 +73,18 @@ function Council({ options }) {
setIsReady(true);
}
+ function handleOnSkipForward() {
+ setSkipForward(!skipForward);
+ }
+
function handleOnRaiseHandOrNevermind() {
- console.log("Setting isRaisedHand...");
setIsRaisedHand((prev) => !prev);
}
+ function handleOnHumanInterjection(value) {
+ setHumanInterjection(value);
+ }
+
function displayResetWarning() {
setActiveOverlay("reset");
}
@@ -96,17 +105,23 @@ function Council({ options }) {
className="text-container"
style={{ justifyContent: "end" }}
>
+ {humanInterjection && <HumanInput />}
{isReady && (
)}
diff --git a/client/src/components/HumanInput.jsx b/client/src/components/HumanInput.jsx
index 4763592..cb4012c 100644
--- a/client/src/components/HumanInput.jsx
+++ b/client/src/components/HumanInput.jsx
@@ -2,11 +2,10 @@ import React from "react";
function HumanInput() {
return (
-
+
);
diff --git a/client/src/components/Output.jsx b/client/src/components/Output.jsx
index caac28b..696db49 100644
--- a/client/src/components/Output.jsx
+++ b/client/src/components/Output.jsx
@@ -8,17 +8,29 @@ function Output({
isActiveOverlay,
isRaisedHand,
onIsReady,
+ onHumanInterjection,
+ humanInterjection,
+ skipForward,
}) {
const [currentMessageIndex, setCurrentMessageIndex] = useState(0);
const [currentTextMessage, setCurrentTextMessage] = useState(null);
const [currentAudioMessage, setCurrentAudioMessage] = useState(null);
+ const [stopAudio, setStopAudio] = useState(false);
- // useEffect for raising the hand (or saying never mind) while a food is talking
+ useEffect(() => {
+ if (currentTextMessage && currentAudioMessage) {
+ console.log("Skipping forward");
+ proceedToNextMessage();
+ }
+ }, [skipForward]);
+
+ // useEffect for checking for a raised hand when the message index changes (in between foods talking)
useEffect(() => {
if (!isRaisedHand) {
tryFindTextAndAudio();
} else {
- console.log("Human interjection time!");
+ setStopAudio(!stopAudio);
+ onHumanInterjection(true);
}
}, [currentMessageIndex]);
@@ -29,6 +41,7 @@ function Output({
currentTextMessage === null &&
currentAudioMessage === null
) {
+ onHumanInterjection(false);
tryFindTextAndAudio();
}
}, [isRaisedHand]);
@@ -37,10 +50,6 @@ function Output({
tryFindTextAndAudio();
}, [textMessages, audioMessages]);
- useEffect(() => {
- console.log("Hand raised: ", isRaisedHand);
- }, [isRaisedHand]);
-
function tryFindTextAndAudio() {
const textMessage = textMessages[currentMessageIndex];
const audioMessage = audioMessages.find(
@@ -62,21 +71,28 @@ function Output({
}
}
- function handleOnFinishedPlaying() {
+ function proceedToNextMessage() {
setCurrentTextMessage((prev) => null);
setCurrentAudioMessage((prev) => null);
setCurrentMessageIndex((prev) => prev + 1);
}
+ function handleOnFinishedPlaying() {
+ proceedToNextMessage();
+ }
+
return (
<>
-
+ {!humanInterjection && (
+
+ )}
</>
);
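
A rough sketch of the stop-audio mechanism added in this patch: AudioOutput keeps the audio element in a ref, and any flip of the stopAudio boolean pauses whatever is playing. The ref, the prop names, and the "initialize if it does not exist" step come from the diff; creating the element with new Audio() and the omitted playback/onFinishedPlaying bookkeeping are assumptions.

import { useEffect, useRef } from "react";

function AudioOutput({ currentAudioMessage, stopAudio }) {
  const audioRef = useRef(null);

  // Any flip of the stopAudio toggle pauses the current playback.
  useEffect(() => {
    if (audioRef.current) {
      audioRef.current.pause();
    }
  }, [stopAudio]);

  useEffect(() => {
    // Initialize the audio element if it does not exist, then load and play
    // currentAudioMessage (loading and finished-playing handling elided here).
    if (!audioRef.current) {
      audioRef.current = new Audio();
    }
  }, [currentAudioMessage]);

  return null; // playback only, nothing to render
}

export default AudioOutput;
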
From ce22340e22b7560754170004879403165d30920a Mon Sep 17 00:00:00 2001
From: albin-karlsson <55614148+albin-karlsson@users.noreply.github.com>
Date: Thu, 25 Apr 2024 20:21:11 +0200
Subject: [PATCH 3/4] Fix bug where audio keeps playing when adding new input
---
client/src/components/AudioOutput.jsx | 2 ++
.../src/components/ConversationControls.jsx | 2 ++
client/src/components/Council.jsx | 5 +++++
client/src/components/Output.jsx | 22 ++++++++++---------
4 files changed, 21 insertions(+), 10 deletions(-)
diff --git a/client/src/components/AudioOutput.jsx b/client/src/components/AudioOutput.jsx
index 6180d4d..5676de8 100644
--- a/client/src/components/AudioOutput.jsx
+++ b/client/src/components/AudioOutput.jsx
@@ -7,6 +7,8 @@ function AudioOutput({ currentAudioMessage, onFinishedPlaying, stopAudio }) {
useEffect(() => {
audioRef.current && audioRef.current.pause();
+
+ console.log("Stopping audio...");
}, [stopAudio]);
useEffect(() => {
diff --git a/client/src/components/ConversationControls.jsx b/client/src/components/ConversationControls.jsx
index 3b73c00..ae0fabd 100644
--- a/client/src/components/ConversationControls.jsx
+++ b/client/src/components/ConversationControls.jsx
@@ -5,6 +5,7 @@ function ConversationControls({
onPauseResume,
onSkipForward,
onRaiseHandOrNevermind,
+ onSubmit,
isRaisedHand,
humanInterjection,
}) {
@@ -14,6 +15,7 @@ function ConversationControls({
{!humanInterjection && (
)}
+ {humanInterjection &&
}
diff --git a/client/src/components/Council.jsx b/client/src/components/Council.jsx
index 8b0fb1f..58df860 100644
--- a/client/src/components/Council.jsx
+++ b/client/src/components/Council.jsx
@@ -77,6 +77,10 @@ function Council({ options }) {
setSkipForward(!skipForward);
}
+ function handleOnSubmit() {
+ console.log("Submitting new issue");
+ }
+
function handleOnRaiseHandOrNevermind() {
setIsRaisedHand((prev) => !prev);
}
@@ -120,6 +124,7 @@ function Council({ options }) {
diff --git a/client/src/components/Output.jsx b/client/src/components/Output.jsx
index 696db49..1a8d865 100644
--- a/client/src/components/Output.jsx
+++ b/client/src/components/Output.jsx
@@ -19,18 +19,18 @@ function Output({
useEffect(() => {
if (currentTextMessage && currentAudioMessage) {
- console.log("Skipping forward");
proceedToNextMessage();
}
}, [skipForward]);
// useEffect for checking for a raised hand when the message index changes (in between foods talking)
useEffect(() => {
- if (!isRaisedHand) {
- tryFindTextAndAudio();
- } else {
+ // Check to see if the hand is raised
+ if (isRaisedHand) {
setStopAudio(!stopAudio);
onHumanInterjection(true);
+ } else {
+ findTextAndAudio();
}
}, [currentMessageIndex]);
@@ -42,15 +42,15 @@ function Output({
currentAudioMessage === null
) {
onHumanInterjection(false);
- tryFindTextAndAudio();
+ findTextAndAudio();
}
}, [isRaisedHand]);
useEffect(() => {
- tryFindTextAndAudio();
+ findTextAndAudio();
}, [textMessages, audioMessages]);
- function tryFindTextAndAudio() {
+ function findTextAndAudio() {
const textMessage = textMessages[currentMessageIndex];
const audioMessage = audioMessages.find(
(a) => a.message_index === currentMessageIndex
@@ -60,10 +60,12 @@ function Output({
textMessage &&
audioMessage &&
!currentTextMessage &&
- !currentAudioMessage
+ !currentAudioMessage &&
+ !isRaisedHand
) {
console.log("Both found!");
+ // Set isReady to true in the parent component to render the controls (for skipping forward, etc.)
onIsReady();
setCurrentTextMessage((prev) => textMessage);
@@ -72,8 +74,8 @@ function Output({
}
function proceedToNextMessage() {
- setCurrentTextMessage((prev) => null);
- setCurrentAudioMessage((prev) => null);
+ setCurrentTextMessage(() => null);
+ setCurrentAudioMessage(() => null);
setCurrentMessageIndex((prev) => prev + 1);
}
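
For reference, the guarded lookup as it stands after this patch, pulled out into a standalone helper so the new !isRaisedHand check is visible in isolation. This is a sketch rather than the applied code: in the component these values come from props and state, and the message shape ({ message_index, ... }) is inferred from the find() call above.

// Returns the text/audio pair for the current index, or null if playback
// should not start yet: the pair is incomplete, something is already playing,
// or the human has raised a hand.
function findTextAndAudio({
  textMessages,
  audioMessages,
  currentMessageIndex,
  currentTextMessage,
  currentAudioMessage,
  isRaisedHand,
}) {
  const textMessage = textMessages[currentMessageIndex];
  const audioMessage = audioMessages.find(
    (a) => a.message_index === currentMessageIndex
  );

  if (
    textMessage &&
    audioMessage &&
    !currentTextMessage &&
    !currentAudioMessage &&
    !isRaisedHand // new in this patch: never start playback while the hand is raised
  ) {
    return { textMessage, audioMessage };
  }

  return null;
}
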
From 0c2895100c0116af4f1992bdc5edcefdb2d2b764 Mon Sep 17 00:00:00 2001
From: albin-karlsson <55614148+albin-karlsson@users.noreply.github.com>
Date: Thu, 25 Apr 2024 22:25:36 +0200
Subject: [PATCH 4/4] Add human interjection
---
client/src/components/Council.jsx | 63 +++++++++++++++++++++++-----
client/src/components/HumanInput.jsx | 7 +++-
client/src/components/Output.jsx | 34 ++++++++++++---
3 files changed, 87 insertions(+), 17 deletions(-)
diff --git a/client/src/components/Council.jsx b/client/src/components/Council.jsx
index 58df860..92e5984 100644
--- a/client/src/components/Council.jsx
+++ b/client/src/components/Council.jsx
@@ -20,6 +20,10 @@ function Council({ options }) {
const [isRaisedHand, setIsRaisedHand] = useState(false);
const [humanInterjection, setHumanInterjection] = useState(false);
const [skipForward, setSkipForward] = useState(false);
+ const [newTopic, setNewTopic] = useState("");
+ const [interjectionCounter, setInterjectionCounter] = useState(-1000);
+ const [interjectionReplyRecieved, setInterjectionReplyRecieved] =
+ useState(false);
const socketRef = useRef(null); // Using useRef to persist socket instance
@@ -35,10 +39,16 @@ function Council({ options }) {
};
useEffect(() => {
+ initializeConversation(); // Start the conversation when the component mounts
+ }, []);
+
+ // Function to initialize or restart the conversation
+ const initializeConversation = (customTopic) => {
+ const topicToSend = customTopic || topic; // Use custom topic if provided, else use default topic
+
socketRef.current = io();
- // Send initial data to start the conversation
- let promptsAndOptions = {
+ const promptsAndOptions = {
options: {
...globalOptions,
humanName,
@@ -46,28 +56,37 @@ function Council({ options }) {
neverMindPrompt: false,
},
name: "New room",
- topic,
+ topic: topicToSend,
characters: foods,
};
+
socketRef.current.emit("start_conversation", promptsAndOptions);
- // Listen for conversation text updates
socketRef.current.on("conversation_update", (textMessage) => {
+ setInterjectionCounter((prev) => prev + 1);
setTextMessages((prev) => [...prev, textMessage]);
});
- // Listen for audio updates
socketRef.current.on("audio_update", (audioMessage) => {
+ setInterjectionCounter((prev) => prev + 1);
setAudioMessages((prevAudioMessages) => [
...prevAudioMessages,
audioMessage,
]);
});
+ };
- return () => {
- socketRef.current.disconnect();
- };
- }, []);
+ useEffect(() => {
+ if (interjectionCounter === 2) {
+ setInterjectionReplyRecieved(true);
+ setHumanInterjection(false);
+ setIsRaisedHand(false);
+ }
+ }, [interjectionCounter]);
+
+ function handleOnResetInterjectionReply() {
+ setInterjectionReplyRecieved(false);
+ }
function handleOnIsReady() {
setIsReady(true);
@@ -78,7 +97,21 @@ function Council({ options }) {
}
function handleOnSubmit() {
- console.log("Submitting new issue");
+ const promptsAndOptions = {
+ options: {
+ ...globalOptions,
+ humanName,
+ raiseHandPrompt: newTopic,
+ neverMindPrompt: false,
+ },
+ name: "New room",
+ topic: newTopic,
+ characters: foods,
+ };
+
+ setInterjectionCounter(() => 0);
+
+ socketRef.current.emit("raise_hand", promptsAndOptions);
}
function handleOnRaiseHandOrNevermind() {
@@ -89,6 +122,10 @@ function Council({ options }) {
setHumanInterjection(value);
}
+ function handleOnAddNewTopic(newTopic) {
+ setNewTopic(newTopic);
+ }
+
function displayResetWarning() {
setActiveOverlay("reset");
}
@@ -109,7 +146,9 @@ function Council({ options }) {
className="text-container"
style={{ justifyContent: "end" }}
>
- {humanInterjection && <HumanInput />}
+ {humanInterjection && (
+
+ )}
{isReady && (
diff --git a/client/src/components/Output.jsx b/client/src/components/Output.jsx
index 1a8d865..0d73d99 100644
--- a/client/src/components/Output.jsx
+++ b/client/src/components/Output.jsx
@@ -11,12 +11,28 @@ function Output({
onHumanInterjection,
humanInterjection,
skipForward,
+ interjectionReplyRecieved,
+ onResetInterjectionReply,
}) {
const [currentMessageIndex, setCurrentMessageIndex] = useState(0);
const [currentTextMessage, setCurrentTextMessage] = useState(null);
const [currentAudioMessage, setCurrentAudioMessage] = useState(null);
const [stopAudio, setStopAudio] = useState(false);
+ useEffect(() => {
+ if (interjectionReplyRecieved) {
+ console.log(
+ "Should be about dancing: ",
+ textMessages[textMessages.length - 1].text
+ );
+
+ setCurrentTextMessage(textMessages[textMessages.length - 1]);
+ setCurrentAudioMessage(audioMessages[audioMessages.length - 1]);
+
+ onResetInterjectionReply();
+ }
+ }, [interjectionReplyRecieved]);
+
useEffect(() => {
if (currentTextMessage && currentAudioMessage) {
proceedToNextMessage();
@@ -39,7 +55,8 @@ function Output({
if (
!isRaisedHand &&
currentTextMessage === null &&
- currentAudioMessage === null
+ currentAudioMessage === null &&
+ !interjectionReplyRecieved
) {
onHumanInterjection(false);
findTextAndAudio();
@@ -47,21 +64,26 @@ function Output({
}, [isRaisedHand]);
useEffect(() => {
+ console.log("Amount of text messages :", textMessages.length);
+ console.log("Amount of voice messages :", audioMessages.length);
+
+ console.log("Text messages:", textMessages);
+ console.log("Audio messages:", audioMessages);
+
findTextAndAudio();
}, [textMessages, audioMessages]);
function findTextAndAudio() {
const textMessage = textMessages[currentMessageIndex];
- const audioMessage = audioMessages.find(
- (a) => a.message_index === currentMessageIndex
- );
+ const audioMessage = audioMessages.find((a) => a.id === textMessage?.id);
if (
textMessage &&
audioMessage &&
!currentTextMessage &&
!currentAudioMessage &&
- !isRaisedHand
+ !isRaisedHand &&
+ !interjectionReplyRecieved
) {
console.log("Both found!");
@@ -76,6 +98,8 @@ function Output({
function proceedToNextMessage() {
setCurrentTextMessage(() => null);
setCurrentAudioMessage(() => null);
+
+ console.log("Current index: ", currentMessageIndex);
setCurrentMessageIndex((prev) => prev + 1);
}
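
The interjection handshake from this last patch, condensed into a hypothetical hook for readability. The hook itself (useInterjectionReply, submitInterjection, replyReceived) is not part of the patch; the counter logic, the -1000 sentinel, the event names, and the raise_hand emit mirror the Council.jsx changes above. Submitting resets the counter to 0, and the two expected socket updates (one conversation_update, one audio_update) push it to 2, which marks the interjection reply as complete.

import { useEffect, useState } from "react";

// Hypothetical wrapper around the counter logic from Council.jsx.
function useInterjectionReply(socket) {
  // Start far below zero so ordinary updates never reach the threshold.
  const [interjectionCounter, setInterjectionCounter] = useState(-1000);
  const [replyReceived, setReplyReceived] = useState(false);

  useEffect(() => {
    const bump = () => setInterjectionCounter((prev) => prev + 1);
    socket.on("conversation_update", bump);
    socket.on("audio_update", bump);
    return () => {
      socket.off("conversation_update", bump);
      socket.off("audio_update", bump);
    };
  }, [socket]);

  useEffect(() => {
    // One text update plus one audio update means the reply is in.
    if (interjectionCounter === 2) {
      setReplyReceived(true);
    }
  }, [interjectionCounter]);

  function submitInterjection(promptsAndOptions) {
    setInterjectionCounter(0);
    socket.emit("raise_hand", promptsAndOptions);
  }

  return { submitInterjection, replyReceived };
}

export default useInterjectionReply;
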