Skip to content

Commit

Permalink
Merge pull request #9 from oaknational/rc1
Browse files Browse the repository at this point in the history
build: release candidate
  • Loading branch information
simonrose121 authored Aug 28, 2024
2 parents 5483e1f + efffd02 commit 4fe7ca2
Show file tree
Hide file tree
Showing 10 changed files with 83 additions and 77 deletions.
1 change: 1 addition & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,7 @@
"oaknational",
"oaknationalacademy",
"oclif",
"Onboarded",
"openai",
"openapi",
"paragraphise",
Expand Down
2 changes: 1 addition & 1 deletion apps/nextjs/src/app/aila/[id]/download/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ export function DownloadPageContents({ chat }: Readonly<DownloadPageProps>) {

const { survey } = usePosthogFeedbackSurvey({
closeDialog: () => null,
surveyName: "Chat Feedback With Stats",
surveyName: "End of Aila generation survey launch aug24",
});

useEffect(() => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@ import { usePosthogFeedbackSurvey } from "hooks/surveys/usePosthogFeedbackSurvey

import FeedBack from "@/components/Feedback";

const ChatActions = ({ closeDialog }: { closeDialog: () => void }) => {
const EndOfLessonFeedback = ({ closeDialog }: { closeDialog: () => void }) => {
const { survey, submitSurvey, closeDialogWithPostHogDismiss } =
usePosthogFeedbackSurvey({
closeDialog,
surveyName: "Chat Feedback With Stats",
surveyName: "End of Aila generation survey launch aug24",
});

return (
Expand All @@ -26,4 +26,4 @@ const ChatActions = ({ closeDialog }: { closeDialog: () => void }) => {
);
};

export default ChatActions;
export default EndOfLessonFeedback;
4 changes: 2 additions & 2 deletions apps/nextjs/src/components/DialogControl/DialogContents.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ import { Message } from "ai";

import { useDialog } from "../AppComponents/DialogContext";
import { Icon } from "../Icon";
import ChatActions from "./ContentOptions/ChatActions";
import DemoInterstitialDialog from "./ContentOptions/DemoInterstitialDialog";
import DemoShareLockedDialog from "./ContentOptions/DemoShareLockedDialog";
import EndOfLessonFeedback from "./ContentOptions/EndOfLessonFeedback";
import ReportContentDialog from "./ContentOptions/ReportContentDialog";
import ShareChatDialog from "./ContentOptions/ShareChatDialog";
import {
Expand Down Expand Up @@ -61,7 +61,7 @@ const DialogContents = ({
<DemoShareLockedDialog closeDialog={closeDialog} />
)}
{dialogWindow === "feedback" && (
<ChatActions closeDialog={closeDialog} />
<EndOfLessonFeedback closeDialog={closeDialog} />
)}
{dialogWindow === "report-content" && (
<ReportContentDialog
Expand Down
85 changes: 34 additions & 51 deletions apps/nextjs/src/components/Feedback/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -13,63 +13,40 @@ const FeedBack = ({
closeDialogWithPostHogDismiss,
}: {
survey: Survey;
submitSurvey: (usersResponse: {
$survey_response: string;
$survey_response_1: string;
$survey_response_2: string;
}) => void;
submitSurvey: (usersResponse: { [key: string]: string }) => void;
closeDialogWithPostHogDismiss: () => void;
onSubmit: () => void;
}) => {
const numbersOfHoursSaved = [0, 1, 2, 3, 4, 5, 6];
const [usersResponse, setUsersResponse] = useState({
$survey_response: "",
$survey_response_1: "",
$survey_response_2: "",
});
const rating = [
{ text: "Poor", number: 1 },
{ text: "Needs Improvement", number: 2 },
{ text: "Satisfactory", number: 3 },
{ text: "Good", number: 4 },
{ text: "Excellent", number: 5 },
];

const [usersResponse, setUsersResponse] = useState<{ [key: string]: string }>(
{},
);
console.log("usersResponse", usersResponse);
if (!survey?.id) return null;

return (
<Flex
className="h-full w-full"
direction="column"
justify="start"
align="start"
>
<p className="mb-20 text-3xl font-bold">Before you continue...</p>
<p className="mb-20 text-3xl font-bold">Before you continue...</p>

<form
onSubmit={async (e) => {
e.preventDefault();
}}
className="flex w-full flex-col gap-14"
>
{survey?.questions.map((question) => {
if (question.type === "open") {
return (
<div
key={question.question}
className="flex flex-col items-start justify-start"
>
<label
htmlFor={question.question}
className="mb-16 text-center text-xl "
>
{question.question}
</label>
<textarea
className="h-32 w-full min-w-[300px] rounded border-2 border-black p-10"
onChange={(e) => {
setUsersResponse({
...usersResponse,
$survey_response_1: e.target.value,
});
}}
id={question.question}
/>
</div>
);
}
{survey?.questions.map((question, i) => {
if (question.type === "rating") {
return (
<div
Expand All @@ -78,32 +55,38 @@ const FeedBack = ({
>
<label
htmlFor={question.question}
className="mb-16 text-left text-xl "
className="mb-16 text-left text-2xl font-bold"
>
{question.question}
</label>
<div className="flex w-full justify-between gap-6">
{numbersOfHoursSaved.map((number) => {
{rating.map((feedback) => {
return (
<button
key={number}
className={`flex flex-col items-center justify-center gap-6 `}
key={feedback.text}
className={`flex flex-col items-center justify-center gap-6`}
onClick={() => {
setUsersResponse({
...usersResponse,
$survey_response: number.toString(),
});
setUsersResponse((prevState) => ({
...prevState,
[`$survey_response_${i}`]:
feedback.number.toString(),
}));
}}
>
<span
className={`text-2xl font-bold ${usersResponse.$survey_response === number.toString() ? `text-[#287C34]` : `text-black`}`}
className={`text-lg ${
usersResponse[`$survey_response_${i}`] ===
feedback.toString()
? `text-[#287C34]`
: `text-black`
}`}
>
{number}
{number === numbersOfHoursSaved.length - 1 ? "+" : ""}
{feedback.text}
</span>
<span
className={
usersResponse.$survey_response === number.toString()
usersResponse[`$survey_response_${i}`] ===
feedback.toString()
? "opacity-100"
: "opacity-0"
}
Expand All @@ -118,7 +101,7 @@ const FeedBack = ({
);
}
})}
<div className="flex justify-between ">
<div className="flex justify-between">
<ChatButton
variant="text-link"
onClick={() => closeDialogWithPostHogDismiss()}
Expand Down
5 changes: 4 additions & 1 deletion apps/nextjs/src/hooks/surveys/usePosthogFeedbackSurvey.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ type UsePosthogFeedbackSurveyProps = {
| "Chat Feedback"
| "Moderation feedback"
| "Chat Feedback With Stats"
| "Report Content";
| "Report Content"
| "End of Aila generation survey launch aug24";
};
export const usePosthogFeedbackSurvey = ({
closeDialog,
Expand All @@ -19,9 +20,11 @@ export const usePosthogFeedbackSurvey = ({
const [survey, setSurvey] = useState<Survey | undefined>(undefined);

const posthog = usePostHog();

useEffect(() => {
posthog.getSurveys((surveys) => {
const filteredSurveys = surveys.filter((survey) => survey.type === "api");

const matchingSurvey = filteredSurveys.find(
(survey) => survey.name === surveyName,
);
Expand Down
5 changes: 5 additions & 0 deletions packages/api/src/router/auth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,12 @@ export const authRouter = router({

await clerkClient.users.updateUserMetadata(userId, {
publicMetadata: {
// legacy field for demo users. To remove after transition to labs namespace
isDemoUser,
labs: {
isDemoUser,
isOnboarded: !!input.termsOfUse,
},
},
privateMetadata: {
acceptedPrivacyPolicy: input.privacyPolicy,
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/functions/slack/notifyModeration.ts
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ export const notifyModeration = inngest.createFunction(
},
{
type: "mrkdwn",
text: `*Categories*: ${args.categories.join("`, `")}`,
text: `*Categories*: \`${args.categories.join("`, `")}\``,
},
],
},
Expand Down
24 changes: 24 additions & 0 deletions packages/core/src/llm/openai.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { ChatOpenAI as LangchainChatOpenAI } from "langchain/chat_models/openai";
import { BaseLLMParams } from "langchain/llms/base";
import {
AzureOpenAIInput,
Expand Down Expand Up @@ -103,8 +104,31 @@ function createOpenAILangchainClient({
});
}

/**
 * Build a Langchain chat-model client whose traffic is routed through the
 * Helicone EU proxy, tagged per-app so usage is attributable.
 *
 * @param app    App identifier used to build Helicone tracking headers.
 * @param fields Optional Langchain ChatOpenAI options (temperature, model,
 *               streaming, …); any `configuration` inside it is overridden
 *               by the Helicone proxy configuration below.
 */
function createOpenAILangchainChatClient({
  app,
  fields = {},
}: {
  app: string;
  fields?: Partial<OpenAIInput> &
    Partial<AzureOpenAIInput> &
    BaseLLMParams & {
      configuration?: ClientOptions;
    };
}) {
  // Force every request through Helicone's EU host with per-app headers;
  // this deliberately clobbers any caller-supplied `configuration`.
  const configuration = {
    apiKey: process.env.OPENAI_API_KEY,
    baseURL: process.env.HELICONE_EU_HOST,
    defaultHeaders: heliconeHeaders({ app }),
  };
  return new LangchainChatOpenAI({ ...fields, configuration });
}

export {
createOpenAIClient,
createOpenAIModerationsClient,
createOpenAILangchainClient,
createOpenAILangchainChatClient,
};
26 changes: 8 additions & 18 deletions packages/core/src/models/prompts.ts
Original file line number Diff line number Diff line change
@@ -1,21 +1,11 @@
import { PrismaClientWithAccelerate } from "@oakai/db";
import defaultLogger, { Logger } from "@oakai/logger";
import { Logger as InngestLogger } from "inngest/middleware/logger";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { PromptTemplate } from "langchain/prompts";
import { BaseMessage, SystemMessage } from "langchain/schema";
import untruncateJson from "untruncate-json";

// Safety guard: in production every OpenAI call must go via Helicone.
// Throws unless we are outside production or the bypass flag is explicitly
// set, so a misconfigured deploy fails loudly instead of leaking traffic.
const checkProductionOpenAiUsage = () => {
  const inProduction = process.env.VERCEL_ENV === "production";
  const bypassEnabled = process.env.BYPASS_HELICONE_CHECK === "true";
  if (!inProduction || bypassEnabled) {
    return;
  }
  throw new Error("Security: Helicone is required for production OpenAI calls");
};
import { createOpenAILangchainChatClient } from "../llm/openai";

type CompletionMeta = {
timeTaken: number;
Expand Down Expand Up @@ -85,13 +75,13 @@ export class Prompts {

const streaming = typeof onPartialResponse !== "undefined";

checkProductionOpenAiUsage();
const model = new ChatOpenAI({
// TODO The temperature should be a variable we can pass per prompt
temperature: 0.2,
// TODO The model should be a variable we can pass per prompt
modelName: "gpt-4",
streaming,
const model = createOpenAILangchainChatClient({
app: "quiz-designer",
fields: {
temperature: 0.2,
modelName: "gpt-4",
streaming,
},
});

const llmInput = [new SystemMessage(promptText)];
Expand Down

0 comments on commit 4fe7ca2

Please sign in to comment.