From b89ca2082eb226ccd149244e47c3e169e446f170 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Thomas=20L=C3=89VEIL?=
Date: Sat, 25 Mar 2023 17:32:59 +0100
Subject: [PATCH] Precise error messages (#150)

* Introduce a component to display error messages

* Precise error message when the API key is invalid
---
 components/Chat/Chat.tsx            | 13 ++++---------
 components/Chat/ErrorMessageDiv.tsx | 17 +++++++++++++++++
 pages/api/models.ts                 | 11 ++++++++++-
 pages/index.tsx                     | 27 ++++++++++++++++++++++-----
 types/index.ts                      |  6 ++++++
 5 files changed, 59 insertions(+), 15 deletions(-)
 create mode 100644 components/Chat/ErrorMessageDiv.tsx

diff --git a/components/Chat/Chat.tsx b/components/Chat/Chat.tsx
index 6bd5152..ef6d8b5 100644
--- a/components/Chat/Chat.tsx
+++ b/components/Chat/Chat.tsx
@@ -1,9 +1,10 @@
-import { Conversation, KeyValuePair, Message, OpenAIModel } from "@/types";
+import { Conversation, ErrorMessage, KeyValuePair, Message, OpenAIModel } from "@/types";
 import { FC, MutableRefObject, useCallback, useEffect, useRef, useState } from "react";
 import { useTranslation } from "next-i18next";
 import { ChatInput } from "./ChatInput";
 import { ChatLoader } from "./ChatLoader";
 import { ChatMessage } from "./ChatMessage";
+import { ErrorMessageDiv } from "./ErrorMessageDiv";
 import { ModelSelect } from "./ModelSelect";
 import { SystemPrompt } from "./SystemPrompt";

@@ -13,7 +14,7 @@ interface Props {
   apiKey: string;
   serverSideApiKeyIsSet: boolean;
   messageIsStreaming: boolean;
-  modelError: boolean;
+  modelError: ErrorMessage | null;
   messageError: boolean;
   loading: boolean;
   lightMode: "light" | "dark";
@@ -76,13 +77,7 @@ export const Chat: FC<Props> = ({ conversation, models, apiKey, serverSideApiKey
             <div className="...">{t('OpenAI API Key Required')}</div>
             <div className="...">{t('Please set your OpenAI API key in the bottom left of the sidebar.')}</div>
           </div>
-        ) : modelError ? (
-          <div className="...">
-            <div className="...">{t('Error fetching models.')}</div>
-            <div className="...">{t('Make sure your OpenAI API key is set in the bottom left of the sidebar.')}</div>
-            <div className="...">{t('If you completed this step, OpenAI may be experiencing issues.')}</div>
-          </div>
-        ) : (
+        ) : modelError ? <ErrorMessageDiv error={modelError} /> : (
           <>
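For context, modelError is no longer a boolean: Chat now receives a structured ErrorMessage through its props and hands it straight to the new ErrorMessageDiv. A minimal sketch of the value involved, assuming an invalid key; the field contents are illustrative examples, not literals from this patch:

// Illustrative only: the shape Chat receives through its modelError prop.
// "invalid_api_key" is an example of a code OpenAI may return, not a value set by this patch.
import { ErrorMessage } from "@/types";

const exampleModelError: ErrorMessage = {
  code: "invalid_api_key",
  title: "Error fetching models.",
  messageLines: ["Incorrect API key provided."],
};

// Chat.tsx then renders: modelError ? <ErrorMessageDiv error={modelError} /> : the normal chat UI.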
diff --git a/components/Chat/ErrorMessageDiv.tsx b/components/Chat/ErrorMessageDiv.tsx
new file mode 100644
--- /dev/null
+++ b/components/Chat/ErrorMessageDiv.tsx
@@ -0,0 +1,17 @@
+import { ErrorMessage } from "@/types";
+import { FC } from "react";
+
+interface Props {
+  error: ErrorMessage;
+}
+
+export const ErrorMessageDiv: FC<Props> = ({ error }) => {
+  return (
+    <div className="...">
+      <div className="...">{error.title} {error.code ? <i>({error.code})</i> : ""}</div>
+      {error.messageLines.map((line, index) => (
+        <div key={index} className="...">{line}</div>
+      ))}
+    </div>
+  );
+};
diff --git a/pages/api/models.ts b/pages/api/models.ts
index 433b7f7..839f6aa 100644
--- a/pages/api/models.ts
+++ b/pages/api/models.ts
@@ -17,7 +17,16 @@ const handler = async (req: Request): Promise<Response> => {
     }
   });

-  if (response.status !== 200) {
+  if (response.status === 401) {
+    return new Response(
+      response.body,
+      {
+        status: 500,
+        headers: response.headers
+      }
+    );
+  } else if (response.status !== 200) {
+    console.error(`OpenAI API returned an error ${response.status}: ${await response.text()}`)
     throw new Error("OpenAI API returned an error");
   }

diff --git a/pages/index.tsx b/pages/index.tsx
index 8720213..70e7582 100644
--- a/pages/index.tsx
+++ b/pages/index.tsx
@@ -1,7 +1,7 @@
 import { Chat } from "@/components/Chat/Chat";
 import { Navbar } from "@/components/Mobile/Navbar";
 import { Sidebar } from "@/components/Sidebar/Sidebar";
-import { ChatBody, ChatFolder, Conversation, KeyValuePair, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
+import { ChatBody, ChatFolder, Conversation, ErrorMessage, KeyValuePair, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
 import { cleanConversationHistory, cleanSelectedConversation } from "@/utils/app/clean";
 import { DEFAULT_SYSTEM_PROMPT } from "@/utils/app/const";
 import { saveConversation, saveConversations, updateConversation } from "@/utils/app/conversation";
@@ -18,6 +18,7 @@ interface HomeProps {
   serverSideApiKeyIsSet: boolean;
 }

+
 const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
   const { t } = useTranslation('chat')
   const [folders, setFolders] = useState<ChatFolder[]>([]);
@@ -30,7 +31,7 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
   const [showSidebar, setShowSidebar] = useState<boolean>(true);
   const [apiKey, setApiKey] = useState<string>("");
   const [messageError, setMessageError] = useState<boolean>(false);
-  const [modelError, setModelError] = useState<boolean>(false);
+  const [modelError, setModelError] = useState<ErrorMessage | null>(null);
   const [currentMessage, setCurrentMessage] = useState<Message>();
   const stopConversationRef = useRef<boolean>(false);

@@ -179,6 +180,15 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
   };

   const fetchModels = async (key: string) => {
+    const error = {
+      title: t('Error fetching models.'),
+      code: null,
+      messageLines: [
+        t('Make sure your OpenAI API key is set in the bottom left of the sidebar.'),
+        t('If you completed this step, OpenAI may be experiencing issues.')
+      ]
+    } as ErrorMessage;
+
     const response = await fetch("/api/models", {
       method: "POST",
       headers: {
@@ -190,19 +200,26 @@ const Home: React.FC<HomeProps> = ({ serverSideApiKeyIsSet }) => {
     });

     if (!response.ok) {
-      setModelError(true);
+      try {
+        const data = await response.json();
+        Object.assign(error, {
+          code: data.error?.code,
+          messageLines: [data.error?.message]
+        })
+      } catch (e) { }
+      setModelError(error);
       return;
     }

     const data = await response.json();

     if (!data) {
-      setModelError(true);
+      setModelError(error);
       return;
     }

     setModels(data);
-    setModelError(false);
+    setModelError(null);
   };

   const handleLightMode = (mode: "dark" | "light") => {
diff --git a/types/index.ts b/types/index.ts
index 264e159..e8ff886 100644
--- a/types/index.ts
+++ b/types/index.ts
@@ -61,3 +61,9 @@ export interface LocalStorage {
   // added folders (3/23/23)
   folders: ChatFolder[];
 }
+
+export interface ErrorMessage {
+  code: String | null,
+  title: String,
+  messageLines: String[]
+}
\ No newline at end of file
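Taken together, the pieces above let the client show OpenAI's own error code and message when the key is rejected: pages/api/models.ts relays the 401 body, and fetchModels in pages/index.tsx copies error.code and error.message into an ErrorMessage that ErrorMessageDiv renders. The sketch below condenses that client-side flow into one function for illustration; getModelsOrError is a hypothetical helper, not code from this patch, the request body is assumed to match fetchModels, and the fallback strings are the patch's defaults.

// Condensed illustration of the flow introduced by this patch.
// getModelsOrError is hypothetical; the real logic lives in fetchModels in pages/index.tsx.
import { ErrorMessage, OpenAIModel } from "@/types";

const getModelsOrError = async (key: string): Promise<OpenAIModel[] | ErrorMessage> => {
  const response = await fetch("/api/models", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ key }),
  });

  if (!response.ok) {
    // /api/models now forwards OpenAI's error body when the key is invalid,
    // so the client can surface the provider's own code and message.
    const error: ErrorMessage = {
      title: "Error fetching models.",
      code: null,
      messageLines: ["Make sure your OpenAI API key is set in the bottom left of the sidebar."],
    };
    try {
      const data = await response.json();
      if (data.error) {
        error.code = data.error.code ?? null;
        error.messageLines = [data.error.message];
      }
    } catch (e) {
      // Non-JSON body: keep the generic message.
    }
    return error;
  }

  return response.json();
};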