error handling (#27)

This commit is contained in:
Mckay Wrigley 2023-03-20 07:17:58 -06:00 committed by GitHub
parent a0751994b1
commit dd439bb4a8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 125 additions and 49 deletions

View File

@ -24,10 +24,11 @@ Expect frequent improvements.
- [ ] Folders - [ ] Folders
- [ ] Custom model settings - [ ] Custom model settings
- [ ] Prompt templates - [ ] Prompt templates
- [ ] Regenerate responses - [ ] Regenerate & edit responses
**Recent updates:** **Recent updates:**
- [x] Error handling (3/20/23)
- [x] GPT-4 support (access required) (3/20/23) - [x] GPT-4 support (access required) (3/20/23)
- [x] Search conversations (3/19/23) - [x] Search conversations (3/19/23)
- [x] Code syntax highlighting (3/18/23) - [x] Code syntax highlighting (3/18/23)

View File

@ -1,21 +1,26 @@
import { Conversation, Message, OpenAIModel } from "@/types"; import { Conversation, Message, OpenAIModel } from "@/types";
import { FC, useEffect, useRef } from "react"; import { FC, useEffect, useRef, useState } from "react";
import { ChatInput } from "./ChatInput"; import { ChatInput } from "./ChatInput";
import { ChatLoader } from "./ChatLoader"; import { ChatLoader } from "./ChatLoader";
import { ChatMessage } from "./ChatMessage"; import { ChatMessage } from "./ChatMessage";
import { ModelSelect } from "./ModelSelect"; import { ModelSelect } from "./ModelSelect";
import { Regenerate } from "./Regenerate";
interface Props { interface Props {
conversation: Conversation; conversation: Conversation;
models: OpenAIModel[]; models: OpenAIModel[];
messageIsStreaming: boolean; messageIsStreaming: boolean;
modelError: boolean;
messageError: boolean;
loading: boolean; loading: boolean;
lightMode: "light" | "dark"; lightMode: "light" | "dark";
onSend: (message: Message) => void; onSend: (message: Message, isResend: boolean) => void;
onModelChange: (conversation: Conversation, model: OpenAIModel) => void; onModelChange: (conversation: Conversation, model: OpenAIModel) => void;
} }
export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, loading, lightMode, onSend, onModelChange }) => { export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, modelError, messageError, loading, lightMode, onSend, onModelChange }) => {
const [currentMessage, setCurrentMessage] = useState<Message>();
const messagesEndRef = useRef<HTMLDivElement>(null); const messagesEndRef = useRef<HTMLDivElement>(null);
const scrollToBottom = () => { const scrollToBottom = () => {
@ -28,45 +33,68 @@ export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, load
return ( return (
<div className="flex-1 overflow-scroll dark:bg-[#343541]"> <div className="flex-1 overflow-scroll dark:bg-[#343541]">
<div> {modelError ? (
{conversation.messages.length === 0 ? ( <div className="flex flex-col justify-center mx-auto h-full w-[300px] sm:w-[500px] space-y-6">
<> <div className="text-center text-red-500">Error fetching models.</div>
<div className="flex justify-center pt-8"> <div className="text-center text-red-500">Make sure your OpenAI API key is set in the bottom left of the sidebar or in a .env.local file and refresh.</div>
<ModelSelect <div className="text-center text-red-500">If you completed this step, OpenAI may be experiencing issues.</div>
model={conversation.model} </div>
models={models} ) : (
onModelChange={(model) => onModelChange(conversation, model)} <>
/> <div>
</div> {conversation.messages.length === 0 ? (
<>
<div className="flex justify-center pt-8">
<ModelSelect
model={conversation.model}
models={models}
onModelChange={(model) => onModelChange(conversation, model)}
/>
</div>
<div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{loading ? "Loading..." : "Chatbot UI"}</div> <div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{models.length === 0 ? "Loading..." : "Chatbot UI"}</div>
</> </>
) : ( ) : (
<> <>
<div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div> <div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>
{conversation.messages.map((message, index) => ( {conversation.messages.map((message, index) => (
<ChatMessage <ChatMessage
key={index} key={index}
message={message} message={message}
lightMode={lightMode} lightMode={lightMode}
/> />
))} ))}
{loading && <ChatLoader />} {loading && <ChatLoader />}
<div <div
className="bg-white dark:bg-[#343541] h-24 sm:h-32" className="bg-white dark:bg-[#343541] h-24 sm:h-32"
ref={messagesEndRef} ref={messagesEndRef}
/>
</>
)}
</div>
{messageError ? (
<Regenerate
onRegenerate={() => {
if (currentMessage) {
onSend(currentMessage, true);
}
}}
/> />
</> ) : (
)} <ChatInput
</div> messageIsStreaming={messageIsStreaming}
onSend={(message) => {
<ChatInput setCurrentMessage(message);
messageIsStreaming={messageIsStreaming} onSend(message, false);
onSend={onSend} }}
/> />
)}
</>
)}
</div> </div>
); );
}; };

View File

@ -0,0 +1,21 @@
import { IconRefresh } from "@tabler/icons-react";
import { FC } from "react";
interface Props {
  // Invoked when the user clicks the "Regenerate response" button;
  // the parent is expected to resend the failed message.
  onRegenerate: () => void;
}
/**
 * Error-state action bar rendered after a chat request fails: shows a
 * red "Sorry, there was an error." notice plus a full-width button that
 * triggers `onRegenerate` to retry the last message.
 *
 * NOTE(review): the fixed/absolute positioning and left offsets appear
 * intended to overlay the chat-input area at the bottom of the chat
 * pane — confirm against ChatInput's layout.
 */
export const Regenerate: FC<Props> = ({ onRegenerate }) => {
  return (
    <div className="fixed sm:absolute bottom-4 sm:bottom-8 w-full sm:w-1/2 px-2 left-0 sm:left-[280px] lg:left-[200px] right-0 ml-auto mr-auto">
      {/* User-facing error message */}
      <div className="text-center mb-4 text-red-500">Sorry, there was an error.</div>
      <button
        className="flex items-center justify-center w-full h-12 bg-neutral-100 dark:bg-[#444654] text-neutral-500 dark:text-neutral-200 text-sm font-semibold rounded-lg border border-b-neutral-300 dark:border-none"
        onClick={onRegenerate}
      >
        <IconRefresh className="mr-2" />
        <div>Regenerate response</div>
      </button>
    </div>
  );
};

View File

@ -16,17 +16,32 @@ export default function Home() {
const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false); const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false);
const [showSidebar, setShowSidebar] = useState<boolean>(true); const [showSidebar, setShowSidebar] = useState<boolean>(true);
const [apiKey, setApiKey] = useState<string>(""); const [apiKey, setApiKey] = useState<string>("");
const [messageError, setMessageError] = useState<boolean>(false);
const [modelError, setModelError] = useState<boolean>(false);
const handleSend = async (message: Message) => { const handleSend = async (message: Message, isResend: boolean) => {
if (selectedConversation) { if (selectedConversation) {
let updatedConversation: Conversation = { let updatedConversation: Conversation;
...selectedConversation,
messages: [...selectedConversation.messages, message] if (isResend) {
}; const updatedMessages = [...selectedConversation.messages];
updatedMessages.pop();
updatedConversation = {
...selectedConversation,
messages: [...updatedMessages, message]
};
} else {
updatedConversation = {
...selectedConversation,
messages: [...selectedConversation.messages, message]
};
}
setSelectedConversation(updatedConversation); setSelectedConversation(updatedConversation);
setLoading(true); setLoading(true);
setMessageIsStreaming(true); setMessageIsStreaming(true);
setMessageError(false);
const response = await fetch("/api/chat", { const response = await fetch("/api/chat", {
method: "POST", method: "POST",
@ -42,6 +57,8 @@ export default function Home() {
if (!response.ok) { if (!response.ok) {
setLoading(false); setLoading(false);
setMessageIsStreaming(false);
setMessageError(true);
return; return;
} }
@ -50,6 +67,8 @@ export default function Home() {
if (!data) { if (!data) {
setLoading(false); setLoading(false);
setMessageIsStreaming(false); setMessageIsStreaming(false);
setMessageError(true);
return; return;
} }
@ -218,8 +237,6 @@ export default function Home() {
}; };
const fetchModels = async (key: string) => { const fetchModels = async (key: string) => {
setLoading(true);
const response = await fetch("/api/models", { const response = await fetch("/api/models", {
method: "POST", method: "POST",
headers: { headers: {
@ -229,13 +246,20 @@ export default function Home() {
key key
}) })
}); });
const data = await response.json();
if (data) { if (!response.ok) {
setModels(data); setModelError(true);
return;
} }
setLoading(false); const data = await response.json();
if (!data) {
setModelError(true);
return;
}
setModels(data);
}; };
useEffect(() => { useEffect(() => {
@ -336,6 +360,8 @@ export default function Home() {
<Chat <Chat
conversation={selectedConversation} conversation={selectedConversation}
messageIsStreaming={messageIsStreaming} messageIsStreaming={messageIsStreaming}
modelError={modelError}
messageError={messageError}
models={models} models={models}
loading={loading} loading={loading}
lightMode={lightMode} lightMode={lightMode}