error handling (#27)

Mckay Wrigley 2023-03-20 07:17:58 -06:00 committed by GitHub
parent a0751994b1
commit dd439bb4a8
4 changed files with 125 additions and 49 deletions

README.md

@ -24,10 +24,11 @@ Expect frequent improvements.
- [ ] Folders
- [ ] Custom model settings
- [ ] Prompt templates
-- [ ] Regenerate responses
+- [ ] Regenerate & edit responses
**Recent updates:**
+- [x] Error handling (3/20/23)
- [x] GPT-4 support (access required) (3/20/23)
- [x] Search conversations (3/19/23)
- [x] Code syntax highlighting (3/18/23)

Chat.tsx

@ -1,21 +1,26 @@
import { Conversation, Message, OpenAIModel } from "@/types";
-import { FC, useEffect, useRef } from "react";
+import { FC, useEffect, useRef, useState } from "react";
import { ChatInput } from "./ChatInput";
import { ChatLoader } from "./ChatLoader";
import { ChatMessage } from "./ChatMessage";
import { ModelSelect } from "./ModelSelect";
+import { Regenerate } from "./Regenerate";
interface Props {
conversation: Conversation;
models: OpenAIModel[];
messageIsStreaming: boolean;
+modelError: boolean;
+messageError: boolean;
loading: boolean;
lightMode: "light" | "dark";
-onSend: (message: Message) => void;
+onSend: (message: Message, isResend: boolean) => void;
onModelChange: (conversation: Conversation, model: OpenAIModel) => void;
}
-export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, loading, lightMode, onSend, onModelChange }) => {
+export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, modelError, messageError, loading, lightMode, onSend, onModelChange }) => {
+const [currentMessage, setCurrentMessage] = useState<Message>();
const messagesEndRef = useRef<HTMLDivElement>(null);
const scrollToBottom = () => {
@ -28,45 +33,68 @@ export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, load
return (
<div className="flex-1 overflow-scroll dark:bg-[#343541]">
<div>
-{conversation.messages.length === 0 ? (
-<>
-<div className="flex justify-center pt-8">
-<ModelSelect
-model={conversation.model}
-models={models}
-onModelChange={(model) => onModelChange(conversation, model)}
-/>
-</div>
+{modelError ? (
+<div className="flex flex-col justify-center mx-auto h-full w-[300px] sm:w-[500px] space-y-6">
+<div className="text-center text-red-500">Error fetching models.</div>
+<div className="text-center text-red-500">Make sure your OpenAI API key is set in the bottom left of the sidebar or in a .env.local file and refresh.</div>
+<div className="text-center text-red-500">If you completed this step, OpenAI may be experiencing issues.</div>
+</div>
+) : (
+<>
+<div>
+{conversation.messages.length === 0 ? (
+<>
+<div className="flex justify-center pt-8">
+<ModelSelect
+model={conversation.model}
+models={models}
+onModelChange={(model) => onModelChange(conversation, model)}
+/>
+</div>
-<div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{loading ? "Loading..." : "Chatbot UI"}</div>
-</>
-) : (
-<>
-<div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>
+<div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{models.length === 0 ? "Loading..." : "Chatbot UI"}</div>
+</>
+) : (
+<>
+<div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>
-{conversation.messages.map((message, index) => (
-<ChatMessage
-key={index}
-message={message}
-lightMode={lightMode}
-/>
-))}
+{conversation.messages.map((message, index) => (
+<ChatMessage
+key={index}
+message={message}
+lightMode={lightMode}
+/>
+))}
-{loading && <ChatLoader />}
+{loading && <ChatLoader />}
-<div
-className="bg-white dark:bg-[#343541] h-24 sm:h-32"
-ref={messagesEndRef}
+<div
+className="bg-white dark:bg-[#343541] h-24 sm:h-32"
+ref={messagesEndRef}
/>
+</>
+)}
+</div>
+{messageError ? (
+<Regenerate
+onRegenerate={() => {
+if (currentMessage) {
+onSend(currentMessage, true);
+}
+}}
+/>
-</>
-)}
</div>
-<ChatInput
-messageIsStreaming={messageIsStreaming}
-onSend={onSend}
-/>
+) : (
+<ChatInput
+messageIsStreaming={messageIsStreaming}
+onSend={(message) => {
+setCurrentMessage(message);
+onSend(message, false);
+}}
+/>
+)}
+</>
+)}
</div>
);
};
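The `onSend(message, isResend)` contract introduced here is implemented in pages/index.tsx below. Condensed into a standalone sketch (the `Message` shape is a simplified stand-in for the repo's `@/types`), the resend semantics are:

```ts
// Sketch only: on a resend, the previously failed message is dropped and replaced,
// so retrying after an error does not duplicate it in the conversation.
interface Message {
  role: "user" | "assistant";
  content: string;
}

function withSentMessage(messages: Message[], message: Message, isResend: boolean): Message[] {
  // isResend === true corresponds to the Regenerate button: the last (failed) entry
  // is popped before the message is appended again.
  const base = isResend ? messages.slice(0, -1) : messages;
  return [...base, message];
}
```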

Regenerate.tsx (new file)

@ -0,0 +1,21 @@
import { IconRefresh } from "@tabler/icons-react";
import { FC } from "react";
interface Props {
onRegenerate: () => void;
}
export const Regenerate: FC<Props> = ({ onRegenerate }) => {
return (
<div className="fixed sm:absolute bottom-4 sm:bottom-8 w-full sm:w-1/2 px-2 left-0 sm:left-[280px] lg:left-[200px] right-0 ml-auto mr-auto">
<div className="text-center mb-4 text-red-500">Sorry, there was an error.</div>
<button
className="flex items-center justify-center w-full h-12 bg-neutral-100 dark:bg-[#444654] text-neutral-500 dark:text-neutral-200 text-sm font-semibold rounded-lg border border-b-neutral-300 dark:border-none"
onClick={onRegenerate}
>
<IconRefresh className="mr-2" />
<div>Regenerate response</div>
</button>
</div>
);
};
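For reference, a minimal hypothetical host component shows how `Regenerate` is meant to be used: render it only while an error flag is set, and replay the cached message on click. The real wiring lives in Chat.tsx above; `failed`, `lastMessage`, and `resend` here are illustrative names only.

```tsx
import { FC, useState } from "react";
import { Regenerate } from "./Regenerate";

// Hypothetical host: shows Regenerate only while the last send failed.
export const RegenerateExample: FC = () => {
  const [failed, setFailed] = useState(true);
  const lastMessage = "Hello there";

  const resend = (message: string) => {
    // Re-submit the message here; clear the flag once it succeeds.
    console.log("resending:", message);
    setFailed(false);
  };

  return failed ? <Regenerate onRegenerate={() => resend(lastMessage)} /> : null;
};
```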

pages/index.tsx

@ -16,17 +16,32 @@ export default function Home() {
const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false);
const [showSidebar, setShowSidebar] = useState<boolean>(true);
const [apiKey, setApiKey] = useState<string>("");
+const [messageError, setMessageError] = useState<boolean>(false);
+const [modelError, setModelError] = useState<boolean>(false);
-const handleSend = async (message: Message) => {
+const handleSend = async (message: Message, isResend: boolean) => {
if (selectedConversation) {
-let updatedConversation: Conversation = {
-...selectedConversation,
-messages: [...selectedConversation.messages, message]
-};
+let updatedConversation: Conversation;
+if (isResend) {
+const updatedMessages = [...selectedConversation.messages];
+updatedMessages.pop();
+updatedConversation = {
+...selectedConversation,
+messages: [...updatedMessages, message]
+};
+} else {
+updatedConversation = {
+...selectedConversation,
+messages: [...selectedConversation.messages, message]
+};
+}
setSelectedConversation(updatedConversation);
setLoading(true);
setMessageIsStreaming(true);
+setMessageError(false);
const response = await fetch("/api/chat", {
method: "POST",
@ -42,6 +57,8 @@ export default function Home() {
if (!response.ok) {
setLoading(false);
setMessageIsStreaming(false);
+setMessageError(true);
return;
}
@ -50,6 +67,8 @@ export default function Home() {
if (!data) {
setLoading(false);
setMessageIsStreaming(false);
+setMessageError(true);
return;
}
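Both hunks above follow the same pattern: instead of throwing, a failed request flips `messageError` and bails out early, which is what lets Chat swap ChatInput for the Regenerate button. A standalone sketch of that pattern (the setters stand in for the React state setters used in this file, and the request headers are assumed):

```ts
type SetFlag = (value: boolean) => void;

// Sketch of the early-return error path in handleSend: a non-OK status or a
// missing response body marks the message as failed instead of throwing.
async function postChat(
  payload: unknown,
  setLoading: SetFlag,
  setMessageIsStreaming: SetFlag,
  setMessageError: SetFlag
): Promise<ReadableStream<Uint8Array> | null> {
  setLoading(true);
  setMessageIsStreaming(true);
  setMessageError(false);

  const response = await fetch("/api/chat", {
    method: "POST",
    headers: { "Content-Type": "application/json" }, // assumed; not shown in the hunk
    body: JSON.stringify(payload),
  });

  if (!response.ok || !response.body) {
    setLoading(false);
    setMessageIsStreaming(false);
    setMessageError(true);
    return null;
  }

  return response.body;
}
```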
@ -218,8 +237,6 @@ export default function Home() {
};
const fetchModels = async (key: string) => {
-setLoading(true);
const response = await fetch("/api/models", {
method: "POST",
headers: {
@ -229,13 +246,20 @@ export default function Home() {
key
})
});
-const data = await response.json();
-if (data) {
-setModels(data);
+if (!response.ok) {
+setModelError(true);
+return;
}
-setLoading(false);
+const data = await response.json();
+if (!data) {
+setModelError(true);
+return;
+}
+setModels(data);
};
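Consolidated, the new fetchModels flow reads roughly as below (a sketch: the request headers elided from the hunk above are assumed to be JSON, the setters stand in for the component's state setters, and the model type is simplified). A failed or empty response now sets `modelError`, which Chat renders as the full-screen API-key error message instead of leaving the model list silently empty.

```ts
type OpenAIModel = { id: string; name: string }; // simplified stand-in for the repo's type

// Sketch of fetchModels after this change: flag modelError on any failure.
async function fetchModelsSketch(
  key: string,
  setModels: (models: OpenAIModel[]) => void,
  setModelError: (value: boolean) => void
): Promise<void> {
  const response = await fetch("/api/models", {
    method: "POST",
    headers: { "Content-Type": "application/json" }, // assumed; elided in the hunk
    body: JSON.stringify({ key }),
  });

  if (!response.ok) {
    setModelError(true);
    return;
  }

  const data = await response.json();

  if (!data) {
    setModelError(true);
    return;
  }

  setModels(data);
}
```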
useEffect(() => {
@ -336,6 +360,8 @@ export default function Home() {
<Chat
conversation={selectedConversation}
messageIsStreaming={messageIsStreaming}
+modelError={modelError}
+messageError={messageError}
models={models}
loading={loading}
lightMode={lightMode}