Merge pull request #9 from nauxliu/disable-input-when-message-is-streaming

Ignore new messages when the current message is not finished yet
This commit is contained in:
Mckay Wrigley 2023-03-18 10:08:34 -06:00 committed by GitHub
commit 903c7f6806
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 14 additions and 7 deletions

View File

@ -8,13 +8,14 @@ import { ModelSelect } from "./ModelSelect";
interface Props {
model: OpenAIModel;
messages: Message[];
messageIsStreaming: boolean,
loading: boolean;
lightMode: "light" | "dark";
onSend: (message: Message) => void;
onSelect: (model: OpenAIModel) => void;
}
export const Chat: FC<Props> = ({ model, messages, loading, lightMode, onSend, onSelect }) => {
export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading, lightMode, onSend, onSelect }) => {
const messagesEndRef = useRef<HTMLDivElement>(null);
const scrollToBottom = () => {
@ -57,7 +58,7 @@ export const Chat: FC<Props> = ({ model, messages, loading, lightMode, onSend, o
</div>
<div className="h-[80px] sm:h-[140px] w-[340px] sm:w-[400px] md:w-[500px] lg:w-[700px] xl:w-[800px] mx-auto">
<ChatInput onSend={onSend} />
<ChatInput messageIsStreaming={messageIsStreaming} onSend={onSend} />
</div>
</div>
);

View File

@ -3,10 +3,11 @@ import { IconSend } from "@tabler/icons-react";
import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";
interface Props {
messageIsStreaming: boolean,
onSend: (message: Message) => void;
}
export const ChatInput: FC<Props> = ({ onSend }) => {
export const ChatInput: FC<Props> = ({ onSend, messageIsStreaming }) => {
const [content, setContent] = useState<string>();
const [isTyping, setIsTyping] = useState<boolean>(false);
@ -23,6 +24,10 @@ export const ChatInput: FC<Props> = ({ onSend }) => {
};
const handleSend = () => {
if (messageIsStreaming) {
return;
}
if (!content) {
alert("Please enter a message");
return;

View File

@ -11,7 +11,7 @@ export default function Home() {
const [loading, setLoading] = useState<boolean>(false);
const [model, setModel] = useState<OpenAIModel>(OpenAIModel.GPT_3_5);
const [lightMode, setLightMode] = useState<"dark" | "light">("dark");
const [disabled, setDisabled] = useState<boolean>(false);
const [messageIsStreaming, setmessageIsStreaming] = useState<boolean>(false);
const [showSidebar, setShowSidebar] = useState<boolean>(true);
const handleSend = async (message: Message) => {
@ -23,7 +23,7 @@ export default function Home() {
setSelectedConversation(updatedConversation);
setLoading(true);
setDisabled(true);
setmessageIsStreaming(true);
const response = await fetch("/api/chat", {
method: "POST",
@ -111,7 +111,7 @@ export default function Home() {
localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
setDisabled(false);
setmessageIsStreaming(false);
}
};
@ -230,7 +230,7 @@ export default function Home() {
<div className={`flex h-screen text-white ${lightMode}`}>
{showSidebar ? (
<Sidebar
loading={disabled}
loading={messageIsStreaming}
conversations={conversations}
lightMode={lightMode}
selectedConversation={selectedConversation}
@ -250,6 +250,7 @@ export default function Home() {
)}
<Chat
messageIsStreaming={messageIsStreaming}
model={model}
messages={selectedConversation.messages}
loading={loading}