Add GPT-4 support (#25)
* mobile ui updates
* fixes sidebar btn
* return if null
* mobile input blur
* handle mobile enter key
* new convo name
* new delete mechanism
* test height
* revert
* change padding
* remove overflow
* check relative
* padding
* done
* retry
* test
* test
* should work now
* test
* test
* more
* max h
* revert
* done
parent 9a4824818e
commit 7810a3e7dc
components/Chat/Chat.tsx

@@ -1,4 +1,4 @@
-import { Message, OpenAIModel, OpenAIModelNames } from "@/types";
+import { Conversation, Message, OpenAIModel } from "@/types";
 import { FC, useEffect, useRef } from "react";
 import { ChatInput } from "./ChatInput";
 import { ChatLoader } from "./ChatLoader";

@@ -6,16 +6,16 @@ import { ChatMessage } from "./ChatMessage";
 import { ModelSelect } from "./ModelSelect";

 interface Props {
-  model: OpenAIModel;
-  messages: Message[];
+  conversation: Conversation;
+  models: OpenAIModel[];
   messageIsStreaming: boolean;
   loading: boolean;
   lightMode: "light" | "dark";
   onSend: (message: Message) => void;
-  onSelect: (model: OpenAIModel) => void;
+  onModelChange: (conversation: Conversation, model: OpenAIModel) => void;
 }

-export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading, lightMode, onSend, onSelect }) => {
+export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, loading, lightMode, onSend, onModelChange }) => {
   const messagesEndRef = useRef<HTMLDivElement>(null);

   const scrollToBottom = () => {

@@ -24,27 +24,28 @@ export const Chat: FC<Props> = ({ model, messages, messageIsStreaming, loading,

   useEffect(() => {
     scrollToBottom();
-  }, [messages]);
+  }, [conversation.messages]);

   return (
     <div className="flex-1 overflow-scroll dark:bg-[#343541]">
       <div>
-        {messages.length === 0 ? (
+        {conversation.messages.length === 0 ? (
           <>
             <div className="flex justify-center pt-8">
               <ModelSelect
-                model={model}
-                onSelect={onSelect}
+                model={conversation.model}
+                models={models}
+                onModelChange={(model) => onModelChange(conversation, model)}
               />
             </div>

-            <div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">Chatbot UI</div>
+            <div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">{loading ? "Loading..." : "Chatbot UI"}</div>
           </>
         ) : (
           <>
-            <div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {OpenAIModelNames[model]}</div>
+            <div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>

-            {messages.map((message, index) => (
+            {conversation.messages.map((message, index) => (
               <ChatMessage
                 key={index}
                 message={message}
components/Chat/ModelSelect.tsx

@@ -1,27 +1,30 @@
-import { OpenAIModel, OpenAIModelNames } from "@/types";
+import { OpenAIModel } from "@/types";
 import { FC } from "react";

 interface Props {
   model: OpenAIModel;
-  onSelect: (model: OpenAIModel) => void;
+  models: OpenAIModel[];
+  onModelChange: (model: OpenAIModel) => void;
 }

-export const ModelSelect: FC<Props> = ({ model, onSelect }) => {
+export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
   return (
     <div className="flex flex-col">
       <label className="text-left mb-2 dark:text-neutral-400 text-neutral-700">Model</label>
       <select
         className="w-[300px] p-3 dark:text-white dark:bg-[#343541] border border-neutral-500 rounded-lg appearance-none focus:shadow-outline text-neutral-900 cursor-pointer"
         placeholder="Select a model"
-        value={model}
-        onChange={(e) => onSelect(e.target.value as OpenAIModel)}
+        value={model.id}
+        onChange={(e) => {
+          onModelChange(models.find((model) => model.id === e.target.value) as OpenAIModel);
+        }}
       >
-        {Object.entries(OpenAIModelNames).map(([value, name]) => (
+        {models.map((model) => (
           <option
-            key={value}
-            value={value}
+            key={model.id}
+            value={model.id}
           >
-            {name}
+            {model.name}
           </option>
         ))}
       </select>
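
A native `<select>` only round-trips string values, so the new handler looks the chosen id back up in `models` to recover the full object before calling `onModelChange`. A minimal sketch of that lookup in isolation (the `models` array below is sample data, not taken from the repo):

```typescript
interface OpenAIModel {
  id: string;
  name: string;
}

// Sample data for illustration only.
const models: OpenAIModel[] = [
  { id: "gpt-3.5-turbo", name: "Default (GPT-3.5)" },
  { id: "gpt-4", name: "GPT-4" }
];

// Recover the full model object from the string id a <select> emits.
const selectedId = "gpt-4";
const selected = models.find((model) => model.id === selectedId);

console.log(selected); // { id: "gpt-4", name: "GPT-4" }
```

The `as OpenAIModel` cast in the diff assumes the emitted value always matches a known model, which holds here because the `<option>` list is rendered from the same array.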
pages/api/chat.ts

@@ -1,5 +1,5 @@
 import { Message, OpenAIModel } from "@/types";
-import { OpenAIStream } from "@/utils";
+import { OpenAIStream } from "@/utils/server";

 export const config = {
   runtime: "edge"

@@ -23,7 +23,7 @@ const handler = async (req: Request): Promise<Response> => {
       break;
     }
     charCount += message.content.length;
-    messagesToSend = [message, ...messagesToSend]
+    messagesToSend = [message, ...messagesToSend];
   }

   const stream = await OpenAIStream(model, key, messagesToSend);
pages/api/models.ts (new file)

@@ -0,0 +1,46 @@
+import { OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
+
+export const config = {
+  runtime: "edge"
+};
+
+const handler = async (req: Request): Promise<Response> => {
+  try {
+    const { key } = (await req.json()) as {
+      key: string;
+    };
+
+    const response = await fetch("https://api.openai.com/v1/models", {
+      headers: {
+        "Content-Type": "application/json",
+        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
+      }
+    });
+
+    if (response.status !== 200) {
+      throw new Error("OpenAI API returned an error");
+    }
+
+    const json = await response.json();
+
+    const models: OpenAIModel[] = json.data
+      .map((model: any) => {
+        for (const [key, value] of Object.entries(OpenAIModelID)) {
+          if (value === model.id) {
+            return {
+              id: model.id,
+              name: OpenAIModels[value].name
+            };
+          }
+        }
+      })
+      .filter(Boolean);
+
+    return new Response(JSON.stringify(models), { status: 200 });
+  } catch (error) {
+    console.error(error);
+    return new Response("Error", { status: 500 });
+  }
+};
+
+export default handler;
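
The handler intersects whatever OpenAI's `/v1/models` endpoint returns with the app's supported `OpenAIModelID` values: unsupported models map to `undefined` in the `.map` and are dropped by `.filter(Boolean)`. A sketch of that filter, rewritten with `Object.values` for brevity and fed a made-up response payload:

```typescript
enum OpenAIModelID {
  GPT_3_5 = "gpt-3.5-turbo",
  GPT_4 = "gpt-4"
}

// Made-up subset of an OpenAI /v1/models response, for illustration.
const json = {
  data: [{ id: "gpt-3.5-turbo" }, { id: "gpt-4" }, { id: "whisper-1" }]
};

const supported = json.data
  .map((model) =>
    Object.values(OpenAIModelID).includes(model.id as OpenAIModelID)
      ? model.id
      : undefined
  )
  .filter(Boolean);

console.log(supported); // ["gpt-3.5-turbo", "gpt-4"] — "whisper-1" is dropped
```

In effect, GPT-4 only appears in the model picker when the supplied API key can actually use it, assuming `/v1/models` lists only the models available to that key.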
pages/index.tsx

@@ -1,7 +1,8 @@
 import { Chat } from "@/components/Chat/Chat";
 import { Navbar } from "@/components/Mobile/Navbar";
 import { Sidebar } from "@/components/Sidebar/Sidebar";
-import { Conversation, Message, OpenAIModel } from "@/types";
+import { Conversation, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
+import { cleanConversationHistory, cleanSelectedConversation } from "@/utils/app";
 import { IconArrowBarLeft, IconArrowBarRight } from "@tabler/icons-react";
 import Head from "next/head";
 import { useEffect, useState } from "react";

@@ -10,7 +11,7 @@ export default function Home() {
   const [conversations, setConversations] = useState<Conversation[]>([]);
   const [selectedConversation, setSelectedConversation] = useState<Conversation>();
   const [loading, setLoading] = useState<boolean>(false);
-  const [model, setModel] = useState<OpenAIModel>(OpenAIModel.GPT_3_5);
+  const [models, setModels] = useState<OpenAIModel[]>([]);
   const [lightMode, setLightMode] = useState<"dark" | "light">("dark");
   const [messageIsStreaming, setMessageIsStreaming] = useState<boolean>(false);
   const [showSidebar, setShowSidebar] = useState<boolean>(true);

@@ -33,7 +34,7 @@ export default function Home() {
         "Content-Type": "application/json"
       },
       body: JSON.stringify({
-        model,
+        model: updatedConversation.model,
         messages: updatedConversation.messages,
         key: apiKey
       })

@@ -47,6 +48,8 @@ export default function Home() {
     const data = response.body;

     if (!data) {
+      setLoading(false);
+      setMessageIsStreaming(false);
       return;
     }

@@ -144,13 +147,35 @@ export default function Home() {
     localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
   };

+  const handleChangeModel = (conversation: Conversation, model: OpenAIModel) => {
+    const updatedConversation = {
+      ...conversation,
+      model
+    };
+
+    const updatedConversations = conversations.map((c) => {
+      if (c.id === updatedConversation.id) {
+        return updatedConversation;
+      }
+
+      return c;
+    });
+
+    setConversations(updatedConversations);
+    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
+
+    setSelectedConversation(updatedConversation);
+    localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
+  };
+
   const handleNewConversation = () => {
     const lastConversation = conversations[conversations.length - 1];

     const newConversation: Conversation = {
       id: lastConversation ? lastConversation.id + 1 : 1,
       name: `Conversation ${lastConversation ? lastConversation.id + 1 : 1}`,
-      messages: []
+      messages: [],
+      model: OpenAIModels[OpenAIModelID.GPT_3_5]
     };

     const updatedConversations = [...conversations, newConversation];
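
Worth noting: the selected model now lives on each `Conversation` rather than in a single global `model` state, so switching models in one chat leaves other chats untouched. A minimal sketch of the persistence round-trip `handleChangeModel` performs (the conversation literal is illustration only):

```typescript
import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";

// Illustration only: a conversation as the app would store it.
const conversation: Conversation = {
  id: 1,
  name: "Conversation 1",
  messages: [],
  model: OpenAIModels[OpenAIModelID.GPT_3_5]
};

// Swap the model immutably, then mirror the change to localStorage so it
// survives a page reload.
const updated = { ...conversation, model: OpenAIModels[OpenAIModelID.GPT_4] };
localStorage.setItem("selectedConversation", JSON.stringify(updated));
```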
@@ -160,7 +185,6 @@ export default function Home() {
     setSelectedConversation(newConversation);
     localStorage.setItem("selectedConversation", JSON.stringify(newConversation));

-    setModel(OpenAIModel.GPT_3_5);
     setLoading(false);
   };

@@ -181,7 +205,8 @@ export default function Home() {
       setSelectedConversation({
         id: 1,
         name: "New conversation",
-        messages: []
+        messages: [],
+        model: OpenAIModels[OpenAIModelID.GPT_3_5]
       });
       localStorage.removeItem("selectedConversation");
     }

@@ -192,6 +217,27 @@ export default function Home() {
     localStorage.setItem("apiKey", apiKey);
   };

+  const fetchModels = async () => {
+    setLoading(true);
+
+    const response = await fetch("/api/models", {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({
+        key: apiKey
+      })
+    });
+    const data = await response.json();
+
+    if (data) {
+      setModels(data);
+    }
+
+    setLoading(false);
+  };
+
   useEffect(() => {
     const theme = localStorage.getItem("theme");
     if (theme) {

@@ -208,21 +254,27 @@ export default function Home() {
     }

     const conversationHistory = localStorage.getItem("conversationHistory");
     if (conversationHistory) {
-      setConversations(JSON.parse(conversationHistory));
+      const parsedConversationHistory: Conversation[] = JSON.parse(conversationHistory);
+      const cleanedConversationHistory = cleanConversationHistory(parsedConversationHistory);
+      setConversations(cleanedConversationHistory);
     }

     const selectedConversation = localStorage.getItem("selectedConversation");
     if (selectedConversation) {
-      setSelectedConversation(JSON.parse(selectedConversation));
+      const parsedSelectedConversation: Conversation = JSON.parse(selectedConversation);
+      const cleanedSelectedConversation = cleanSelectedConversation(parsedSelectedConversation);
+      setSelectedConversation(cleanedSelectedConversation);
     } else {
       setSelectedConversation({
         id: 1,
         name: "New conversation",
-        messages: []
+        messages: [],
+        model: OpenAIModels[OpenAIModelID.GPT_3_5]
       });
     }

+    fetchModels();
   }, []);

   return (

@@ -242,7 +294,6 @@ export default function Home() {
           href="/favicon.ico"
         />
       </Head>

       {selectedConversation && (
         <div className={`flex flex-col h-screen w-screen text-white ${lightMode}`}>
           <div className="sm:hidden w-full fixed top-0">

@@ -283,13 +334,13 @@ export default function Home() {
         )}

         <Chat
+          conversation={selectedConversation}
           messageIsStreaming={messageIsStreaming}
-          model={model}
-          messages={selectedConversation.messages}
+          models={models}
           loading={loading}
           lightMode={lightMode}
           onSend={handleSend}
-          onSelect={setModel}
+          onModelChange={handleChangeModel}
         />
       </div>
     </div>
types/index.ts

@@ -1,13 +1,22 @@
-export enum OpenAIModel {
-  GPT_3_5 = "gpt-3.5-turbo",
-  GPT_3_5_LEGACY = "gpt-3.5-turbo-0301"
-  // GPT_4 = "gpt-4"
-}
-
-export const OpenAIModelNames: Record<OpenAIModel, string> = {
-  [OpenAIModel.GPT_3_5]: "Default (GPT-3.5)",
-  [OpenAIModel.GPT_3_5_LEGACY]: "Legacy (GPT-3.5)"
-  // [OpenAIModel.GPT_4]: "GPT-4"
-};
+export interface OpenAIModel {
+  id: string;
+  name: string;
+}
+
+export enum OpenAIModelID {
+  GPT_3_5 = "gpt-3.5-turbo",
+  GPT_4 = "gpt-4"
+}
+
+export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
+  [OpenAIModelID.GPT_3_5]: {
+    id: OpenAIModelID.GPT_3_5,
+    name: "Default (GPT-3.5)"
+  },
+  [OpenAIModelID.GPT_4]: {
+    id: OpenAIModelID.GPT_4,
+    name: "GPT-4"
+  }
+};

 export interface Message {

@@ -21,4 +30,13 @@ export interface Conversation {
   id: number;
   name: string;
   messages: Message[];
+  model: OpenAIModel;
 }
+
+// keep track of local storage schema
+export interface LocalStorage {
+  apiKey: string;
+  conversationHistory: Conversation[];
+  selectedConversation: Conversation;
+  theme: "light" | "dark";
+}
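
The old string enum could carry only one piece of data per model; the new shape pairs a wire id with a display name, so UI code reads `model.name` while API calls send `model.id`. For instance, using the definitions above:

```typescript
import { OpenAIModelID, OpenAIModels } from "@/types";

const model = OpenAIModels[OpenAIModelID.GPT_4];

console.log(model.id);   // "gpt-4"  — what gets sent to the OpenAI API
console.log(model.name); // "GPT-4"  — what the model picker shows the user
```

Note that the legacy "gpt-3.5-turbo-0301" entry from the old enum does not survive the migration.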
utils/app (new file, imported as "@/utils/app")

@@ -0,0 +1,33 @@
+import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";
+
+export const cleanConversationHistory = (history: Conversation[]) => {
+  // added model for each conversation (3/20/23)
+
+  if (history.length === 0) {
+    return history;
+  } else {
+    return history.map((conversation) => {
+      if (conversation.model) {
+        return conversation;
+      } else {
+        return {
+          ...conversation,
+          model: OpenAIModels[OpenAIModelID.GPT_3_5]
+        };
+      }
+    });
+  }
+};
+
+export const cleanSelectedConversation = (conversation: Conversation) => {
+  // added model for each conversation (3/20/23)
+
+  if (conversation.model) {
+    return conversation;
+  } else {
+    return {
+      ...conversation,
+      model: OpenAIModels[OpenAIModelID.GPT_3_5]
+    };
+  }
+};
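
These helpers exist because conversations persisted to localStorage by builds before this commit have no `model` field; on load, any such conversation is backfilled with the GPT-3.5 default. A quick illustration with a made-up stored record:

```typescript
import { cleanSelectedConversation } from "@/utils/app";

// A record persisted by a pre-GPT-4 build: note the missing `model` field.
// The `as any` cast stands in for the untyped result of JSON.parse.
const stored = { id: 1, name: "New conversation", messages: [] } as any;

const cleaned = cleanSelectedConversation(stored);

console.log(cleaned.model);
// { id: "gpt-3.5-turbo", name: "Default (GPT-3.5)" }
```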
utils/server (imported as "@/utils/server")

@@ -12,7 +12,7 @@ export const OpenAIStream = async (model: OpenAIModel, key: string, messages: Message[]) => {
     },
     method: "POST",
     body: JSON.stringify({
-      model,
+      model: model.id,
       messages: [
         {
           role: "system",
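
Since `OpenAIModel` is now an object rather than a string enum, the request body must send the string id; serializing the whole object would produce a payload the OpenAI API rejects. A sketch of the difference:

```typescript
const model = { id: "gpt-4", name: "GPT-4" };

// The shorthand `model` would now serialize the whole object:
JSON.stringify({ model });           // '{"model":{"id":"gpt-4","name":"GPT-4"}}'

// The API expects just the id string:
JSON.stringify({ model: model.id }); // '{"model":"gpt-4"}'
```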