add custom system prompt (#39)

parent: 6e19d44020
commit: 0d6ff739a2
@@ -20,14 +20,15 @@ Expect frequent improvements.
 
 **Next up:**
 
+- [ ] More custom model settings
+- [ ] Regenerate & edit responses
 - [ ] Saving via data export
 - [ ] Folders
-- [ ] Custom model settings
 - [ ] Prompt templates
-- [ ] Regenerate & edit responses
 
 **Recent updates:**
 
+- [x] Custom system prompt (3/21/23)
 - [x] Error handling (3/20/23)
 - [x] GPT-4 support (access required) (3/20/23)
 - [x] Search conversations (3/19/23)
@@ -52,7 +53,7 @@ Modify the system prompt in `utils/index.ts`.
 
 Host your own live version of Chatbot UI with Vercel.
 
-[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui&envDescription=Your%20OpenAI%20API%20Key.%20Chat%20will%20not%20work%20if%20you%20don't%20provide%20it.)
+[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui)
 
 **Replit**
 
@@ -1,10 +1,11 @@
-import { Conversation, Message, OpenAIModel } from "@/types";
+import { Conversation, KeyValuePair, Message, OpenAIModel } from "@/types";
 import { FC, useEffect, useRef, useState } from "react";
 import { ChatInput } from "./ChatInput";
 import { ChatLoader } from "./ChatLoader";
 import { ChatMessage } from "./ChatMessage";
 import { ModelSelect } from "./ModelSelect";
 import { Regenerate } from "./Regenerate";
+import { SystemPrompt } from "./SystemPrompt";
 
 interface Props {
   conversation: Conversation;
@@ -15,20 +16,10 @@ interface Props {
   loading: boolean;
   lightMode: "light" | "dark";
   onSend: (message: Message, isResend: boolean) => void;
-  onModelChange: (conversation: Conversation, model: OpenAIModel) => void;
+  onUpdateConversation: (conversation: Conversation, data: KeyValuePair) => void;
 }
 
-export const Chat: FC<Props> = ({
-  conversation,
-  models,
-  messageIsStreaming,
-  modelError,
-  messageError,
-  loading,
-  lightMode,
-  onSend,
-  onModelChange,
-}) => {
+export const Chat: FC<Props> = ({ conversation, models, messageIsStreaming, modelError, messageError, loading, lightMode, onSend, onUpdateConversation }) => {
   const [currentMessage, setCurrentMessage] = useState<Message>();
 
   const messagesEndRef = useRef<HTMLDivElement>(null);
@@ -46,38 +37,36 @@ export const Chat: FC<Props> = ({
       {modelError ? (
         <div className="flex flex-col justify-center mx-auto h-full w-[300px] sm:w-[500px] space-y-6">
           <div className="text-center text-red-500">Error fetching models.</div>
-          <div className="text-center text-red-500">
-            Make sure your OpenAI API key is set in the bottom left of the
-            sidebar or in a .env.local file and refresh.
-          </div>
-          <div className="text-center text-red-500">
-            If you completed this step, OpenAI may be experiencing issues.
-          </div>
+          <div className="text-center text-red-500">Make sure your OpenAI API key is set in the bottom left of the sidebar or in a .env.local file and refresh.</div>
+          <div className="text-center text-red-500">If you completed this step, OpenAI may be experiencing issues.</div>
         </div>
       ) : (
         <>
           <div>
             {conversation.messages.length === 0 ? (
               <>
-                <div className="flex justify-center pt-8">
-                  <ModelSelect
-                    model={conversation.model}
-                    models={models}
-                    onModelChange={(model) =>
-                      onModelChange(conversation, model)
-                    }
-                  />
-                </div>
+                <div className="flex flex-col mx-auto pt-12 space-y-10 w-[350px] sm:w-[600px]">
+                  <div className="text-4xl text-center text-neutral-600 dark:text-neutral-200">{models.length === 0 ? "Loading..." : "Chatbot UI"}</div>
 
-                <div className="text-4xl text-center text-neutral-600 dark:text-neutral-200 pt-[160px] sm:pt-[280px]">
-                  {models.length === 0 ? "Loading..." : "Chatbot UI"}
+                  {models.length > 0 && (
+                    <div className="flex flex-col h-full space-y-4 border p-4 rounded border-neutral-500">
+                      <ModelSelect
+                        model={conversation.model}
+                        models={models}
+                        onModelChange={(model) => onUpdateConversation(conversation, { key: "model", value: model })}
+                      />
+
+                      <SystemPrompt
+                        conversation={conversation}
+                        onChangePrompt={(prompt) => onUpdateConversation(conversation, { key: "prompt", value: prompt })}
+                      />
+                    </div>
+                  )}
                 </div>
               </>
             ) : (
               <>
-                <div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">
-                  Model: {conversation.model.name}
-                </div>
+                <div className="flex justify-center py-2 text-neutral-500 bg-neutral-100 dark:bg-[#444654] dark:text-neutral-200 text-sm border border-b-neutral-300 dark:border-none">Model: {conversation.model.name}</div>
 
                 {conversation.messages.map((message, index) => (
                   <ChatMessage
@@ -45,10 +45,8 @@ export const ChatInput: FC<Props> = ({ onSend, messageIsStreaming, model }) => {
   };
 
   const isMobile = () => {
-    const userAgent =
-      typeof window.navigator === "undefined" ? "" : navigator.userAgent;
-    const mobileRegex =
-      /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini|Mobile|mobile|CriOS/i;
+    const userAgent = typeof window.navigator === "undefined" ? "" : navigator.userAgent;
+    const mobileRegex = /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini|Mobile|mobile|CriOS/i;
     return mobileRegex.test(userAgent);
   };
 
@@ -72,12 +70,12 @@ export const ChatInput: FC<Props> = ({ onSend, messageIsStreaming, model }) => {
     <div className="fixed sm:absolute bottom-4 sm:bottom-8 w-full sm:w-1/2 px-2 left-0 sm:left-[280px] lg:left-[200px] right-0 ml-auto mr-auto">
       <textarea
         ref={textareaRef}
-        className="rounded-lg pl-4 pr-8 py-3 w-full focus:outline-none max-h-[280px] dark:bg-[#40414F] dark:border-opacity-50 dark:border-neutral-800 dark:text-neutral-100 border border-neutral-300 shadow text-neutral-900"
+        className="rounded-lg pl-4 pr-8 py-3 w-full focus:outline-none dark:bg-[#40414F] dark:border-opacity-50 dark:border-neutral-800 dark:text-neutral-100 border border-neutral-300 shadow text-neutral-900"
         style={{
           resize: "none",
           bottom: `${textareaRef?.current?.scrollHeight}px`,
           maxHeight: "400px",
-          overflow: "auto",
+          overflow: "auto"
         }}
         placeholder="Type a message..."
         value={content}
@@ -20,9 +20,7 @@ export const ChatMessage: FC<Props> = ({ message, lightMode }) => {
 
       <div className="prose dark:prose-invert mt-[-2px]">
         {message.role === "user" ? (
-          <div className="prose dark:prose-invert whitespace-pre-wrap">
-            {message.content}
-          </div>
+          <div className="prose dark:prose-invert whitespace-pre-wrap">{message.content}</div>
         ) : (
           <ReactMarkdown
             remarkPlugins={[remarkGfm]}
@@ -38,11 +36,23 @@ export const ChatMessage: FC<Props> = ({ message, lightMode }) => {
                     {...props}
                   />
                 ) : (
-                  <code className={className} {...props}>
+                  <code
+                    className={className}
+                    {...props}
+                  >
                     {children}
                   </code>
                 );
               },
+              table({ children }) {
+                return <table className="border-collapse border border-black dark:border-white py-1 px-3">{children}</table>;
+              },
+              th({ children }) {
+                return <th className="border border-black dark:border-white break-words py-1 px-3 bg-gray-500 text-white">{children}</th>;
+              },
+              td({ children }) {
+                return <td className="border border-black dark:border-white break-words py-1 px-3">{children}</td>;
+              }
             }}
           >
             {message.content}
@@ -12,7 +12,7 @@ export const ModelSelect: FC<Props> = ({ model, models, onModelChange }) => {
     <div className="flex flex-col">
       <label className="text-left mb-2 dark:text-neutral-400 text-neutral-700">Model</label>
       <select
-        className="w-[300px] p-3 dark:text-white dark:bg-[#343541] border border-neutral-500 rounded-lg appearance-none focus:shadow-outline text-neutral-900 cursor-pointer"
+        className="w-full p-3 dark:text-white dark:bg-[#343541] border border-neutral-500 rounded-lg appearance-none focus:shadow-outline text-neutral-900 cursor-pointer"
         placeholder="Select a model"
         value={model.id}
         onChange={(e) => {
@@ -0,0 +1,65 @@
+import { Conversation } from "@/types";
+import { DEFAULT_SYSTEM_PROMPT } from "@/utils/app/const";
+import { FC, useEffect, useRef, useState } from "react";
+
+interface Props {
+  conversation: Conversation;
+  onChangePrompt: (prompt: string) => void;
+}
+
+export const SystemPrompt: FC<Props> = ({ conversation, onChangePrompt }) => {
+  const [value, setValue] = useState<string>("");
+
+  const textareaRef = useRef<HTMLTextAreaElement>(null);
+
+  const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
+    const value = e.target.value;
+    const maxLength = 4000;
+
+    if (value.length > maxLength) {
+      alert(`Prompt limit is ${maxLength} characters`);
+      return;
+    }
+
+    setValue(value);
+
+    if (value.length > 0) {
+      onChangePrompt(value);
+    }
+  };
+
+  useEffect(() => {
+    if (textareaRef && textareaRef.current) {
+      textareaRef.current.style.height = "inherit";
+      textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
+    }
+  }, [value]);
+
+  useEffect(() => {
+    if (conversation.prompt) {
+      setValue(conversation.prompt);
+    } else {
+      setValue(DEFAULT_SYSTEM_PROMPT);
+    }
+  }, [conversation]);
+
+  return (
+    <div className="flex flex-col">
+      <label className="text-left dark:text-neutral-400 text-neutral-700 mb-2">System Prompt</label>
+      <textarea
+        ref={textareaRef}
+        className="w-full rounded-lg px-4 py-2 focus:outline-none dark:bg-[#40414F] dark:border-opacity-50 dark:border-neutral-800 dark:text-neutral-100 border border-neutral-500 shadow text-neutral-900"
+        style={{
+          resize: "none",
+          bottom: `${textareaRef?.current?.scrollHeight}px`,
+          maxHeight: "300px",
+          overflow: "auto"
+        }}
+        placeholder="Enter a prompt"
+        value={value}
+        rows={1}
+        onChange={handleChange}
+      />
+    </div>
+  );
+};
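The new component wires into the rest of the commit through `onChangePrompt`. A minimal usage sketch, assuming a selected `Conversation` and the `handleUpdateConversation` helper that appears later in this diff:

```tsx
// Sketch only: the parent names (selectedConversation, handleUpdateConversation)
// are taken from pages/index.tsx below; the surrounding wiring is assumed.
<SystemPrompt
  conversation={selectedConversation}
  onChangePrompt={(prompt) =>
    handleUpdateConversation(selectedConversation, { key: "prompt", value: prompt })
  }
/>
```

Note that `handleChange` only calls `onChangePrompt` while the new value is non-empty, so clearing the textarea entirely leaves the previously saved prompt on the conversation.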
@@ -19,7 +19,7 @@ export const Key: FC<Props> = ({ apiKey, onApiKeyChange }) => {
   };
 
   const handleUpdateKey = (newKey: string) => {
-    onApiKeyChange(newKey);
+    onApiKeyChange(newKey.trim());
     setIsChanging(false);
   };
 
@@ -1,4 +1,4 @@
-import { Conversation } from "@/types";
+import { Conversation, KeyValuePair } from "@/types";
 import { IconArrowBarLeft, IconPlus } from "@tabler/icons-react";
 import { FC, useEffect, useState } from "react";
 import { Conversations } from "./Conversations";
@@ -16,11 +16,11 @@ interface Props {
   onSelectConversation: (conversation: Conversation) => void;
   onDeleteConversation: (conversation: Conversation) => void;
   onToggleSidebar: () => void;
-  onRenameConversation: (conversation: Conversation, name: string) => void;
+  onUpdateConversation: (conversation: Conversation, data: KeyValuePair) => void;
   onApiKeyChange: (apiKey: string) => void;
 }
 
-export const Sidebar: FC<Props> = ({ loading, conversations, lightMode, selectedConversation, apiKey, onNewConversation, onToggleLightMode, onSelectConversation, onDeleteConversation, onToggleSidebar, onRenameConversation, onApiKeyChange }) => {
+export const Sidebar: FC<Props> = ({ loading, conversations, lightMode, selectedConversation, apiKey, onNewConversation, onToggleLightMode, onSelectConversation, onDeleteConversation, onToggleSidebar, onUpdateConversation, onApiKeyChange }) => {
   const [searchTerm, setSearchTerm] = useState<string>("");
   const [filteredConversations, setFilteredConversations] = useState<Conversation[]>(conversations);
 
@@ -74,7 +74,7 @@ export const Sidebar: FC<Props> = ({ loading, conversations, lightMode, selected
             setSearchTerm("");
           }}
           onRenameConversation={(conversation, name) => {
-            onRenameConversation(conversation, name);
+            onUpdateConversation(conversation, { key: "name", value: name });
             setSearchTerm("");
           }}
         />
@@ -1,4 +1,5 @@
-import { Message, OpenAIModel, OpenAIModelID } from "@/types";
+import { ChatBody, Message, OpenAIModelID } from "@/types";
+import { DEFAULT_SYSTEM_PROMPT } from "@/utils/app/const";
 import { OpenAIStream } from "@/utils/server";
 import tiktokenModel from "@dqbd/tiktoken/encoders/cl100k_base.json";
 import { init, Tiktoken } from "@dqbd/tiktoken/lite/init";
@@ -11,11 +12,7 @@ export const config = {
 
 const handler = async (req: Request): Promise<Response> => {
   try {
-    const { model, messages, key } = (await req.json()) as {
-      model: OpenAIModel;
-      messages: Message[];
-      key: string;
-    };
+    const { model, messages, key, prompt } = (await req.json()) as ChatBody;
 
     await init((imports) => WebAssembly.instantiate(wasm, imports));
     const encoding = new Tiktoken(tiktokenModel.bpe_ranks, tiktokenModel.special_tokens, tiktokenModel.pat_str);
@@ -37,7 +34,12 @@ const handler = async (req: Request): Promise<Response> => {
 
     encoding.free();
 
-    const stream = await OpenAIStream(model, key, messagesToSend);
+    let promptToSend = prompt;
+    if (!promptToSend) {
+      promptToSend = DEFAULT_SYSTEM_PROMPT;
+    }
+
+    const stream = await OpenAIStream(model, promptToSend, key, messagesToSend);
 
     return new Response(stream);
   } catch (error) {
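For reference, a sketch of the request this endpoint now accepts; the `ChatBody` shape is defined in the types change later in this diff, and an empty `prompt` falls back to `DEFAULT_SYSTEM_PROMPT` on the server. The example values themselves are assumptions:

```ts
// Assumed example values; only the ChatBody shape comes from this commit.
const body: ChatBody = {
  model: OpenAIModels[OpenAIModelID.GPT_3_5],
  messages: [{ role: "user", content: "Hello" }],
  key: apiKey, // user-supplied OpenAI key from the sidebar
  prompt: ""   // falsy, so the handler substitutes DEFAULT_SYSTEM_PROMPT
};

const res = await fetch("/api/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify(body)
});
```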
pages/index.tsx (195 changed lines)
@@ -1,8 +1,10 @@
 import { Chat } from "@/components/Chat/Chat";
 import { Navbar } from "@/components/Mobile/Navbar";
 import { Sidebar } from "@/components/Sidebar/Sidebar";
-import { Conversation, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
-import { cleanConversationHistory, cleanSelectedConversation } from "@/utils/app";
+import { ChatBody, Conversation, KeyValuePair, Message, OpenAIModel, OpenAIModelID, OpenAIModels } from "@/types";
+import { cleanConversationHistory, cleanSelectedConversation } from "@/utils/app/clean";
+import { DEFAULT_SYSTEM_PROMPT } from "@/utils/app/const";
+import { saveConversation, saveConversations, updateConversation } from "@/utils/app/conversation";
 import { IconArrowBarLeft, IconArrowBarRight } from "@tabler/icons-react";
 import Head from "next/head";
 import { useEffect, useState } from "react";
@@ -43,16 +45,19 @@ export default function Home() {
     setMessageIsStreaming(true);
     setMessageError(false);
 
+    const chatBody: ChatBody = {
+      model: updatedConversation.model,
+      messages: updatedConversation.messages,
+      key: apiKey,
+      prompt: updatedConversation.prompt
+    };
+
     const response = await fetch("/api/chat", {
       method: "POST",
       headers: {
         "Content-Type": "application/json"
       },
-      body: JSON.stringify({
-        model: updatedConversation.model,
-        messages: updatedConversation.messages,
-        key: apiKey
-      })
+      body: JSON.stringify(chatBody)
     });
 
     if (!response.ok) {
@@ -118,7 +123,7 @@ export default function Home() {
       }
     }
 
-    localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
+    saveConversation(updatedConversation);
 
     const updatedConversations: Conversation[] = conversations.map((conversation) => {
       if (conversation.id === selectedConversation.id) {
@@ -134,108 +139,12 @@ export default function Home() {
 
     setConversations(updatedConversations);
 
-    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
+    saveConversations(updatedConversations);
 
       setMessageIsStreaming(false);
     }
   };
 
-  const handleLightMode = (mode: "dark" | "light") => {
-    setLightMode(mode);
-    localStorage.setItem("theme", mode);
-  };
-
-  const handleRenameConversation = (conversation: Conversation, name: string) => {
-    const updatedConversation = {
-      ...conversation,
-      name
-    };
-
-    const updatedConversations = conversations.map((c) => {
-      if (c.id === updatedConversation.id) {
-        return updatedConversation;
-      }
-
-      return c;
-    });
-
-    setConversations(updatedConversations);
-    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
-
-    setSelectedConversation(updatedConversation);
-    localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
-  };
-
-  const handleChangeModel = (conversation: Conversation, model: OpenAIModel) => {
-    const updatedConversation = {
-      ...conversation,
-      model
-    };
-
-    const updatedConversations = conversations.map((c) => {
-      if (c.id === updatedConversation.id) {
-        return updatedConversation;
-      }
-
-      return c;
-    });
-
-    setConversations(updatedConversations);
-    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
-
-    setSelectedConversation(updatedConversation);
-    localStorage.setItem("selectedConversation", JSON.stringify(updatedConversation));
-  };
-
-  const handleNewConversation = () => {
-    const lastConversation = conversations[conversations.length - 1];
-
-    const newConversation: Conversation = {
-      id: lastConversation ? lastConversation.id + 1 : 1,
-      name: `Conversation ${lastConversation ? lastConversation.id + 1 : 1}`,
-      messages: [],
-      model: OpenAIModels[OpenAIModelID.GPT_3_5]
-    };
-
-    const updatedConversations = [...conversations, newConversation];
-    setConversations(updatedConversations);
-    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
-
-    setSelectedConversation(newConversation);
-    localStorage.setItem("selectedConversation", JSON.stringify(newConversation));
-
-    setLoading(false);
-  };
-
-  const handleSelectConversation = (conversation: Conversation) => {
-    setSelectedConversation(conversation);
-    localStorage.setItem("selectedConversation", JSON.stringify(conversation));
-  };
-
-  const handleDeleteConversation = (conversation: Conversation) => {
-    const updatedConversations = conversations.filter((c) => c.id !== conversation.id);
-    setConversations(updatedConversations);
-    localStorage.setItem("conversationHistory", JSON.stringify(updatedConversations));
-
-    if (updatedConversations.length > 0) {
-      setSelectedConversation(updatedConversations[updatedConversations.length - 1]);
-      localStorage.setItem("selectedConversation", JSON.stringify(updatedConversations[updatedConversations.length - 1]));
-    } else {
-      setSelectedConversation({
-        id: 1,
-        name: "New conversation",
-        messages: [],
-        model: OpenAIModels[OpenAIModelID.GPT_3_5]
-      });
-      localStorage.removeItem("selectedConversation");
-    }
-  };
-
-  const handleApiKeyChange = (apiKey: string) => {
-    setApiKey(apiKey);
-    localStorage.setItem("apiKey", apiKey);
-  };
-
   const fetchModels = async (key: string) => {
     const response = await fetch("/api/models", {
       method: "POST",
@@ -262,6 +171,75 @@ export default function Home() {
     setModels(data);
   };
 
+  const handleLightMode = (mode: "dark" | "light") => {
+    setLightMode(mode);
+    localStorage.setItem("theme", mode);
+  };
+
+  const handleApiKeyChange = (apiKey: string) => {
+    setApiKey(apiKey);
+    localStorage.setItem("apiKey", apiKey);
+  };
+
+  const handleSelectConversation = (conversation: Conversation) => {
+    setSelectedConversation(conversation);
+    saveConversation(conversation);
+  };
+
+  const handleNewConversation = () => {
+    const lastConversation = conversations[conversations.length - 1];
+
+    const newConversation: Conversation = {
+      id: lastConversation ? lastConversation.id + 1 : 1,
+      name: `Conversation ${lastConversation ? lastConversation.id + 1 : 1}`,
+      messages: [],
+      model: OpenAIModels[OpenAIModelID.GPT_3_5],
+      prompt: DEFAULT_SYSTEM_PROMPT
+    };
+
+    const updatedConversations = [...conversations, newConversation];
+
+    setSelectedConversation(newConversation);
+    setConversations(updatedConversations);
+
+    saveConversation(newConversation);
+    saveConversations(updatedConversations);
+
+    setLoading(false);
+  };
+
+  const handleDeleteConversation = (conversation: Conversation) => {
+    const updatedConversations = conversations.filter((c) => c.id !== conversation.id);
+    setConversations(updatedConversations);
+    saveConversations(updatedConversations);
+
+    if (updatedConversations.length > 0) {
+      setSelectedConversation(updatedConversations[updatedConversations.length - 1]);
+      saveConversation(updatedConversations[updatedConversations.length - 1]);
+    } else {
+      setSelectedConversation({
+        id: 1,
+        name: "New conversation",
+        messages: [],
+        model: OpenAIModels[OpenAIModelID.GPT_3_5],
+        prompt: DEFAULT_SYSTEM_PROMPT
+      });
+      localStorage.removeItem("selectedConversation");
+    }
+  };
+
+  const handleUpdateConversation = (conversation: Conversation, data: KeyValuePair) => {
+    const updatedConversation = {
+      ...conversation,
+      [data.key]: data.value
+    };
+
+    const { single, all } = updateConversation(updatedConversation, conversations);
+
+    setSelectedConversation(single);
+    setConversations(all);
+  };
+
   useEffect(() => {
     const theme = localStorage.getItem("theme");
     if (theme) {
@@ -294,7 +272,8 @@ export default function Home() {
         id: 1,
         name: "New conversation",
         messages: [],
-        model: OpenAIModels[OpenAIModelID.GPT_3_5]
+        model: OpenAIModels[OpenAIModelID.GPT_3_5],
+        prompt: DEFAULT_SYSTEM_PROMPT
       });
     }
 
@@ -341,7 +320,7 @@ export default function Home() {
             onSelectConversation={handleSelectConversation}
             onDeleteConversation={handleDeleteConversation}
             onToggleSidebar={() => setShowSidebar(!showSidebar)}
-            onRenameConversation={handleRenameConversation}
+            onUpdateConversation={handleUpdateConversation}
             onApiKeyChange={handleApiKeyChange}
           />
 
@@ -366,7 +345,7 @@ export default function Home() {
             loading={loading}
             lightMode={lightMode}
             onSend={handleSend}
-            onModelChange={handleChangeModel}
+            onUpdateConversation={handleUpdateConversation}
           />
         </div>
       </div>
@@ -31,6 +31,19 @@ export interface Conversation {
   name: string;
   messages: Message[];
   model: OpenAIModel;
+  prompt: string;
+}
+
+export interface ChatBody {
+  model: OpenAIModel;
+  messages: Message[];
+  key: string;
+  prompt: string;
+}
+
+export interface KeyValuePair {
+  key: string;
+  value: any;
 }
 
 // keep track of local storage schema
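`KeyValuePair` is what lets the per-field callbacks (`onRenameConversation`, `onModelChange`) collapse into the single `onUpdateConversation` seen above, at the cost of `value: any`. A stricter variant, offered here only as a sketch and not part of the commit, would derive the pair from `Conversation` itself:

```ts
// Hypothetical alternative: a discriminated union over Conversation's keys,
// so { key: "prompt", value: 42 } would fail to type-check.
type ConversationUpdate = {
  [K in keyof Conversation]: { key: K; value: Conversation[K] };
}[keyof Conversation];

const ok: ConversationUpdate = { key: "prompt", value: "You are a terse assistant." };
```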
@@ -0,0 +1,52 @@
+import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";
+import { DEFAULT_SYSTEM_PROMPT } from "./const";
+
+export const cleanSelectedConversation = (conversation: Conversation) => {
+  // added model for each conversation (3/20/23)
+  // added system prompt for each conversation (3/21/23)
+
+  let updatedConversation = conversation;
+
+  // check for model on each conversation
+  if (!updatedConversation.model) {
+    updatedConversation = {
+      ...updatedConversation,
+      model: OpenAIModels[OpenAIModelID.GPT_3_5]
+    };
+  }
+
+  // check for system prompt on each conversation
+  if (!updatedConversation.prompt) {
+    updatedConversation = {
+      ...updatedConversation,
+      prompt: DEFAULT_SYSTEM_PROMPT
+    };
+  }
+
+  return updatedConversation;
+};
+
+export const cleanConversationHistory = (history: Conversation[]) => {
+  // added model for each conversation (3/20/23)
+  // added system prompt for each conversation (3/21/23)
+
+  let updatedHistory = [...history];
+
+  // check for model on each conversation
+  if (!updatedHistory.every((conversation) => conversation.model)) {
+    updatedHistory = updatedHistory.map((conversation) => ({
+      ...conversation,
+      model: OpenAIModels[OpenAIModelID.GPT_3_5]
+    }));
+  }
+
+  // check for system prompt on each conversation
+  if (!updatedHistory.every((conversation) => conversation.prompt)) {
+    updatedHistory = updatedHistory.map((conversation) => ({
+      ...conversation,
+      systemPrompt: DEFAULT_SYSTEM_PROMPT
+    }));
+  }
+
+  return updatedHistory;
+};
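One detail worth flagging in the new migration above: `cleanConversationHistory` writes a `systemPrompt` key, while the `Conversation` interface added in this commit (and `cleanSelectedConversation` directly above) use the field name `prompt`. If the intent is to backfill `Conversation.prompt`, the mapping would presumably read:

```ts
// Presumed intent, mirroring cleanSelectedConversation:
updatedHistory = updatedHistory.map((conversation) => ({
  ...conversation,
  prompt: DEFAULT_SYSTEM_PROMPT
}));
```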
@@ -0,0 +1 @@
+export const DEFAULT_SYSTEM_PROMPT = "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.";
@@ -0,0 +1,27 @@
+import { Conversation } from "@/types";
+
+export const updateConversation = (updatedConversation: Conversation, allConversations: Conversation[]) => {
+  const updatedConversations = allConversations.map((c) => {
+    if (c.id === updatedConversation.id) {
+      return updatedConversation;
+    }
+
+    return c;
+  });
+
+  saveConversation(updatedConversation);
+  saveConversations(updatedConversations);
+
+  return {
+    single: updatedConversation,
+    all: updatedConversations
+  };
+};
+
+export const saveConversation = (conversation: Conversation) => {
+  localStorage.setItem("selectedConversation", JSON.stringify(conversation));
+};
+
+export const saveConversations = (conversations: Conversation[]) => {
+  localStorage.setItem("conversationHistory", JSON.stringify(conversations));
+};
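A short sketch of how these helpers compose, matching `handleUpdateConversation` in `pages/index.tsx` above; the state setters are assumed to be the usual React ones:

```ts
// updateConversation persists both the single conversation and the full
// list to localStorage, then hands back { single, all } for React state.
const updated = { ...conversation, prompt: "Answer only in French." };
const { single, all } = updateConversation(updated, conversations);

setSelectedConversation(single); // the conversation that changed
setConversations(all);           // the full list with the change applied
```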
@@ -1,33 +0,0 @@
-import { Conversation, OpenAIModelID, OpenAIModels } from "@/types";
-
-export const cleanConversationHistory = (history: Conversation[]) => {
-  // added model for each conversation (3/20/23)
-
-  if (history.length === 0) {
-    return history;
-  } else {
-    return history.map((conversation) => {
-      if (conversation.model) {
-        return conversation;
-      } else {
-        return {
-          ...conversation,
-          model: OpenAIModels[OpenAIModelID.GPT_3_5]
-        };
-      }
-    });
-  }
-};
-
-export const cleanSelectedConversation = (conversation: Conversation) => {
-  // added model for each conversation (3/20/23)
-
-  if (conversation.model) {
-    return conversation;
-  } else {
-    return {
-      ...conversation,
-      model: OpenAIModels[OpenAIModelID.GPT_3_5]
-    };
-  }
-};
@@ -1,10 +1,7 @@
 import { Message, OpenAIModel } from "@/types";
 import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser";
 
-export const OpenAIStream = async (model: OpenAIModel, key: string, messages: Message[]) => {
-  const encoder = new TextEncoder();
-  const decoder = new TextDecoder();
-
+export const OpenAIStream = async (model: OpenAIModel, systemPrompt: string, key: string, messages: Message[]) => {
   const res = await fetch("https://api.openai.com/v1/chat/completions", {
     headers: {
       "Content-Type": "application/json",
@@ -16,7 +13,7 @@ export const OpenAIStream = async (model: OpenAIModel, key: string, messages: Message[]) => {
       messages: [
         {
           role: "system",
-          content: `You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown format.`
+          content: systemPrompt
         },
         ...messages
       ],
@@ -30,6 +27,9 @@ export const OpenAIStream = async (model: OpenAIModel, key: string, messages: Message[]) => {
     throw new Error("OpenAI API returned an error");
   }
 
+  const encoder = new TextEncoder();
+  const decoder = new TextDecoder();
+
   const stream = new ReadableStream({
     async start(controller) {
       const onParse = (event: ParsedEvent | ReconnectInterval) => {
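Put together, the system prompt now travels from the conversation, through `ChatBody`, into the first message of the completion request. A sketch of the resulting payload; fields not visible in these hunks (`model: model.id`, `stream: true`) are assumptions based on the streaming behavior:

```ts
const payload = {
  model: model.id, // assumed; only the messages array is shown in the diff
  stream: true,    // assumed; implied by the SSE parsing in the ReadableStream
  messages: [
    { role: "system", content: systemPrompt }, // previously a hardcoded string
    ...messages
  ]
};
```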