chatbot-ui starter

commit a6503fb498 (parent 4c9730e4cd)
.eslintrc.json
@@ -0,0 +1,3 @@
{
  "extends": "next/core-web-vitals"
}
.gitignore
@@ -0,0 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
README.md
@@ -1,72 +1 @@
# Chatbot UI

A simple chatbot starter kit for OpenAI's chat model using Next.js, TypeScript, and Tailwind CSS.

See a [demo](https://twitter.com/mckaywrigley/status/1634549098954248193?s=46&t=AowqkodyK6B4JccSOxSPew).



## Features

Chatbot UI provides a simple, fully-functional chat interface that you can use to start building your own chatbot apps powered by OpenAI.

It has everything you need to hit the ground running.

Modify the chat interface in `components/Chat`.

Tweak the system prompt in `utils/index.ts`.

Tweak the assistant prompt in `pages/index.tsx`.

## Deploy

**Vercel**

Host your own live version of Chatbot UI with Vercel.

[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui&env=OPENAI_API_KEY&envDescription=OpenAI%20API%20Key%20needed%20for%20chat.&envLink=https%3A%2F%2Fopenai.com%2Fproduct&project-name=chatbot-ui&repository-name=chatbot-ui)

**Replit**

Fork Chatbot UI on Replit [here](https://replit.com/@MckayWrigley/chatbot-ui).

## Running Locally

**1. Clone Repo**

```bash
git clone https://github.com/mckaywrigley/chatbot-ui.git
```

**2. Install Dependencies**

```bash
npm i
```

**3. Provide OpenAI API Key**

Create a `.env.local` file in the root of the repo with your OpenAI API Key:

```bash
OPENAI_API_KEY=<YOUR_KEY>
```

**4. Run App**

```bash
npm run dev
```

**5. Start Building**

You should be able to start chatting with the bot.

Now, go build the app into whatever kind of chatbot you want!

## Contact

If you have any questions, feel free to reach out to me on [Twitter](https://twitter.com/mckaywrigley).

I'd also love to see what you build with this starter kit - share your projects with me!
# chatbot-ui-pro
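An aside on the customization pointers in the README above: the system prompt it refers to is the `role: "system"` message that `OpenAIStream` in `utils/index.ts` (later in this diff) sends with every chat completion request, and the assistant prompt is the greeting placed into state by a `useEffect` in `pages/index.tsx`. A minimal sketch of the kind of tweak the README suggests; the wording below is invented for illustration and is not part of this commit:

```ts
// Sketch only: illustrative replacement prompts, not the values shipped in this commit.

// In utils/index.ts, the system prompt is the first entry of the messages array
// sent to the chat completions endpoint; changing its content changes the bot's behavior.
const systemMessage = {
  role: "system",
  content: "You are a concise assistant that answers in short bullet points."
};

// In pages/index.tsx, the assistant prompt is the greeting shown on first load.
const initialAssistantMessage = {
  role: "assistant",
  content: "Hi! Ask me anything to get started."
};
```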
components/Chat/Chat.tsx
@@ -0,0 +1,36 @@
import { Message } from "@/types";
import { FC } from "react";
import { ChatInput } from "./ChatInput";
import { ChatLoader } from "./ChatLoader";
import { ChatMessage } from "./ChatMessage";

interface Props {
  messages: Message[];
  loading: boolean;
  onSend: (message: Message) => void;
}

export const Chat: FC<Props> = ({ messages, loading, onSend }) => {
  return (
    <div className="flex flex-col rounded-lg px-2 sm:p-4 sm:border border-neutral-300">
      {messages.map((message, index) => (
        <div
          key={index}
          className="my-1 sm:my-1.5"
        >
          <ChatMessage message={message} />
        </div>
      ))}

      {loading && (
        <div className="my-1 sm:my-1.5">
          <ChatLoader />
        </div>
      )}

      <div className="mt-4 sm:mt-8 bottom-[56px] left-0 w-full">
        <ChatInput onSend={onSend} />
      </div>
    </div>
  );
};
components/Chat/ChatInput.tsx
@@ -0,0 +1,65 @@
import { Message } from "@/types";
import { IconArrowUp } from "@tabler/icons-react";
import { FC, KeyboardEvent, useEffect, useRef, useState } from "react";

interface Props {
  onSend: (message: Message) => void;
}

export const ChatInput: FC<Props> = ({ onSend }) => {
  const [content, setContent] = useState<string>();

  const textareaRef = useRef<HTMLTextAreaElement>(null);

  const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
    const value = e.target.value;
    if (value.length > 4000) {
      alert("Message limit is 4000 characters");
      return;
    }

    setContent(value);
  };

  const handleSend = () => {
    if (!content) {
      alert("Please enter a message");
      return;
    }
    onSend({ role: "user", content });
    setContent("");
  };

  const handleKeyDown = (e: KeyboardEvent<HTMLTextAreaElement>) => {
    if (e.key === "Enter" && !e.shiftKey) {
      e.preventDefault();
      handleSend();
    }
  };

  useEffect(() => {
    if (textareaRef && textareaRef.current) {
      textareaRef.current.style.height = "inherit";
      textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
    }
  }, [content]);

  return (
    <div className="relative">
      <textarea
        ref={textareaRef}
        className="min-h-[44px] rounded-lg pl-4 pr-12 py-2 w-full focus:outline-none focus:ring-1 focus:ring-neutral-300 border-2 border-neutral-200"
        style={{ resize: "none" }}
        placeholder="Type a message..."
        value={content}
        rows={1}
        onChange={handleChange}
        onKeyDown={handleKeyDown}
      />

      <button onClick={() => handleSend()}>
        <IconArrowUp className="absolute right-2 bottom-3 h-8 w-8 hover:cursor-pointer rounded-full p-1 bg-blue-500 text-white hover:opacity-80" />
      </button>
    </div>
  );
};
components/Chat/ChatLoader.tsx
@@ -0,0 +1,17 @@
import { IconDots } from "@tabler/icons-react";
import { FC } from "react";

interface Props {}

export const ChatLoader: FC<Props> = () => {
  return (
    <div className="flex flex-col flex-start">
      <div
        className={`flex items-center bg-neutral-200 text-neutral-900 rounded-2xl px-4 py-2 w-fit`}
        style={{ overflowWrap: "anywhere" }}
      >
        <IconDots className="animate-pulse" />
      </div>
    </div>
  );
};
components/Chat/ChatMessage.tsx
@@ -0,0 +1,19 @@
import { Message } from "@/types";
import { FC } from "react";

interface Props {
  message: Message;
}

export const ChatMessage: FC<Props> = ({ message }) => {
  return (
    <div className={`flex flex-col ${message.role === "assistant" ? "items-start" : "items-end"}`}>
      <div
        className={`flex items-center ${message.role === "assistant" ? "bg-neutral-200 text-neutral-900" : "bg-blue-500 text-white"} rounded-2xl px-3 py-2 max-w-[67%] whitespace-pre-wrap`}
        style={{ overflowWrap: "anywhere" }}
      >
        {message.content}
      </div>
    </div>
  );
};
components/Layout/Footer.tsx
@@ -0,0 +1,5 @@
import { FC } from "react";

export const Footer: FC = () => {
  return <div className="flex h-[30px] sm:h-[50px] border-t border-neutral-300 py-2 px-8 items-center sm:justify-between justify-center"></div>;
};
components/Layout/Navbar.tsx
@@ -0,0 +1,16 @@
import { FC } from "react";

export const Navbar: FC = () => {
  return (
    <div className="flex h-[50px] sm:h-[60px] border-b border-neutral-300 py-2 px-2 sm:px-8 items-center justify-between">
      <div className="font-bold text-3xl flex items-center">
        <a
          className="ml-2 hover:opacity-50"
          href="https://code-scaffold.vercel.app"
        >
          Chatbot UI
        </a>
      </div>
    </div>
  );
};
LICENSE
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 Mckay Wrigley

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
next.config.js
@@ -0,0 +1,6 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
  reactStrictMode: true,
}

module.exports = nextConfig
File diff suppressed because it is too large
package.json
@@ -0,0 +1,30 @@
{
  "name": "ai-chatbot-starter",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  },
  "dependencies": {
    "@tabler/icons-react": "^2.9.0",
    "@types/node": "18.15.0",
    "@types/react": "18.0.28",
    "@types/react-dom": "18.0.11",
    "eslint": "8.36.0",
    "eslint-config-next": "13.2.4",
    "eventsource-parser": "^0.1.0",
    "next": "13.2.4",
    "openai": "^3.2.1",
    "react": "18.2.0",
    "react-dom": "18.2.0",
    "typescript": "4.9.5"
  },
  "devDependencies": {
    "autoprefixer": "^10.4.14",
    "postcss": "^8.4.21",
    "tailwindcss": "^3.2.7"
  }
}
pages/_app.tsx
@@ -0,0 +1,13 @@
import "@/styles/globals.css";
import type { AppProps } from "next/app";
import { Inter } from "next/font/google";

const inter = Inter({ subsets: ["latin"] });

export default function App({ Component, pageProps }: AppProps<{}>) {
  return (
    <main className={inter.className}>
      <Component {...pageProps} />
    </main>
  );
}
pages/_document.tsx
@@ -0,0 +1,13 @@
import { Html, Head, Main, NextScript } from 'next/document'

export default function Document() {
  return (
    <Html lang="en">
      <Head />
      <body>
        <Main />
        <NextScript />
      </body>
    </Html>
  )
}
pages/api/chat.ts
@@ -0,0 +1,36 @@
import { Message } from "@/types";
import { OpenAIStream } from "@/utils";

export const config = {
  runtime: "edge"
};

const handler = async (req: Request): Promise<Response> => {
  try {
    const { messages } = (await req.json()) as {
      messages: Message[];
    };

    const charLimit = 12000;
    let charCount = 0;
    let messagesToSend = [];

    for (let i = 0; i < messages.length; i++) {
      const message = messages[i];
      if (charCount + message.content.length > charLimit) {
        break;
      }
      charCount += message.content.length;
      messagesToSend.push(message);
    }

    const stream = await OpenAIStream(messagesToSend);

    return new Response(stream);
  } catch (error) {
    console.error(error);
    return new Response("Error", { status: 500 });
  }
};

export default handler;
pages/index.tsx
@@ -0,0 +1,127 @@
import { Chat } from "@/components/Chat/Chat";
import { Footer } from "@/components/Layout/Footer";
import { Navbar } from "@/components/Layout/Navbar";
import { Message } from "@/types";
import Head from "next/head";
import { useEffect, useRef, useState } from "react";

export default function Home() {
  const [messages, setMessages] = useState<Message[]>([]);
  const [loading, setLoading] = useState<boolean>(false);

  const messagesEndRef = useRef<HTMLDivElement>(null);

  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
  };

  const handleSend = async (message: Message) => {
    const updatedMessages = [...messages, message];

    setMessages(updatedMessages);
    setLoading(true);

    const response = await fetch("/api/chat", {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify({
        messages: updatedMessages
      })
    });

    if (!response.ok) {
      setLoading(false);
      throw new Error(response.statusText);
    }

    const data = response.body;

    if (!data) {
      return;
    }

    setLoading(false);

    const reader = data.getReader();
    const decoder = new TextDecoder();
    let done = false;
    let isFirst = true;

    while (!done) {
      const { value, done: doneReading } = await reader.read();
      done = doneReading;
      const chunkValue = decoder.decode(value);

      if (isFirst) {
        isFirst = false;
        setMessages((messages) => [
          ...messages,
          {
            role: "assistant",
            content: chunkValue
          }
        ]);
      } else {
        setMessages((messages) => {
          const lastMessage = messages[messages.length - 1];
          const updatedMessage = {
            ...lastMessage,
            content: lastMessage.content + chunkValue
          };
          return [...messages.slice(0, -1), updatedMessage];
        });
      }
    }
  };

  useEffect(() => {
    scrollToBottom();
  }, [messages]);

  useEffect(() => {
    setMessages([
      {
        role: "assistant",
        content: `Hi there! I'm Chatbot UI, an AI assistant. I can help you with things like answering questions, providing information, and helping with tasks. How can I help you?`
      }
    ]);
  }, []);

  return (
    <>
      <Head>
        <title>Chatbot UI</title>
        <meta
          name="description"
          content="A simple chatbot starter kit for OpenAI's chat model using Next.js, TypeScript, and Tailwind CSS."
        />
        <meta
          name="viewport"
          content="width=device-width, initial-scale=1"
        />
        <link
          rel="icon"
          href="/favicon.ico"
        />
      </Head>

      <div className="flex flex-col h-screen">
        <Navbar />

        <div className="flex-1 overflow-auto sm:px-10 pb-4 sm:pb-10">
          <div className="max-w-[800px] mx-auto mt-4 sm:mt-12">
            <Chat
              messages={messages}
              loading={loading}
              onSend={handleSend}
            />
            <div ref={messagesEndRef} />
          </div>
        </div>
        <Footer />
      </div>
    </>
  );
}
postcss.config.js
@@ -0,0 +1,6 @@
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}
Binary file not shown. (size after: 15 KiB)
Binary file not shown. (size after: 474 KiB)
styles/globals.css
@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
tailwind.config.js
@@ -0,0 +1,8 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
  content: ["./app/**/*.{js,ts,jsx,tsx}", "./pages/**/*.{js,ts,jsx,tsx}", "./components/**/*.{js,ts,jsx,tsx}"],
  theme: {
    extend: {}
  },
  plugins: []
};
tsconfig.json
@@ -0,0 +1,23 @@
{
  "compilerOptions": {
    "target": "es5",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "jsx": "preserve",
    "incremental": true,
    "paths": {
      "@/*": ["./*"]
    }
  },
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
  "exclude": ["node_modules"]
}
types/index.ts
@@ -0,0 +1,10 @@
export enum OpenAIModel {
  DAVINCI_TURBO = "gpt-3.5-turbo"
}

export interface Message {
  role: Role;
  content: string;
}

export type Role = "assistant" | "user";
utils/index.ts
@@ -0,0 +1,64 @@
import { Message, OpenAIModel } from "@/types";
import { createParser, ParsedEvent, ReconnectInterval } from "eventsource-parser";

export const OpenAIStream = async (messages: Message[]) => {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.OPENAI_API_KEY}`
    },
    method: "POST",
    body: JSON.stringify({
      model: OpenAIModel.DAVINCI_TURBO,
      messages: [
        {
          role: "system",
          content: `You are a helpful, friendly, assistant.`
        },
        ...messages
      ],
      max_tokens: 800,
      temperature: 0.0,
      stream: true
    })
  });

  if (res.status !== 200) {
    throw new Error("OpenAI API returned an error");
  }

  const stream = new ReadableStream({
    async start(controller) {
      const onParse = (event: ParsedEvent | ReconnectInterval) => {
        if (event.type === "event") {
          const data = event.data;

          if (data === "[DONE]") {
            controller.close();
            return;
          }

          try {
            const json = JSON.parse(data);
            const text = json.choices[0].delta.content;
            const queue = encoder.encode(text);
            controller.enqueue(queue);
          } catch (e) {
            controller.error(e);
          }
        }
      };

      const parser = createParser(onParse);

      for await (const chunk of res.body as any) {
        parser.feed(decoder.decode(chunk));
      }
    }
  });

  return stream;
};