diff --git a/.env.local.example b/.env.local.example deleted file mode 100644 index 752af4e..0000000 --- a/.env.local.example +++ /dev/null @@ -1,8 +0,0 @@ -# Chatbot UI -DEFAULT_MODEL=gpt-3.5-turbo -NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT=You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown. -OPENAI_API_KEY=YOUR_KEY - -# Google -GOOGLE_API_KEY=YOUR_API_KEY -GOOGLE_CSE_ID=YOUR_ENGINE_ID diff --git a/.github/workflows/deploy-docker-image.yaml b/.github/workflows/deploy-docker-image.yaml deleted file mode 100644 index 3e7ad3c..0000000 --- a/.github/workflows/deploy-docker-image.yaml +++ /dev/null @@ -1,69 +0,0 @@ -name: Docker - -# This workflow uses actions that are not certified by GitHub. -# They are provided by a third-party and are governed by -# separate terms of service, privacy policy, and support -# documentation. - -on: - push: - branches: ['main'] - -env: - # Use docker.io for Docker Hub if empty - REGISTRY: ghcr.io - # github.repository as / - IMAGE_NAME: ${{ github.repository }} - -jobs: - build: - runs-on: ubuntu-latest - permissions: - contents: read - packages: write - # This is used to complete the identity challenge - # with sigstore/fulcio when running outside of PRs. - id-token: write - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2.1.0 - - # Workaround: https://github.com/docker/build-push-action/issues/461 - - name: Setup Docker buildx - uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf - - # Login against a Docker registry except on PR - # https://github.com/docker/login-action - - name: Log into registry ${{ env.REGISTRY }} - if: github.event_name != 'pull_request' - uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c - with: - registry: ${{ env.REGISTRY }} - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - # Extract metadata (tags, labels) for Docker - # https://github.com/docker/metadata-action - - name: Extract Docker metadata - id: meta - uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 - with: - images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} - - # Build and push Docker image with Buildx (don't push on PR) - # https://github.com/docker/build-push-action - - name: Build and push Docker image - id: build-and-push - uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a - with: - context: . - platforms: "linux/amd64,linux/arm64" - push: ${{ github.event_name != 'pull_request' }} - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max diff --git a/.github/workflows/run-test-suite.yml b/.github/workflows/run-test-suite.yml deleted file mode 100644 index c0914db..0000000 --- a/.github/workflows/run-test-suite.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Run Unit Tests -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - test: - runs-on: ubuntu-latest - container: - image: node:16 - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Install dependencies - run: npm ci - - - name: Run Vitest Suite - run: npm test diff --git a/.gitignore b/.gitignore index 5be3dc7..1499042 100644 --- a/.gitignore +++ b/.gitignore @@ -1,40 +1,2 @@ -# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
- -# dependencies -/node_modules -/.pnp -.pnp.js - -# testing -/coverage -/test-results - -# next.js -/.next/ -/out/ -/dist - -# production -/build - -# misc -.DS_Store -*.pem - -# debug -npm-debug.log* -yarn-debug.log* -yarn-error.log* -.pnpm-debug.log* - -# local env files -.env*.local - -# vercel -.vercel - -# typescript -*.tsbuildinfo -next-env.d.ts -.idea -pnpm-lock.yaml +**/.DS_Store +models \ No newline at end of file diff --git a/README.md b/README.md deleted file mode 100644 index 0a7352e..0000000 --- a/README.md +++ /dev/null @@ -1,105 +0,0 @@ -# Chatbot UI - -Chatbot UI is an open source chat UI for AI models. - -See a [demo](https://twitter.com/mckaywrigley/status/1640380021423603713?s=46&t=AowqkodyK6B4JccSOxSPew). - -![Chatbot UI](./public/screenshots/screenshot-0402023.jpg) - -## Updates - -Chatbot UI will be updated over time. - -Expect frequent improvements. - -**Next up:** - -- [ ] Sharing -- [ ] "Bots" - -## Deploy - -**Vercel** - -Host your own live version of Chatbot UI with Vercel. - -[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui) - -**Docker** - -Build locally: - -```shell -docker build -t chatgpt-ui . -docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 chatgpt-ui -``` - -Pull from ghcr: - -``` -docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 ghcr.io/mckaywrigley/chatbot-ui:main -``` - -## Running Locally - -**1. Clone Repo** - -```bash -git clone https://github.com/mckaywrigley/chatbot-ui.git -``` - -**2. Install Dependencies** - -```bash -npm i -``` - -**3. Provide OpenAI API Key** - -Create a .env.local file in the root of the repo with your OpenAI API Key: - -```bash -OPENAI_API_KEY=YOUR_KEY -``` - -> You can set `OPENAI_API_HOST` where access to the official OpenAI host is restricted or unavailable, allowing users to configure an alternative host for their specific needs. - -> Additionally, if you have multiple OpenAI Organizations, you can set `OPENAI_ORGANIZATION` to specify one. - -**4. Run App** - -```bash -npm run dev -``` - -**5. Use It** - -You should be able to start chatting. 
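To tie the notes on `OPENAI_API_HOST` and `OPENAI_ORGANIZATION` above to the configuration table below, a complete `.env.local` might look roughly like this — every value here is an illustrative placeholder, not a real credential:

```bash
# .env.local — placeholder values only
OPENAI_API_KEY=YOUR_KEY
# Optional: point at an alternative OpenAI-compatible host
OPENAI_API_HOST=https://your-openai-host.example.com
# Optional: select a specific OpenAI organization
OPENAI_ORGANIZATION=org-xxxxxxxxxxxx
# Optional: default model for new conversations
DEFAULT_MODEL=gpt-3.5-turbo
```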
- -## Configuration - -When deploying the application, the following environment variables can be set: - -| Environment Variable | Default value | Description | -| --------------------------------- | ------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------- | -| OPENAI_API_KEY | | The default API key used for authentication with OpenAI | -| OPENAI_API_HOST | `https://api.openai.com` | The base url, for Azure use `https://.openai.azure.com` | -| OPENAI_API_TYPE | `openai` | The API type, options are `openai` or `azure` | -| OPENAI_API_VERSION | `2023-03-15-preview` | Only applicable for Azure OpenAI | -| AZURE_DEPLOYMENT_ID | | Needed when Azure OpenAI, Ref [Azure OpenAI API](https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/reference#completions) | -| OPENAI_ORGANIZATION | | Your OpenAI organization ID | -| DEFAULT_MODEL | `gpt-3.5-turbo` | The default model to use on new conversations, for Azure use `gpt-35-turbo` | -| NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations | -| NEXT_PUBLIC_DEFAULT_TEMPERATURE | 1 | The default temperature to use on new conversations | -| GOOGLE_API_KEY | | See [Custom Search JSON API documentation][GCSE] | -| GOOGLE_CSE_ID | | See [Custom Search JSON API documentation][GCSE] | - -If you do not provide an OpenAI API key with `OPENAI_API_KEY`, users will have to provide their own key. - -If you don't have an OpenAI API key, you can get one [here](https://platform.openai.com/account/api-keys). - -## Contact - -If you have any questions, feel free to reach out to Mckay on [Twitter](https://twitter.com/mckaywrigley). - -[GCSE]: https://developers.google.com/custom-search/v1/overview diff --git a/SECURITY.md b/SECURITY.md deleted file mode 100644 index 42f7994..0000000 --- a/SECURITY.md +++ /dev/null @@ -1,53 +0,0 @@ -# Security Policy - - -This security policy outlines the process for reporting vulnerabilities and secrets found within this GitHub repository. It is essential that all contributors and users adhere to this policy in order to maintain a secure and stable environment. - -## Reporting a Vulnerability - -If you discover a vulnerability within the code, dependencies, or any other component of this repository, please follow these steps: - -1. **Do not disclose the vulnerability publicly.** Publicly disclosing a vulnerability may put the project at risk and could potentially harm other users. - -2. **Contact the repository maintainer(s) privately.** Send a private message or email to the maintainer(s) with a detailed description of the vulnerability. Include the following information: - - - The affected component(s) - - Steps to reproduce the issue - - Potential impact of the vulnerability - - Any possible mitigations or workarounds - -3. **Wait for a response from the maintainer(s).** Please be patient, as they may need time to investigate and verify the issue. The maintainer(s) should acknowledge receipt of your report and provide an estimated time frame for addressing the vulnerability. - -4. **Cooperate with the maintainer(s).** If requested, provide additional information or assistance to help resolve the issue. - -5. **Do not disclose the vulnerability until the maintainer(s) have addressed it.** Once the issue has been resolved, the maintainer(s) may choose to publicly disclose the vulnerability and credit you for the discovery. 
- -## Reporting Secrets - -If you discover any secrets, such as API keys or passwords, within the repository, follow these steps: - -1. **Do not share the secret or use it for unauthorized purposes.** Misusing a secret could have severe consequences for the project and its users. - -2. **Contact the repository maintainer(s) privately.** Notify them of the discovered secret, its location, and any potential risks associated with it. - -3. **Wait for a response and further instructions.** - -## Responsible Disclosure - -We encourage responsible disclosure of vulnerabilities and secrets. If you follow the steps outlined in this policy, we will work with you to understand and address the issue. We will not take legal action against individuals who discover and report vulnerabilities or secrets in accordance with this policy. - -## Patching and Updates - -We are committed to maintaining the security of our project. When vulnerabilities are reported and confirmed, we will: - -1. Work diligently to develop and apply a patch or implement a mitigation strategy. -2. Keep the reporter informed about the progress of the fix. -3. Update the repository with the necessary patches and document the changes in the release notes or changelog. -4. Credit the reporter for the discovery, if they wish to be acknowledged. - -## Contributing to Security - -We welcome contributions that help improve the security of our project. If you have suggestions or want to contribute code to address security issues, please follow the standard contribution guidelines for this repository. When submitting a pull request related to security, please mention that it addresses a security issue and provide any necessary context. - -By adhering to this security policy, you contribute to the overall security and stability of the project. Thank you for your cooperation and responsible handling of vulnerabilities and secrets. - diff --git a/api/Dockerfile b/api/Dockerfile new file mode 100644 index 0000000..731a2b8 --- /dev/null +++ b/api/Dockerfile @@ -0,0 +1,26 @@ +# Define the image argument and provide a default value +ARG IMAGE=ghcr.io/abetlen/llama-cpp-python:latest + +# Define the model file name and download url +ARG MODEL_FILE=llama-2-7b-chat.bin +ARG MODEL_DOWNLOAD_URL=https://huggingface.co/TheBloke/Nous-Hermes-Llama-2-7B-GGML/resolve/main/nous-hermes-llama-2-7b.ggmlv3.q4_0.bin + +FROM ${IMAGE} + +ARG MODEL_FILE +ARG MODEL_DOWNLOAD_URL + +# Download the model file +RUN apt-get update -y && \ + apt-get install --yes curl && \ + mkdir -p /models && \ + curl -L -o /models/${MODEL_FILE} ${MODEL_DOWNLOAD_URL} + +WORKDIR /app + +COPY . . 
+ +EXPOSE 8000 + +# Run the server start script +CMD ["/bin/sh", "/app/run.sh"] \ No newline at end of file diff --git a/api/run.sh b/api/run.sh new file mode 100755 index 0000000..3faf55c --- /dev/null +++ b/api/run.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +make build + +# Get the number of available threads on the system +n_threads=$(grep -c ^processor /proc/cpuinfo) + +# Define context window +n_ctx=4096 + +# Offload everything to CPU +n_gpu_layers=0 + +# Define batch size +n_batch=2096 +# If total RAM is less than 8GB, set batch size to 1024 +total_ram=$(cat /proc/meminfo | grep MemTotal | awk '{print $2}') +if [ $total_ram -lt 8000000 ]; then + n_batch=1024 +fi + +echo "Initializing server with:" +echo "Batch size: $n_batch" +echo "Number of CPU threads: $n_threads" +echo "Number of GPU layers: $n_gpu_layers" +echo "Context window: $n_ctx" + +python3 -m llama_cpp.server --n_ctx $n_ctx --n_threads $n_threads --n_gpu_layers $n_gpu_layers --n_batch $n_batch \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 85fa1d7..1557e63 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,9 +1,20 @@ version: '3.6' services: - chatgpt: - build: . + llama-gpt-api: + build: + context: ./api + dockerfile: Dockerfile + environment: + MODEL: '/models/llama-2-7b-chat.bin' + + llama-gpt-ui: + build: + context: ./ui + dockerfile: Dockerfile ports: - 3000:3000 environment: - - 'OPENAI_API_KEY=' + - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX' + - 'OPENAI_API_HOST=http://llama-gpt-api:8000' + - 'DEFAULT_MODEL=/models/llama-2-7b-chat.bin' diff --git a/license b/license index 5421da5..d4a3a57 100644 --- a/license +++ b/license @@ -1,5 +1,6 @@ MIT License +Copyright (c) 2023 Umbrel, Inc. Copyright (c) 2023 Mckay Wrigley Permission is hereby granted, free of charge, to any person obtaining a copy diff --git a/public/favicon.ico b/public/favicon.ico deleted file mode 100644 index 13e7072..0000000 Binary files a/public/favicon.ico and /dev/null differ diff --git a/public/screenshots/screenshot-0402023.jpg b/public/screenshots/screenshot-0402023.jpg deleted file mode 100644 index 0ba0812..0000000 Binary files a/public/screenshots/screenshot-0402023.jpg and /dev/null differ diff --git a/.dockerignore b/ui/.dockerignore similarity index 100% rename from .dockerignore rename to ui/.dockerignore diff --git a/.eslintrc.json b/ui/.eslintrc.json similarity index 100% rename from .eslintrc.json rename to ui/.eslintrc.json diff --git a/ui/.gitignore b/ui/.gitignore new file mode 100644 index 0000000..5be3dc7 --- /dev/null +++ b/ui/.gitignore @@ -0,0 +1,40 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
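The new `api/Dockerfile` above takes the model file name and download URL as build arguments, so a different GGML model can be baked into the image without editing the file. A minimal sketch of overriding them — the file name and URL below are placeholders, not a recommendation:

```bash
# Build the API image with a different GGML model (placeholder name/URL)
docker build \
  --build-arg MODEL_FILE=my-model.ggmlv3.q4_0.bin \
  --build-arg MODEL_DOWNLOAD_URL=https://example.com/my-model.ggmlv3.q4_0.bin \
  -t llama-gpt-api ./api
```

If the file name changes, the `MODEL` variable in `docker-compose.yml` (and the UI's `DEFAULT_MODEL`) would need to point at the matching `/models/...` path as well.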
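In the updated `docker-compose.yml`, the UI reuses its existing OpenAI client code against the local model simply by pointing `OPENAI_API_HOST` at the `llama-gpt-api` service, since `llama_cpp.server` exposes an OpenAI-compatible REST API on port 8000. A rough sanity check from inside the Compose network (the API port is not published to the host in the file above, so this assumes you run it from another container on the same network or add a port mapping):

```bash
# Ask the llama-cpp-python server for a chat completion, OpenAI-style
curl http://llama-gpt-api:8000/v1/chat/completions \
  -H 'Content-Type: application/json' \
  -d '{
        "model": "/models/llama-2-7b-chat.bin",
        "messages": [{"role": "user", "content": "Say hello in one sentence."}]
      }'
```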
+ +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage +/test-results + +# next.js +/.next/ +/out/ +/dist + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts +.idea +pnpm-lock.yaml diff --git a/CONTRIBUTING.md b/ui/CONTRIBUTING.md similarity index 100% rename from CONTRIBUTING.md rename to ui/CONTRIBUTING.md diff --git a/Dockerfile b/ui/Dockerfile similarity index 100% rename from Dockerfile rename to ui/Dockerfile diff --git a/Makefile b/ui/Makefile similarity index 100% rename from Makefile rename to ui/Makefile diff --git a/__tests__/utils/app/importExports.test.ts b/ui/__tests__/utils/app/importExports.test.ts similarity index 100% rename from __tests__/utils/app/importExports.test.ts rename to ui/__tests__/utils/app/importExports.test.ts diff --git a/components/Buttons/SidebarActionButton/SidebarActionButton.tsx b/ui/components/Buttons/SidebarActionButton/SidebarActionButton.tsx similarity index 100% rename from components/Buttons/SidebarActionButton/SidebarActionButton.tsx rename to ui/components/Buttons/SidebarActionButton/SidebarActionButton.tsx diff --git a/components/Buttons/SidebarActionButton/index.ts b/ui/components/Buttons/SidebarActionButton/index.ts similarity index 100% rename from components/Buttons/SidebarActionButton/index.ts rename to ui/components/Buttons/SidebarActionButton/index.ts diff --git a/components/Chat/Chat.tsx b/ui/components/Chat/Chat.tsx similarity index 91% rename from components/Chat/Chat.tsx rename to ui/components/Chat/Chat.tsx index fa6b69d..332ce8a 100644 --- a/components/Chat/Chat.tsx +++ b/ui/components/Chat/Chat.tsx @@ -348,42 +348,18 @@ export const Chat = memo(({ stopConversationRef }: Props) => { }, [messagesEndRef]); return ( -
+
{!(apiKey || serverSideApiKeyIsSet) ? (
- Welcome to Chatbot UI + LlamaGPT
-
{`Chatbot UI is an open source clone of OpenAI's ChatGPT UI.`}
-
- Important: Chatbot UI is 100% unaffiliated with OpenAI. -
+
LlamaGPT is 100% unaffiliated with OpenAI.
- Chatbot UI allows you to plug in your API key to use this UI with - their API. -
-
- It is only used to communicate - with their API. -
-
- {t( - 'Please set your OpenAI API key in the bottom left of the sidebar.', - )} -
-
- {t("If you don't have an OpenAI API key, you can get one here: ")} - - openai.com - + LlamaGPT allows you to self-host your own LLM.
@@ -405,12 +381,12 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
) : ( - 'Chatbot UI' + 'LlamaGPT' )}
{models.length > 0 && ( -
+
{ ) : ( <> -
+
{t('Model')}: {selectedConversation?.model.name} | {t('Temp')} : {selectedConversation?.temperature} |
{showSettings && (
-
+
@@ -482,7 +458,7 @@ export const Chat = memo(({ stopConversationRef }: Props) => { {loading && }
diff --git a/components/Chat/ChatInput.tsx b/ui/components/Chat/ChatInput.tsx similarity index 95% rename from components/Chat/ChatInput.tsx rename to ui/components/Chat/ChatInput.tsx index 64f8df6..30fa6c0 100644 --- a/components/Chat/ChatInput.tsx +++ b/ui/components/Chat/ChatInput.tsx @@ -257,11 +257,11 @@ export const ChatInput = ({ }, []); return ( -
+
{messageIsStreaming && ( )} -
+
{showPluginSelect && ( -
+
{ @@ -379,7 +379,7 @@ export const ChatInput = ({ )}
- ); }; diff --git a/components/Chat/ChatLoader.tsx b/ui/components/Chat/ChatLoader.tsx similarity index 89% rename from components/Chat/ChatLoader.tsx rename to ui/components/Chat/ChatLoader.tsx index e666d57..eb6c9dc 100644 --- a/components/Chat/ChatLoader.tsx +++ b/ui/components/Chat/ChatLoader.tsx @@ -6,7 +6,7 @@ interface Props { } export const ChatLoader: FC = () => { return (
diff --git a/components/Chat/ChatMessage.tsx b/ui/components/Chat/ChatMessage.tsx similarity index 98% rename from components/Chat/ChatMessage.tsx rename to ui/components/Chat/ChatMessage.tsx index 512551f..dd04a21 100644 --- a/components/Chat/ChatMessage.tsx +++ b/ui/components/Chat/ChatMessage.tsx @@ -128,8 +128,8 @@ export const ChatMessage: FC = memo(({ message, messageIndex, onEdit }) =
@@ -149,7 +149,7 @@ export const ChatMessage: FC = memo(({ message, messageIndex, onEdit }) =