Initialize the project to use self-hosted Llama model
parent fa3f6e93bb · commit 75cd9d075f
@@ -1,8 +0,0 @@
# Chatbot UI
DEFAULT_MODEL=gpt-3.5-turbo
NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT=You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.
OPENAI_API_KEY=YOUR_KEY

# Google
GOOGLE_API_KEY=YOUR_API_KEY
GOOGLE_CSE_ID=YOUR_ENGINE_ID
@@ -1,69 +0,0 @@
name: Docker

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

on:
  push:
    branches: ['main']

env:
  # Use docker.io for Docker Hub if empty
  REGISTRY: ghcr.io
  # github.repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2.1.0

      # Workaround: https://github.com/docker/build-push-action/issues/461
      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf

      # Login against a Docker registry except on PR
      # https://github.com/docker/login-action
      - name: Log into registry ${{ env.REGISTRY }}
        if: github.event_name != 'pull_request'
        uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract metadata (tags, labels) for Docker
      # https://github.com/docker/metadata-action
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # Build and push Docker image with Buildx (don't push on PR)
      # https://github.com/docker/build-push-action
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
        with:
          context: .
          platforms: "linux/amd64,linux/arm64"
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -1,24 +0,0 @@
name: Run Unit Tests
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  test:
    runs-on: ubuntu-latest
    container:
      image: node:16

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Install dependencies
        run: npm ci

      - name: Run Vitest Suite
        run: npm test
@@ -1,40 +1,2 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage
/test-results

# next.js
/.next/
/out/
/dist

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
.idea
pnpm-lock.yaml
**/.DS_Store
models
README.md (105 changed lines)
@@ -1,105 +0,0 @@
# Chatbot UI

Chatbot UI is an open source chat UI for AI models.

See a [demo](https://twitter.com/mckaywrigley/status/1640380021423603713?s=46&t=AowqkodyK6B4JccSOxSPew).



## Updates

Chatbot UI will be updated over time.

Expect frequent improvements.

**Next up:**

- [ ] Sharing
- [ ] "Bots"

## Deploy

**Vercel**

Host your own live version of Chatbot UI with Vercel.

[](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui)

**Docker**

Build locally:

```shell
docker build -t chatgpt-ui .
docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 chatgpt-ui
```

Pull from ghcr:

```
docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 ghcr.io/mckaywrigley/chatbot-ui:main
```

## Running Locally

**1. Clone Repo**

```bash
git clone https://github.com/mckaywrigley/chatbot-ui.git
```

**2. Install Dependencies**

```bash
npm i
```

**3. Provide OpenAI API Key**

Create a .env.local file in the root of the repo with your OpenAI API Key:

```bash
OPENAI_API_KEY=YOUR_KEY
```

> You can set `OPENAI_API_HOST` where access to the official OpenAI host is restricted or unavailable, allowing users to configure an alternative host for their specific needs.

> Additionally, if you have multiple OpenAI Organizations, you can set `OPENAI_ORGANIZATION` to specify one.

**4. Run App**

```bash
npm run dev
```

**5. Use It**

You should be able to start chatting.

## Configuration

When deploying the application, the following environment variables can be set:

| Environment Variable | Default value | Description |
| --- | --- | --- |
| OPENAI_API_KEY | | The default API key used for authentication with OpenAI |
| OPENAI_API_HOST | `https://api.openai.com` | The base url, for Azure use `https://<endpoint>.openai.azure.com` |
| OPENAI_API_TYPE | `openai` | The API type, options are `openai` or `azure` |
| OPENAI_API_VERSION | `2023-03-15-preview` | Only applicable for Azure OpenAI |
| AZURE_DEPLOYMENT_ID | | Needed when Azure OpenAI, Ref [Azure OpenAI API](https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/reference#completions) |
| OPENAI_ORGANIZATION | | Your OpenAI organization ID |
| DEFAULT_MODEL | `gpt-3.5-turbo` | The default model to use on new conversations, for Azure use `gpt-35-turbo` |
| NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations |
| NEXT_PUBLIC_DEFAULT_TEMPERATURE | 1 | The default temperature to use on new conversations |
| GOOGLE_API_KEY | | See [Custom Search JSON API documentation][GCSE] |
| GOOGLE_CSE_ID | | See [Custom Search JSON API documentation][GCSE] |

If you do not provide an OpenAI API key with `OPENAI_API_KEY`, users will have to provide their own key.

If you don't have an OpenAI API key, you can get one [here](https://platform.openai.com/account/api-keys).

## Contact

If you have any questions, feel free to reach out to Mckay on [Twitter](https://twitter.com/mckaywrigley).

[GCSE]: https://developers.google.com/custom-search/v1/overview
SECURITY.md (53 changed lines)
@@ -1,53 +0,0 @@
# Security Policy

This security policy outlines the process for reporting vulnerabilities and secrets found within this GitHub repository. It is essential that all contributors and users adhere to this policy in order to maintain a secure and stable environment.

## Reporting a Vulnerability

If you discover a vulnerability within the code, dependencies, or any other component of this repository, please follow these steps:

1. **Do not disclose the vulnerability publicly.** Publicly disclosing a vulnerability may put the project at risk and could potentially harm other users.

2. **Contact the repository maintainer(s) privately.** Send a private message or email to the maintainer(s) with a detailed description of the vulnerability. Include the following information:

   - The affected component(s)
   - Steps to reproduce the issue
   - Potential impact of the vulnerability
   - Any possible mitigations or workarounds

3. **Wait for a response from the maintainer(s).** Please be patient, as they may need time to investigate and verify the issue. The maintainer(s) should acknowledge receipt of your report and provide an estimated time frame for addressing the vulnerability.

4. **Cooperate with the maintainer(s).** If requested, provide additional information or assistance to help resolve the issue.

5. **Do not disclose the vulnerability until the maintainer(s) have addressed it.** Once the issue has been resolved, the maintainer(s) may choose to publicly disclose the vulnerability and credit you for the discovery.

## Reporting Secrets

If you discover any secrets, such as API keys or passwords, within the repository, follow these steps:

1. **Do not share the secret or use it for unauthorized purposes.** Misusing a secret could have severe consequences for the project and its users.

2. **Contact the repository maintainer(s) privately.** Notify them of the discovered secret, its location, and any potential risks associated with it.

3. **Wait for a response and further instructions.**

## Responsible Disclosure

We encourage responsible disclosure of vulnerabilities and secrets. If you follow the steps outlined in this policy, we will work with you to understand and address the issue. We will not take legal action against individuals who discover and report vulnerabilities or secrets in accordance with this policy.

## Patching and Updates

We are committed to maintaining the security of our project. When vulnerabilities are reported and confirmed, we will:

1. Work diligently to develop and apply a patch or implement a mitigation strategy.
2. Keep the reporter informed about the progress of the fix.
3. Update the repository with the necessary patches and document the changes in the release notes or changelog.
4. Credit the reporter for the discovery, if they wish to be acknowledged.

## Contributing to Security

We welcome contributions that help improve the security of our project. If you have suggestions or want to contribute code to address security issues, please follow the standard contribution guidelines for this repository. When submitting a pull request related to security, please mention that it addresses a security issue and provide any necessary context.

By adhering to this security policy, you contribute to the overall security and stability of the project. Thank you for your cooperation and responsible handling of vulnerabilities and secrets.
@@ -0,0 +1,26 @@
# Define the image argument and provide a default value
ARG IMAGE=ghcr.io/abetlen/llama-cpp-python:latest

# Define the model file name and download url
ARG MODEL_FILE=llama-2-7b-chat.bin
ARG MODEL_DOWNLOAD_URL=https://huggingface.co/TheBloke/Nous-Hermes-Llama-2-7B-GGML/resolve/main/nous-hermes-llama-2-7b.ggmlv3.q4_0.bin

FROM ${IMAGE}

ARG MODEL_FILE
ARG MODEL_DOWNLOAD_URL

# Download the model file
RUN apt-get update -y && \
    apt-get install --yes curl && \
    mkdir -p /models && \
    curl -L -o /models/${MODEL_FILE} ${MODEL_DOWNLOAD_URL}

WORKDIR /app

COPY . .

EXPOSE 8000

# Run the server start script
CMD ["/bin/sh", "/app/run.sh"]
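The build arguments above make the model weights swappable at build time. As a quick sanity check, a minimal sketch of building this image, assuming it lives in the ./api build context referenced by the compose file further down; the `llama-gpt-api` tag and the alternative download URL are illustrative, not part of the commit:

```shell
# Build with the defaults baked into the Dockerfile above
docker build -t llama-gpt-api ./api

# Or swap in another GGML model at build time (example values only)
docker build -t llama-gpt-api \
  --build-arg MODEL_FILE=my-model.ggmlv3.q4_0.bin \
  --build-arg MODEL_DOWNLOAD_URL=https://example.com/my-model.ggmlv3.q4_0.bin \
  ./api
```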
@@ -0,0 +1,28 @@
#!/bin/bash

make build

# Get the number of available threads on the system
n_threads=$(grep -c ^processor /proc/cpuinfo)

# Define context window
n_ctx=4096

# Offload everything to CPU
n_gpu_layers=0

# Define batch size
n_batch=2096
# If total RAM is less than 8GB, set batch size to 1024
total_ram=$(cat /proc/meminfo | grep MemTotal | awk '{print $2}')
if [ $total_ram -lt 8000000 ]; then
    n_batch=1024
fi

echo "Initializing server with:"
echo "Batch size: $n_batch"
echo "Number of CPU threads: $n_threads"
echo "Number of GPU layers: $n_gpu_layers"
echo "Context window: $n_ctx"

python3 -m llama_cpp.server --n_ctx $n_ctx --n_threads $n_threads --n_gpu_layers $n_gpu_layers --n_batch $n_batch
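The llama-cpp-python server started by this script exposes an OpenAI-compatible HTTP API on port 8000, which is what lets the UI be pointed at it through `OPENAI_API_HOST`. A minimal smoke test against a running container might look like the sketch below; the localhost port mapping and the prompt are illustrative, not part of the commit:

```shell
# List the models the server reports
curl http://localhost:8000/v1/models

# Ask for a single chat completion from the bundled GGML model
curl http://localhost:8000/v1/chat/completions \
  -H 'Content-Type: application/json' \
  -d '{
        "model": "/models/llama-2-7b-chat.bin",
        "messages": [{"role": "user", "content": "Hello!"}]
      }'
```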
@@ -1,9 +1,20 @@
version: '3.6'

services:
  chatgpt:
    build: .
  llama-gpt-api:
    build:
      context: ./api
      dockerfile: Dockerfile
    environment:
      MODEL: '/models/llama-2-7b-chat.bin'

  llama-gpt-ui:
    build:
      context: ./ui
      dockerfile: Dockerfile
    ports:
      - 3000:3000
    environment:
      - 'OPENAI_API_KEY='
      - 'OPENAI_API_KEY=sk-XXXXXXXXXXXXXXXXXXXX'
      - 'OPENAI_API_HOST=http://llama-gpt-api:8000'
      - 'DEFAULT_MODEL=/models/llama-2-7b-chat.bin'
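With both services defined, the stack comes up with a single compose invocation; inside the compose network the UI reaches the API at `http://llama-gpt-api:8000` via `OPENAI_API_HOST`. A sketch of the expected workflow, where the service names come from the file above and the commands are ordinary compose usage rather than part of the commit:

```shell
# Build both images and start the API and the UI in the background
docker compose up --build -d

# Watch the API container download the model file and start the server
docker compose logs -f llama-gpt-api

# The chat UI is then served on the host at port 3000
# (open http://localhost:3000 in a browser)
```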
license (1 changed line)
@@ -1,5 +1,6 @@
MIT License

Copyright (c) 2023 Umbrel, Inc.
Copyright (c) 2023 Mckay Wrigley

Permission is hereby granted, free of charge, to any person obtaining a copy
Two binary image files changed, not shown (previous sizes: 15 KiB and 108 KiB).
@@ -0,0 +1,40 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage
/test-results

# next.js
/.next/
/out/
/dist

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
.idea
pnpm-lock.yaml
@@ -348,42 +348,18 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
}, [messagesEndRef]);
return (
<div className="relative flex-1 overflow-hidden bg-white dark:bg-[#343541]">
<div className="relative flex-1 overflow-hidden bg-white dark:bg-[#100e14]">
{!(apiKey || serverSideApiKeyIsSet) ? (
<div className="mx-auto flex h-full w-[300px] flex-col justify-center space-y-6 sm:w-[600px]">
<div className="text-center text-4xl font-bold text-black dark:text-white">
Welcome to Chatbot UI
LlamaGPT
</div>
<div className="text-center text-lg text-black dark:text-white">
<div className="mb-8">{`Chatbot UI is an open source clone of OpenAI's ChatGPT UI.`}</div>
<div className="mb-2 font-bold">
Important: Chatbot UI is 100% unaffiliated with OpenAI.
</div>
<div className="mb-8">LlamaGPT 100% unaffiliated with OpenAI.</div>
</div>
<div className="text-center text-gray-500 dark:text-gray-400">
<div className="mb-2">
Chatbot UI allows you to plug in your API key to use this UI with
their API.
</div>
<div className="mb-2">
It is <span className="italic">only</span> used to communicate
with their API.
</div>
<div className="mb-2">
{t(
'Please set your OpenAI API key in the bottom left of the sidebar.',
)}
</div>
<div>
{t("If you don't have an OpenAI API key, you can get one here: ")}
<a
href="https://platform.openai.com/account/api-keys"
target="_blank"
rel="noreferrer"
className="text-blue-500 hover:underline"
>
openai.com
</a>
LlamaGPT allows you to self-host your own LLM.
</div>
</div>
</div>
@@ -405,12 +381,12 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
<Spinner size="16px" className="mx-auto" />
</div>
) : (
'Chatbot UI'
'LlamaGPT'
)}
</div>
{models.length > 0 && (
<div className="flex h-full flex-col space-y-4 rounded-lg border border-neutral-200 p-4 dark:border-neutral-600">
<div className="flex h-full flex-col space-y-4 rounded-lg border border-neutral-600 p-4 dark:border-neutral-700">
<ModelSelect />
<SystemPrompt
@@ -439,7 +415,7 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
</>
) : (
<>
<div className="sticky top-0 z-10 flex justify-center border border-b-neutral-300 bg-neutral-100 py-2 text-sm text-neutral-500 dark:border-none dark:bg-[#444654] dark:text-neutral-200">
<div className="sticky top-0 z-10 flex justify-center border border-b-neutral-300 bg-neutral-100 py-2 text-sm text-neutral-500 dark:border-none dark:bg-[#161519] dark:text-neutral-200">
{t('Model')}: {selectedConversation?.model.name} | {t('Temp')}
: {selectedConversation?.temperature} |
<button
@@ -457,7 +433,7 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
</div>
{showSettings && (
<div className="flex flex-col space-y-10 md:mx-auto md:max-w-xl md:gap-6 md:py-3 md:pt-6 lg:max-w-2xl lg:px-0 xl:max-w-3xl">
<div className="flex h-full flex-col space-y-4 border-b border-neutral-200 p-4 dark:border-neutral-600 md:rounded-lg md:border">
<div className="flex h-full flex-col space-y-4 border-b border-neutral-600 p-4 dark:border-neutral-700 md:rounded-lg md:border">
<ModelSelect />
</div>
</div>
@@ -482,7 +458,7 @@ export const Chat = memo(({ stopConversationRef }: Props) => {
{loading && <ChatLoader />}
<div
className="h-[162px] bg-white dark:bg-[#343541]"
className="h-[162px] bg-white dark:bg-transparent"
ref={messagesEndRef}
/>
</>
@@ -257,11 +257,11 @@ export const ChatInput = ({
}, []);
return (
<div className="absolute bottom-0 left-0 w-full border-transparent bg-gradient-to-b from-transparent via-white to-white pt-6 dark:border-white/20 dark:via-[#343541] dark:to-[#343541] md:pt-2">
<div className="absolute bottom-0 left-0 w-full border-transparent bg-gradient-to-b from-transparent via-white to-white pt-6 dark:border-white/20 dark:via-[#121322] dark:to-[#0f0a28] md:pt-2">
<div className="stretch mx-2 mt-4 flex flex-row gap-3 last:mb-2 md:mx-4 md:mt-[52px] md:last:mb-6 lg:mx-auto lg:max-w-3xl">
{messageIsStreaming && (
<button
className="absolute top-0 left-0 right-0 mx-auto mb-3 flex w-fit items-center gap-3 rounded border border-neutral-200 bg-white py-2 px-4 text-black hover:opacity-50 dark:border-neutral-600 dark:bg-[#343541] dark:text-white md:mb-0 md:mt-2"
className="absolute top-0 left-0 right-0 mx-auto mb-3 flex w-fit items-center gap-3 rounded border border-neutral-600 bg-white py-2 px-4 text-black hover:opacity-50 dark:border-neutral-700 dark:bg-transparent dark:text-white md:mb-0 md:mt-2"
onClick={handleStopConversation}
>
<IconPlayerStop size={16} /> {t('Stop Generating')}
@@ -272,14 +272,14 @@ export const ChatInput = ({
selectedConversation &&
selectedConversation.messages.length > 0 && (
<button
className="absolute top-0 left-0 right-0 mx-auto mb-3 flex w-fit items-center gap-3 rounded border border-neutral-200 bg-white py-2 px-4 text-black hover:opacity-50 dark:border-neutral-600 dark:bg-[#343541] dark:text-white md:mb-0 md:mt-2"
className="absolute top-0 left-0 right-0 mx-auto mb-3 flex w-fit items-center gap-3 rounded border border-neutral-600 bg-white py-2 px-4 text-black hover:opacity-50 dark:border-neutral-700 dark:bg-transparent dark:text-white md:mb-0 md:mt-2"
onClick={onRegenerate}
>
<IconRepeat size={16} /> {t('Regenerate response')}
</button>
)}
<div className="relative mx-2 flex w-full flex-grow flex-col rounded-md border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-gray-900/50 dark:bg-[#40414F] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] sm:mx-4">
<div className="relative mx-2 flex w-full flex-grow flex-col rounded-md border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-gray-900/50 dark:bg-[#27242e] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)] sm:mx-4">
<button
className="absolute left-2 top-2 rounded-sm p-1 text-neutral-800 opacity-60 hover:bg-neutral-200 hover:text-neutral-900 dark:bg-opacity-50 dark:text-neutral-100 dark:hover:text-neutral-200"
onClick={() => setShowPluginSelect(!showPluginSelect)}
@@ -289,7 +289,7 @@ export const ChatInput = ({
</button>
{showPluginSelect && (
<div className="absolute left-0 bottom-14 rounded bg-white dark:bg-[#343541]">
<div className="absolute left-0 bottom-14 rounded bg-white dark:bg-transparent">
<PluginSelect
plugin={plugin}
onKeyDown={(e: any) => {
@@ -379,7 +379,7 @@ export const ChatInput = ({
)}
</div>
</div>
<div className="px-3 pt-2 pb-3 text-center text-[12px] text-black/50 dark:text-white/50 md:px-4 md:pt-3 md:pb-6">
{/* <div className="px-3 pt-2 pb-3 text-center text-[12px] text-black/50 dark:text-white/50 md:px-4 md:pt-3 md:pb-6">
<a
href="https://github.com/mckaywrigley/chatbot-ui"
target="_blank"
@@ -392,7 +392,7 @@ export const ChatInput = ({
{t(
"Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.",
)}
</div>
</div> */}
</div>
);
};
@@ -6,7 +6,7 @@ interface Props { }
export const ChatLoader: FC<Props> = () => {
return (
<div
className="group border-b border-black/10 bg-gray-50 text-gray-800 dark:border-gray-900/50 dark:bg-[#444654] dark:text-gray-100"
className="group border-b border-black/10 bg-gray-50 text-gray-800 dark:border-gray-800/50 dark:bg-transparent dark:text-gray-100"
style={{ overflowWrap: 'anywhere' }}
>
<div className="m-auto flex gap-4 p-4 text-base md:max-w-2xl md:gap-6 md:py-6 lg:max-w-2xl lg:px-0 xl:max-w-3xl">
@@ -128,8 +128,8 @@ export const ChatMessage: FC<Props> = memo(({ message, messageIndex, onEdit }) =
<div
className={`group md:px-4 ${
message.role === 'assistant'
? 'border-b border-black/10 bg-gray-50 text-gray-800 dark:border-gray-900/50 dark:bg-[#444654] dark:text-gray-100'
: 'border-b border-black/10 bg-white text-gray-800 dark:border-gray-900/50 dark:bg-[#343541] dark:text-gray-100'
? 'border-b border-black/10 bg-gray-50 text-gray-800 dark:border-gray-800/50 dark:bg-transparent dark:text-gray-100'
: 'border-b border-black/10 bg-white text-gray-800 dark:border-gray-800/50 dark:bg-transparent dark:text-gray-100'
}`}
style={{ overflowWrap: 'anywhere' }}
>
@@ -149,7 +149,7 @@ export const ChatMessage: FC<Props> = memo(({ message, messageIndex, onEdit }) =
<div className="flex w-full flex-col">
<textarea
ref={textareaRef}
className="w-full resize-none whitespace-pre-wrap border-none dark:bg-[#343541]"
className="w-full resize-none whitespace-pre-wrap border-none dark:bg-[#1d1c21]"
value={messageContent}
onChange={handleInputChange}
onKeyDown={handlePressEnter}
@@ -31,7 +31,7 @@ export const ModelSelect = () => {
<label className="mb-2 text-left text-neutral-700 dark:text-neutral-400">
{t('Model')}
</label>
<div className="w-full rounded-lg border border-neutral-200 bg-transparent pr-2 text-neutral-900 dark:border-neutral-600 dark:text-white">
<div className="w-full rounded-lg border border-neutral-600 bg-transparent pr-2 text-neutral-900 dark:border-neutral-700 dark:text-white">
<select
className="w-full bg-transparent p-2"
placeholder={t('Select a model') || ''}
@@ -42,7 +42,7 @@ export const ModelSelect = () => {
<option
key={model.id}
value={model.id}
className="dark:bg-[#343541] dark:text-white"
className="dark:bg-[#1d1c21] dark:text-white"
>
{model.id === defaultModelId
? `Default (${model.name})`
@@ -51,7 +51,7 @@ export const ModelSelect = () => {
))}
</select>
</div>
<div className="w-full mt-3 text-left text-neutral-700 dark:text-neutral-400 flex items-center">
{/* <div className="w-full mt-3 text-left text-neutral-700 dark:text-neutral-400 flex items-center">
<a
href="https://platform.openai.com/account/usage"
target="_blank"
@@ -60,7 +60,7 @@ export const ModelSelect = () => {
<IconExternalLink size={18} className={'inline mr-1'} />
{t('View Account Usage')}
</a>
</div>
</div> */}
</div>
);
};
@@ -62,7 +62,7 @@ export const PluginSelect: FC<Props> = ({
return (
<div className="flex flex-col">
<div className="mb-1 w-full rounded border border-neutral-200 bg-transparent pr-2 text-neutral-900 dark:border-neutral-600 dark:text-white">
<div className="mb-1 w-full rounded border border-neutral-600 bg-transparent pr-2 text-neutral-900 dark:border-neutral-700 dark:text-white">
<select
ref={selectRef}
className="w-full cursor-pointer bg-transparent p-2"
@@ -82,7 +82,7 @@ export const PluginSelect: FC<Props> = ({
<option
key="chatgpt"
value="chatgpt"
className="dark:bg-[#343541] dark:text-white"
className="dark:bg-[#1d1c21] dark:text-white"
>
ChatGPT
</option>
@@ -91,7 +91,7 @@ export const PluginSelect: FC<Props> = ({
<option
key={plugin.id}
value={plugin.id}
className="dark:bg-[#343541] dark:text-white"
className="dark:bg-[#1d1c21] dark:text-white"
>
{plugin.name}
</option>
@@ -20,14 +20,14 @@ export const PromptList: FC<Props> = ({
return (
<ul
ref={promptListRef}
className="z-10 max-h-52 w-full overflow-scroll rounded border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-neutral-500 dark:bg-[#343541] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)]"
className="z-10 max-h-52 w-full overflow-scroll rounded border border-black/10 bg-white shadow-[0_0_10px_rgba(0,0,0,0.10)] dark:border-neutral-600 dark:bg-[#1d1c21] dark:text-white dark:shadow-[0_0_15px_rgba(0,0,0,0.10)]"
>
{prompts.map((prompt, index) => (
<li
key={prompt.id}
className={`${
index === activePromptIndex
? 'bg-gray-200 dark:bg-[#202123] dark:text-black'
? 'bg-gray-200 dark:bg-[#161519] dark:text-black'
: ''
} cursor-pointer px-3 py-2 text-sm text-black dark:text-white`}
onClick={(e) => {
@@ -15,7 +15,7 @@ export const Regenerate: FC<Props> = ({ onRegenerate }) => {
{t('Sorry, there was an error.')}
</div>
<button
className="flex h-12 gap-2 w-full items-center justify-center rounded-lg border border-b-neutral-300 bg-neutral-100 text-sm font-semibold text-neutral-500 dark:border-none dark:bg-[#444654] dark:text-neutral-200"
className="flex h-12 gap-2 w-full items-center justify-center rounded-lg border border-b-neutral-300 bg-neutral-100 text-sm font-semibold text-neutral-500 dark:border-none dark:bg-[#232228] dark:text-neutral-200"
onClick={onRegenerate}
>
<IconRefresh />
@@ -198,7 +198,7 @@ export const SystemPrompt: FC<Props> = ({
</label>
<textarea
ref={textareaRef}
className="w-full rounded-lg border border-neutral-200 bg-transparent px-4 py-3 text-neutral-900 dark:border-neutral-600 dark:text-neutral-100"
className="w-full rounded-lg border border-neutral-600 bg-transparent px-4 py-3 text-neutral-900 dark:border-neutral-700 dark:text-neutral-100"
style={{
resize: 'none',
bottom: `${textareaRef?.current?.scrollHeight}px`,
@@ -83,7 +83,7 @@ export const VariableModal: FC<Props> = ({
>
<div
ref={modalRef}
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#202123] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#161519] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
role="dialog"
>
<div className="mb-4 text-xl font-bold text-black dark:text-neutral-200">
@@ -102,7 +102,7 @@ export const VariableModal: FC<Props> = ({
<textarea
ref={index === 0 ? nameInputRef : undefined}
className="mt-1 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-1 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
style={{ resize: 'none' }}
placeholder={`Enter a value for ${variable.key}...`}
value={variable.value}
@@ -113,7 +113,7 @@ export const VariableModal: FC<Props> = ({
))}
<button
className="mt-6 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
className="mt-6 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
onClick={handleSubmit}
>
Submit
@@ -42,32 +42,32 @@ export const ChatbarSettings = () => {
<ClearConversations onClearConversations={handleClearConversations} />
) : null}
<Import onImport={handleImportConversations} />
{/* <Import onImport={handleImportConversations} /> */}
<SidebarButton
{/* <SidebarButton
text={t('Export data')}
icon={<IconFileExport size={18} />}
onClick={() => handleExportData()}
/>
/> */}
<SidebarButton
{/* <SidebarButton
text={t('Settings')}
icon={<IconSettings size={18} />}
onClick={() => setIsSettingDialog(true)}
/>
/> */}
{!serverSideApiKeyIsSet ? (
{/* {!serverSideApiKeyIsSet ? (
<Key apiKey={apiKey} onApiKeyChange={handleApiKeyChange} />
) : null}
) : null} */}
{!serverSidePluginKeysSet ? <PluginKeys /> : null}
{/* {!serverSidePluginKeysSet ? <PluginKeys /> : null} */}
<SettingDialog
{/* <SettingDialog
open={isSettingDialogOpen}
onClose={() => {
setIsSettingDialog(false);
}}
/>
/> */}
</div>
);
};
@@ -103,7 +103,7 @@ export const ConversationComponent = ({ conversation }: Props) => {
return (
<div className="relative flex items-center">
{isRenaming && selectedConversation?.id === conversation.id ? (
<div className="flex w-full items-center gap-3 rounded-lg bg-[#343541]/90 p-3">
<div className="flex w-full items-center gap-3 rounded-lg bg-[#1d1c21]/90 p-3">
<IconMessage size={18} />
<input
className="mr-12 flex-1 overflow-hidden overflow-ellipsis border-neutral-400 bg-transparent text-left text-[12.5px] leading-3 text-white outline-none focus:border-neutral-100"
@@ -116,11 +116,11 @@ export const ConversationComponent = ({ conversation }: Props) => {
</div>
) : (
<button
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#343541]/90 ${
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#1d1c21]/90 ${
messageIsStreaming ? 'disabled:cursor-not-allowed' : ''
} ${
selectedConversation?.id === conversation.id
? 'bg-[#343541]/90'
? 'bg-[#1d1c21]/90'
: ''
}`}
onClick={() => handleSelectConversation(conversation)}
@@ -72,7 +72,7 @@ export const PluginKeys = () => {
<div
ref={modalRef}
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#202123] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#161519] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
role="dialog"
>
<div className="mb-10 text-4xl">Plugin Keys</div>
@@ -88,7 +88,7 @@ export const PluginKeys = () => {
Google API Key
</div>
<input
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-2 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
type="password"
value={
pluginKeys
@@ -147,7 +147,7 @@ export const PluginKeys = () => {
Google CSE ID
</div>
<input
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-2 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
type="password"
value={
pluginKeys
@@ -203,7 +203,7 @@ export const PluginKeys = () => {
/>
<button
className="mt-6 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
className="mt-6 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
onClick={() => {
const pluginKey = pluginKeys.find(
(p) => p.pluginId === PluginID.GOOGLE_SEARCH,
@@ -220,7 +220,7 @@ export const PluginKeys = () => {
<button
type="button"
className="mt-6 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
className="mt-6 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
onClick={() => setIsChanging(false)}
>
{t('Save')}
@@ -68,7 +68,7 @@ const Folder = ({
};
const highlightDrop = (e: any) => {
e.target.style.background = '#343541';
e.target.style.background = '#1d1c21';
};
const removeHighlight = (e: any) => {
@@ -95,7 +95,7 @@ const Folder = ({
<>
<div className="relative flex items-center">
{isRenaming ? (
<div className="flex w-full items-center gap-3 bg-[#343541]/90 p-3">
<div className="flex w-full items-center gap-3 bg-[#1d1c21]/90 p-3">
{isOpen ? (
<IconCaretDown size={18} />
) : (
@@ -112,7 +112,7 @@ const Folder = ({
</div>
) : (
<button
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#343541]/90`}
className={`flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#1d1c21]/90`}
onClick={() => setIsOpen(!isOpen)}
onDrop={(e) => dropHandler(e)}
onDragOver={allowDrop}
@@ -13,7 +13,7 @@ export const Navbar: FC<Props> = ({
onNewConversation,
}) => {
return (
<nav className="flex w-full justify-between bg-[#202123] py-3 px-4">
<nav className="flex w-full justify-between bg-[#161519] py-3 px-4">
<div className="mr-4"></div>
<div className="max-w-[240px] overflow-hidden text-ellipsis whitespace-nowrap">
@@ -78,7 +78,7 @@ export const PromptComponent = ({ prompt }: Props) => {
return (
<div className="relative flex items-center">
<button
className="flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#343541]/90"
className="flex w-full cursor-pointer items-center gap-3 rounded-lg p-3 text-sm transition-colors duration-200 hover:bg-[#1d1c21]/90"
draggable="true"
onClick={(e) => {
e.stopPropagation();
@@ -63,7 +63,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
<div
ref={modalRef}
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#202123] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#161519] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
role="dialog"
>
<div className="text-sm font-bold text-black dark:text-neutral-200">
@@ -71,7 +71,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
</div>
<input
ref={nameInputRef}
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-2 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
placeholder={t('A name for your prompt.') || ''}
value={name}
onChange={(e) => setName(e.target.value)}
@@ -81,7 +81,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
{t('Description')}
</div>
<textarea
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-2 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
style={{ resize: 'none' }}
placeholder={t('A description for your prompt.') || ''}
value={description}
@@ -93,7 +93,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
{t('Prompt')}
</div>
<textarea
className="mt-2 w-full rounded-lg border border-neutral-500 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
className="mt-2 w-full rounded-lg border border-neutral-600 px-4 py-2 text-neutral-900 shadow focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-[#40414F] dark:text-neutral-100"
style={{ resize: 'none' }}
placeholder={
t(
@@ -107,7 +107,7 @@ export const PromptModal: FC<Props> = ({ prompt, onClose, onUpdatePrompt }) => {
<button
type="button"
className="w-full px-4 py-2 mt-6 border rounded-lg shadow border-neutral-500 text-neutral-900 hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
className="w-full px-4 py-2 mt-6 border rounded-lg shadow border-neutral-600 text-neutral-900 hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
onClick={() => {
const updatedPrompt = {
...prompt,
@@ -22,7 +22,7 @@ const Search: FC<Props> = ({ placeholder, searchTerm, onSearch }) => {
return (
<div className="relative flex items-center">
<input
className="w-full flex-1 rounded-md border border-neutral-600 bg-[#202123] px-4 py-3 pr-10 text-[14px] leading-3 text-white"
className="w-full flex-1 rounded-md border border-neutral-700 bg-[#161519] px-4 py-3 pr-10 text-[14px] leading-3 text-white"
type="text"
placeholder={t(placeholder) || ''}
value={searchTerm}
@@ -65,7 +65,7 @@ export const SettingDialog: FC<Props> = ({ open, onClose }) => {
<div
ref={modalRef}
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#202123] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
className="dark:border-netural-400 inline-block max-h-[400px] transform overflow-y-auto rounded-lg border border-gray-300 bg-white px-4 pt-5 pb-4 text-left align-bottom shadow-xl transition-all dark:bg-[#161519] sm:my-8 sm:max-h-[600px] sm:w-full sm:max-w-lg sm:p-6 sm:align-middle"
role="dialog"
>
<div className="text-lg pb-4 font-bold text-black dark:text-neutral-200">
@@ -89,7 +89,7 @@ export const SettingDialog: FC<Props> = ({ open, onClose }) => {
<button
type="button"
className="w-full px-4 py-2 mt-6 border rounded-lg shadow border-neutral-500 text-neutral-900 hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
className="w-full px-4 py-2 mt-6 border rounded-lg shadow border-neutral-600 text-neutral-900 hover:bg-neutral-100 focus:outline-none dark:border-neutral-800 dark:border-opacity-50 dark:bg-white dark:text-black dark:hover:bg-neutral-300"
onClick={() => {
handleSave();
onClose();
@@ -47,7 +47,7 @@ const Sidebar = <T,>({
};
const highlightDrop = (e: any) => {
e.target.style.background = '#343541';
e.target.style.background = '#1d1c21';
};
const removeHighlight = (e: any) => {
@@ -57,7 +57,7 @@ const Sidebar = <T,>({
return isOpen ? (
<div>
<div
className={`fixed top-0 ${side}-0 z-40 flex h-full w-[260px] flex-none flex-col space-y-2 bg-[#202123] p-2 text-[14px] transition-all sm:relative sm:top-0`}
className={`fixed top-0 ${side}-0 z-40 flex h-full w-[260px] flex-none flex-col space-y-2 bg-[#161519] p-2 text-[14px] transition-all sm:relative sm:top-0`}
>
<div className="flex items-center">
<button
@@ -0,0 +1,9 @@
version: '3.6'

services:
  chatgpt:
    build: .
    ports:
      - 3000:3000
    environment:
      - 'OPENAI_API_KEY='
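This copy of the compose file inside the UI package still builds the UI on its own, and with an empty `OPENAI_API_KEY` it leaves users supplying their own key. One way to exercise the UI container alone against an already-running self-hosted API is to override `OPENAI_API_HOST` at run time; the image tag and the `host.docker.internal` address below are illustrative and platform-dependent, not part of the commit:

```shell
# From the UI directory: build the image, then point it at a self-hosted API
docker build -t llama-gpt-ui .
docker run -p 3000:3000 \
  -e OPENAI_API_KEY= \
  -e OPENAI_API_HOST=http://host.docker.internal:8000 \
  llama-gpt-ui
```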
@@ -1,19 +1,20 @@
import { DocumentProps, Head, Html, Main, NextScript } from 'next/document';
import i18nextConfig from '../next-i18next.config';
// import i18nextConfig from '../next-i18next.config';
type Props = DocumentProps & {
// add custom document props
};
export default function Document(props: Props) {
const currentLocale =
props.__NEXT_DATA__.locale ?? i18nextConfig.i18n.defaultLocale;
// const currentLocale =
// props.__NEXT_DATA__.locale ?? i18nextConfig.i18n.defaultLocale;
return (
<Html lang={currentLocale}>
// <Html lang={currentLocale}>
<Html>
<Head>
<meta name="apple-mobile-web-app-capable" content="yes" />
<meta name="apple-mobile-web-app-title" content="Chatbot UI"></meta>
<meta name="apple-mobile-web-app-title" content="LlamaGPT"></meta>
</Head>
<body>
<Main />
@@ -52,6 +52,8 @@ const handler = async (req: Request): Promise<Response> => {
encoding.free();
console.log(model, promptToSend, temperatureToUse, key, messagesToSend);
const stream = await OpenAIStream(model, promptToSend, temperatureToUse, key, messagesToSend);
return new Response(stream);
@@ -360,8 +360,8 @@ const Home = ({
}}
>
<Head>
<title>Chatbot UI</title>
<meta name="description" content="ChatGPT but better." />
<title>LlamaGPT</title>
<meta name="description" content="Chat with a local LLM on your Umbrel without leaking your data to OpenAI" />
<meta
name="viewport"
content="height=device-height ,width=device-width, initial-scale=1, user-scalable=no"
@@ -17,6 +17,7 @@ const handler = async (req: Request): Promise<Response> => {
url = `${OPENAI_API_HOST}/openai/deployments?api-version=${OPENAI_API_VERSION}`;
}
console.log("making request to ", url);
const response = await fetch(url, {
headers: {
'Content-Type': 'application/json',
Three binary image files added, not shown (sizes: 761 B, 2.1 KiB, 15 KiB).
Some files were not shown because too many files have changed in this diff.