From 25a4dbb052542898a48695c27cc8cefec28b5756 Mon Sep 17 00:00:00 2001
From: itbm <22393016+itbm@users.noreply.github.com>
Date: Tue, 11 Apr 2023 10:16:33 +0100
Subject: [PATCH] Add support for Azure OpenAI (#495)

---
 README.md             |  6 ++++--
 pages/api/models.ts   | 23 +++++++++++++++++------
 types/env.ts          |  2 ++
 types/openai.ts       | 16 ++++++++++++++++
 utils/app/const.ts    |  9 +++++++++
 utils/server/index.ts | 30 +++++++++++++++++++-----------
 6 files changed, 67 insertions(+), 19 deletions(-)

diff --git a/README.md b/README.md
index 62d5ed3..436b59e 100644
--- a/README.md
+++ b/README.md
@@ -115,9 +115,11 @@ When deploying the application, the following environment variables can be set:
 | Environment Variable  | Default value                  | Description                                              |
 | --------------------- | ------------------------------ | -------------------------------------------------------- |
 | OPENAI_API_KEY        |                                | The default API key used for authentication with OpenAI |
+| OPENAI_API_HOST       | `https://api.openai.com`       | The base url, for Azure use `https://<name>.openai.azure.com` |
+| OPENAI_API_TYPE       | `openai`                       | The API type, options are `openai` or `azure`           |
+| OPENAI_API_VERSION    | `2023-03-15-preview`           | Only applicable for Azure OpenAI                         |
 | OPENAI_ORGANIZATION   |                                | Your OpenAI organization ID                              |
-| OPENAI_API_HOST       | `https://api.openai.com`       | Base url of the OpenAI API                               |
-| DEFAULT_MODEL         | `gpt-3.5-turbo`                | The default model to use on new conversations            |
+| DEFAULT_MODEL         | `gpt-3.5-turbo`                | The default model to use on new conversations, for Azure use `gpt-35-turbo` |
 | DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations    |
 | GOOGLE_API_KEY        |                                | See [Custom Search JSON API documentation][GCSE]         |
 | GOOGLE_CSE_ID         |                                | See [Custom Search JSON API documentation][GCSE]         |
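For illustration, a hypothetical `.env.local` for an Azure OpenAI deployment could combine the variables documented above as follows. The resource name `my-resource`, the key placeholder, and the deployment name are example values, not part of this patch; the sketch assumes the Azure deployment is named `gpt-35-turbo` so that `DEFAULT_MODEL` matches it.

```
# Example values only; replace with your own resource, key, and deployment name.
OPENAI_API_TYPE=azure
OPENAI_API_HOST=https://my-resource.openai.azure.com
OPENAI_API_VERSION=2023-03-15-preview
OPENAI_API_KEY=<your-azure-openai-key>
DEFAULT_MODEL=gpt-35-turbo
```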
diff --git a/pages/api/models.ts b/pages/api/models.ts
index 1a9209f..e72dcaa 100644
--- a/pages/api/models.ts
+++ b/pages/api/models.ts
@@ -1,4 +1,4 @@
-import { OPENAI_API_HOST } from '@/utils/app/const';
+import { OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '@/utils/app/const';
 
 import { OpenAIModel, OpenAIModelID, OpenAIModels } from '@/types/openai';
 
@@ -12,12 +12,22 @@ const handler = async (req: Request): Promise<Response> => {
       key: string;
     };
 
-    const response = await fetch(`${OPENAI_API_HOST}/v1/models`, {
+    let url = `${OPENAI_API_HOST}/v1/models`;
+    if (OPENAI_API_TYPE === 'azure') {
+      url = `${OPENAI_API_HOST}/openai/deployments?api-version=${OPENAI_API_VERSION}`;
+    }
+
+    const response = await fetch(url, {
       headers: {
         'Content-Type': 'application/json',
-        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`,
-        ...(process.env.OPENAI_ORGANIZATION && {
-          'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
+        ...(OPENAI_API_TYPE === 'openai' && {
+          Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
+        }),
+        ...(OPENAI_API_TYPE === 'azure' && {
+          'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
+        }),
+        ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
+          'OpenAI-Organization': OPENAI_ORGANIZATION,
         }),
       },
     });
@@ -40,8 +50,9 @@ const handler = async (req: Request): Promise<Response> => {
 
     const models: OpenAIModel[] = json.data
       .map((model: any) => {
+        const model_name = (OPENAI_API_TYPE === 'azure') ? model.model : model.id;
         for (const [key, value] of Object.entries(OpenAIModelID)) {
-          if (value === model.id) {
+          if (value === model_name) {
             return {
               id: model.id,
               name: OpenAIModels[value].name,
diff --git a/types/env.ts b/types/env.ts
index b6e4a54..f6b9dd7 100644
--- a/types/env.ts
+++ b/types/env.ts
@@ -1,5 +1,7 @@
 export interface ProcessEnv {
   OPENAI_API_KEY: string;
   OPENAI_API_HOST?: string;
+  OPENAI_API_TYPE?: 'openai' | 'azure';
+  OPENAI_API_VERSION?: string;
   OPENAI_ORGANIZATION?: string;
 }
diff --git a/types/openai.ts b/types/openai.ts
index 8e423da..d404350 100644
--- a/types/openai.ts
+++ b/types/openai.ts
@@ -1,3 +1,5 @@
+import { OPENAI_API_TYPE } from '../utils/app/const';
+
 export interface OpenAIModel {
   id: string;
   name: string;
@@ -7,7 +9,9 @@ export interface OpenAIModel {
 
 export enum OpenAIModelID {
   GPT_3_5 = 'gpt-3.5-turbo',
+  GPT_3_5_AZ = 'gpt-35-turbo',
   GPT_4 = 'gpt-4',
+  GPT_4_32K = 'gpt-4-32k',
 }
 
 // in case the `DEFAULT_MODEL` environment variable is not set or set to an unsupported model
@@ -20,10 +24,22 @@ export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
     maxLength: 12000,
     tokenLimit: 4000,
   },
+  [OpenAIModelID.GPT_3_5_AZ]: {
+    id: OpenAIModelID.GPT_3_5_AZ,
+    name: 'GPT-3.5',
+    maxLength: 12000,
+    tokenLimit: 4000,
+  },
   [OpenAIModelID.GPT_4]: {
     id: OpenAIModelID.GPT_4,
     name: 'GPT-4',
     maxLength: 24000,
     tokenLimit: 8000,
   },
+  [OpenAIModelID.GPT_4_32K]: {
+    id: OpenAIModelID.GPT_4_32K,
+    name: 'GPT-4-32K',
+    maxLength: 96000,
+    tokenLimit: 32000,
+  },
 };
diff --git a/utils/app/const.ts b/utils/app/const.ts
index 20a9958..eca6deb 100644
--- a/utils/app/const.ts
+++ b/utils/app/const.ts
@@ -4,3 +4,12 @@ export const DEFAULT_SYSTEM_PROMPT =
 
 export const OPENAI_API_HOST =
   process.env.OPENAI_API_HOST || 'https://api.openai.com';
+
+export const OPENAI_API_TYPE =
+  process.env.OPENAI_API_TYPE || 'openai';
+
+export const OPENAI_API_VERSION =
+  process.env.OPENAI_API_VERSION || '2023-03-15-preview';
+
+export const OPENAI_ORGANIZATION =
+  process.env.OPENAI_ORGANIZATION || '';
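As a minimal sketch (not part of the patch) of how the constants above drive a request, the helper below mirrors the endpoint and header selection used in `pages/api/models.ts` above and in `utils/server/index.ts` below. `host`, `apiKey`, and `deploymentOrModelId` are placeholder parameters introduced here for illustration.

```ts
type ApiType = 'openai' | 'azure';

// Builds the chat-completions URL and auth header for either API type,
// following the URL patterns and header names introduced by this patch.
function chatCompletionsRequest(
  apiType: ApiType,
  host: string, // e.g. https://api.openai.com or https://my-resource.openai.azure.com
  apiKey: string,
  deploymentOrModelId: string, // OpenAI model id, or the Azure deployment name
  apiVersion = '2023-03-15-preview',
): { url: string; headers: Record<string, string> } {
  if (apiType === 'azure') {
    return {
      // Azure routes by deployment and authenticates with an `api-key` header.
      url: `${host}/openai/deployments/${deploymentOrModelId}/chat/completions?api-version=${apiVersion}`,
      headers: { 'Content-Type': 'application/json', 'api-key': apiKey },
    };
  }
  return {
    // OpenAI keeps the single /v1 endpoint and a Bearer token.
    url: `${host}/v1/chat/completions`,
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}` },
  };
}
```

Because the Azure URL already names the deployment, the request body in `utils/server/index.ts` below includes `model` only when `OPENAI_API_TYPE` is `openai`.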
diff --git a/utils/server/index.ts b/utils/server/index.ts
index 6cc3dca..bfa5408 100644
--- a/utils/server/index.ts
+++ b/utils/server/index.ts
@@ -1,7 +1,7 @@
 import { Message } from '@/types/chat';
 import { OpenAIModel } from '@/types/openai';
 
-import { OPENAI_API_HOST } from '../app/const';
+import { OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '../app/const';
 
 import {
   ParsedEvent,
@@ -29,17 +29,26 @@ export const OpenAIStream = async (
   key: string,
   messages: Message[],
 ) => {
-  const res = await fetch(`${OPENAI_API_HOST}/v1/chat/completions`, {
+  let url = `${OPENAI_API_HOST}/v1/chat/completions`;
+  if (OPENAI_API_TYPE === 'azure') {
+    url = `${OPENAI_API_HOST}/openai/deployments/${model.id}/chat/completions?api-version=${OPENAI_API_VERSION}`;
+  }
+  const res = await fetch(url, {
     headers: {
       'Content-Type': 'application/json',
-      Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`,
-      ...(process.env.OPENAI_ORGANIZATION && {
-        'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
+      ...(OPENAI_API_TYPE === 'openai' && {
+        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...(OPENAI_API_TYPE === 'azure' && {
+        'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
+        'OpenAI-Organization': OPENAI_ORGANIZATION,
       }),
     },
     method: 'POST',
     body: JSON.stringify({
-      model: model.id,
+      ...(OPENAI_API_TYPE === 'openai' && {model: model.id}),
       messages: [
         {
           role: 'system',
@@ -80,13 +89,12 @@ export const OpenAIStream = async (
       if (event.type === 'event') {
         const data = event.data;
 
-        if (data === '[DONE]') {
-          controller.close();
-          return;
-        }
-
         try {
           const json = JSON.parse(data);
+          if (json.choices[0].finish_reason != null) {
+            controller.close();
+            return;
+          }
           const text = json.choices[0].delta.content;
           const queue = encoder.encode(text);
           controller.enqueue(queue);
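The final hunk closes the stream when a parsed chunk reports a non-null `finish_reason` instead of waiting for the `[DONE]` sentinel. As a rough illustration (simplified, not captured output), the two chunk shapes the new check distinguishes are sketched below; only `choices[0].delta.content` and `choices[0].finish_reason` are actually read by the handler.

```ts
// Illustrative shapes only: what the streaming handler inspects per chunk.
const midStreamChunk = {
  choices: [{ delta: { content: 'Hello' }, finish_reason: null }], // keep streaming
};
const finalChunk = {
  choices: [{ delta: {}, finish_reason: 'stop' }], // triggers controller.close()
};
```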