Add support for Azure OpenAI (#495)
parent 60288ad20a
commit 25a4dbb052
@@ -115,9 +115,11 @@ When deploying the application, the following environment variables can be set:
 | Environment Variable  | Default value                  | Description                                              |
 | --------------------- | ------------------------------ | -------------------------------------------------------- |
 | OPENAI_API_KEY        |                                | The default API key used for authentication with OpenAI  |
-| OPENAI_API_HOST       | `https://api.openai.com`       | Base url of the OpenAI API                                |
+| OPENAI_API_HOST       | `https://api.openai.com`       | The base url, for Azure use `https://<endpoint>.openai.azure.com` |
+| OPENAI_API_TYPE       | `openai`                       | The API type, options are `openai` or `azure`             |
+| OPENAI_API_VERSION    | `2023-03-15-preview`           | Only applicable for Azure OpenAI                          |
 | OPENAI_ORGANIZATION   |                                | Your OpenAI organization ID                               |
-| DEFAULT_MODEL         | `gpt-3.5-turbo`                | The default model to use on new conversations             |
+| DEFAULT_MODEL         | `gpt-3.5-turbo`                | The default model to use on new conversations, for Azure use `gpt-35-turbo` |
 | DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations     |
 | GOOGLE_API_KEY        |                                | See [Custom Search JSON API documentation][GCSE]          |
 | GOOGLE_CSE_ID         |                                | See [Custom Search JSON API documentation][GCSE]          |
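(Aside, not part of the diff: a minimal sketch of an Azure-style configuration for the variables documented above. The resource name `my-resource` and the key value are placeholders; per the changes below, the model id is also used as the Azure deployment name in request URLs.)

```ts
// Illustration only: example values for an Azure OpenAI setup.
const azureEnvExample = {
  OPENAI_API_TYPE: 'azure',
  OPENAI_API_HOST: 'https://my-resource.openai.azure.com', // placeholder resource
  OPENAI_API_VERSION: '2023-03-15-preview',
  OPENAI_API_KEY: '<azure-api-key>', // placeholder
  DEFAULT_MODEL: 'gpt-35-turbo', // doubles as the deployment name in request URLs
};
```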
@@ -1,4 +1,4 @@
-import { OPENAI_API_HOST } from '@/utils/app/const';
+import { OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '@/utils/app/const';
 
 import { OpenAIModel, OpenAIModelID, OpenAIModels } from '@/types/openai';
 
@@ -12,12 +12,22 @@ const handler = async (req: Request): Promise<Response> => {
     key: string;
   };
 
-  const response = await fetch(`${OPENAI_API_HOST}/v1/models`, {
+  let url = `${OPENAI_API_HOST}/v1/models`;
+  if (OPENAI_API_TYPE === 'azure') {
+    url = `${OPENAI_API_HOST}/openai/deployments?api-version=${OPENAI_API_VERSION}`;
+  }
+
+  const response = await fetch(url, {
     headers: {
       'Content-Type': 'application/json',
-      Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`,
-      ...(process.env.OPENAI_ORGANIZATION && {
-        'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
+      ...(OPENAI_API_TYPE === 'openai' && {
+        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...(OPENAI_API_TYPE === 'azure' && {
+        'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
+        'OpenAI-Organization': OPENAI_ORGANIZATION,
       }),
     },
   });
@@ -40,8 +50,9 @@ const handler = async (req: Request): Promise<Response> => {
 
   const models: OpenAIModel[] = json.data
     .map((model: any) => {
+      const model_name = (OPENAI_API_TYPE === 'azure') ? model.model : model.id;
       for (const [key, value] of Object.entries(OpenAIModelID)) {
-        if (value === model.id) {
+        if (value === model_name) {
           return {
             id: model.id,
             name: OpenAIModels[value].name,
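(Aside, not part of the diff: the `model.model` lookup above reflects the shape of the two listings the handler can now receive. The interface below is an illustrative assumption, not a type from the project; for Azure, `id` is the deployment name and `model` is the underlying model name.)

```ts
// Approximate shape of one entry from GET .../openai/deployments (Azure)
// versus GET /v1/models (OpenAI), as consumed by the handler above.
interface ModelListEntry {
  id: string;     // Azure: deployment name; OpenAI: model id, e.g. 'gpt-3.5-turbo'
  model?: string; // Azure only: underlying model, e.g. 'gpt-35-turbo'
}

// Mirrors the handler's lookup key selection.
const lookupName = (apiType: 'openai' | 'azure', entry: ModelListEntry) =>
  apiType === 'azure' ? entry.model : entry.id;
```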
@@ -1,5 +1,7 @@
 export interface ProcessEnv {
   OPENAI_API_KEY: string;
   OPENAI_API_HOST?: string;
+  OPENAI_API_TYPE?: 'openai' | 'azure';
+  OPENAI_API_VERSION?: string;
   OPENAI_ORGANIZATION?: string;
 }
@@ -1,3 +1,5 @@
+import { OPENAI_API_TYPE } from '../utils/app/const';
+
 export interface OpenAIModel {
   id: string;
   name: string;
@@ -7,7 +9,9 @@ export interface OpenAIModel {
 
 export enum OpenAIModelID {
   GPT_3_5 = 'gpt-3.5-turbo',
+  GPT_3_5_AZ = 'gpt-35-turbo',
   GPT_4 = 'gpt-4',
+  GPT_4_32K = 'gpt-4-32k',
 }
 
 // in case the `DEFAULT_MODEL` environment variable is not set or set to an unsupported model
@@ -20,10 +24,22 @@ export const OpenAIModels: Record<OpenAIModelID, OpenAIModel> = {
     maxLength: 12000,
     tokenLimit: 4000,
   },
+  [OpenAIModelID.GPT_3_5_AZ]: {
+    id: OpenAIModelID.GPT_3_5_AZ,
+    name: 'GPT-3.5',
+    maxLength: 12000,
+    tokenLimit: 4000,
+  },
   [OpenAIModelID.GPT_4]: {
     id: OpenAIModelID.GPT_4,
     name: 'GPT-4',
     maxLength: 24000,
     tokenLimit: 8000,
   },
+  [OpenAIModelID.GPT_4_32K]: {
+    id: OpenAIModelID.GPT_4_32K,
+    name: 'GPT-4-32K',
+    maxLength: 96000,
+    tokenLimit: 32000,
+  },
 };
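(Side note, not part of the diff: Azure's model name for GPT-3.5 omits the dot, hence the separate `GPT_3_5_AZ` entry with the same display name. The snippet below is an assumed usage example only.)

```ts
import { OpenAIModelID, OpenAIModels } from '@/types/openai';

// 'gpt-35-turbo' gets its own enum member but resolves to the same UI label.
const entry = OpenAIModels[OpenAIModelID.GPT_3_5_AZ];
console.log(entry.id, entry.name); // "gpt-35-turbo" "GPT-3.5"
```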
@@ -4,3 +4,12 @@ export const DEFAULT_SYSTEM_PROMPT =
 
 export const OPENAI_API_HOST =
   process.env.OPENAI_API_HOST || 'https://api.openai.com';
+
+export const OPENAI_API_TYPE =
+  process.env.OPENAI_API_TYPE || 'openai';
+
+export const OPENAI_API_VERSION =
+  process.env.OPENAI_API_VERSION || '2023-03-15-preview';
+
+export const OPENAI_ORGANIZATION =
+  process.env.OPENAI_ORGANIZATION || '';
@@ -1,7 +1,7 @@
 import { Message } from '@/types/chat';
 import { OpenAIModel } from '@/types/openai';
 
-import { OPENAI_API_HOST } from '../app/const';
+import { OPENAI_API_HOST, OPENAI_API_TYPE, OPENAI_API_VERSION, OPENAI_ORGANIZATION } from '../app/const';
 
 import {
   ParsedEvent,
@@ -29,17 +29,26 @@ export const OpenAIStream = async (
   key: string,
   messages: Message[],
 ) => {
-  const res = await fetch(`${OPENAI_API_HOST}/v1/chat/completions`, {
+  let url = `${OPENAI_API_HOST}/v1/chat/completions`;
+  if (OPENAI_API_TYPE === 'azure') {
+    url = `${OPENAI_API_HOST}/openai/deployments/${model.id}/chat/completions?api-version=${OPENAI_API_VERSION}`;
+  }
+  const res = await fetch(url, {
     headers: {
       'Content-Type': 'application/json',
-      Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`,
-      ...(process.env.OPENAI_ORGANIZATION && {
-        'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
+      ...(OPENAI_API_TYPE === 'openai' && {
+        Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...(OPENAI_API_TYPE === 'azure' && {
+        'api-key': `${key ? key : process.env.OPENAI_API_KEY}`
+      }),
+      ...((OPENAI_API_TYPE === 'openai' && OPENAI_ORGANIZATION) && {
+        'OpenAI-Organization': OPENAI_ORGANIZATION,
       }),
     },
     method: 'POST',
     body: JSON.stringify({
-      model: model.id,
+      ...(OPENAI_API_TYPE === 'openai' && {model: model.id}),
       messages: [
         {
           role: 'system',
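(Aside, not part of the diff: a minimal sketch of the request target, auth header, and body difference that `OpenAIStream` now produces for each API type. The helper name is hypothetical; it only mirrors the logic in the hunk above, where `model.id` doubles as the Azure deployment name and the `model` field is omitted from the Azure body because the deployment already fixes the model.)

```ts
// Sketch only: how the two API types differ in the chat-completions request.
function chatCompletionsRequest(
  apiType: 'openai' | 'azure',
  host: string,
  apiVersion: string,
  key: string,
  modelId: string,
) {
  const url =
    apiType === 'azure'
      ? `${host}/openai/deployments/${modelId}/chat/completions?api-version=${apiVersion}`
      : `${host}/v1/chat/completions`;
  const headers =
    apiType === 'azure'
      ? { 'api-key': key }                   // Azure authenticates via api-key header
      : { Authorization: `Bearer ${key}` };  // OpenAI uses a Bearer token
  // Azure: no `model` in the body; the deployment in the URL determines it.
  const bodyExtras = apiType === 'azure' ? {} : { model: modelId };
  return { url, headers, bodyExtras };
}
```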
@@ -80,13 +89,12 @@ export const OpenAIStream = async (
         if (event.type === 'event') {
           const data = event.data;
 
-          if (data === '[DONE]') {
-            controller.close();
-            return;
-          }
-
           try {
             const json = JSON.parse(data);
+            if (json.choices[0].finish_reason != null) {
+              controller.close();
+              return;
+            }
             const text = json.choices[0].delta.content;
             const queue = encoder.encode(text);
             controller.enqueue(queue);
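(For context, not part of the diff: an assumed, approximate shape of the streamed chunks this parser consumes. Intermediate chunks carry `delta.content` with a null `finish_reason`; the final chunk sets `finish_reason`, which is what now closes the stream instead of the `[DONE]` sentinel.)

```ts
// Illustrative type only; the handler above parses the JSON untyped.
interface ChatCompletionChunk {
  choices: {
    delta: { content?: string };
    finish_reason: string | null; // e.g. 'stop' on the final chunk
  }[];
}
```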