change output limit
commit 1dc4f86df5
parent f9ddf07085
@@ -2,7 +2,7 @@ import { ChatBody, Message } from '@/types/chat';
 import { DEFAULT_SYSTEM_PROMPT } from '@/utils/app/const';
 import { OpenAIError, OpenAIStream } from '@/utils/server';
 import tiktokenModel from '@dqbd/tiktoken/encoders/cl100k_base.json';
-import { init, Tiktoken } from '@dqbd/tiktoken/lite/init';
+import { Tiktoken, init } from '@dqbd/tiktoken/lite/init';
 // @ts-expect-error
 import wasm from '../../node_modules/@dqbd/tiktoken/lite/tiktoken_bg.wasm?module';
 
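The hunk above only reorders the named imports; behavior is unchanged. For context, these four imports are the usual setup for the tiktoken "lite" WebAssembly build. A minimal sketch of how they are typically wired together (not part of this diff; the `encoding` variable name is illustrative):

// Instantiate the wasm module once, then build an encoder from the
// bundled cl100k_base definition shipped with @dqbd/tiktoken.
await init((imports) => WebAssembly.instantiate(wasm, imports));

const encoding = new Tiktoken(
  tiktokenModel.bpe_ranks,
  tiktokenModel.special_tokens,
  tiktokenModel.pat_str,
);

const tokens = encoding.encode('hello world');
encoding.free(); // wasm-backed encoders must be freed explicitly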
@@ -35,7 +35,7 @@ const handler = async (req: Request): Promise<Response> => {
     const message = messages[i];
     const tokens = encoding.encode(message.content);
 
-    if (tokenCount + tokens.length + 1000 > model.tokenLimit) {
+    if (tokenCount + tokens.length + 2000 > model.tokenLimit) {
       break;
     }
     tokenCount += tokens.length;
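This is the substantive half of the change: the handler walks the conversation from newest to oldest and stops including messages once the prompt would leave less than 2000 tokens of headroom in the model's context window, up from 1000. That reserve has to cover the completion requested via max_tokens below. A sketch of the surrounding loop as implied by the context lines (tokenCount, messages, encoding, and model.tokenLimit appear in the diff; messagesToSend and the loop bounds are assumptions):

let tokenCount = 0;
let messagesToSend: Message[] = [];

for (let i = messages.length - 1; i >= 0; i--) {
  const message = messages[i];
  const tokens = encoding.encode(message.content);

  // Stop including older messages once prompt tokens plus the
  // 2000-token completion reserve would overflow the context window.
  if (tokenCount + tokens.length + 2000 > model.tokenLimit) {
    break;
  }
  tokenCount += tokens.length;
  messagesToSend = [message, ...messagesToSend];
}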
@@ -33,7 +33,7 @@ export const OpenAIStream = async (
       Authorization: `Bearer ${key ? key : process.env.OPENAI_API_KEY}`,
       ...(process.env.OPENAI_ORGANIZATION && {
         'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
-      })
+      }),
     },
     method: 'POST',
     body: JSON.stringify({
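The only change in this hunk is a trailing comma after the conditional spread; nothing functional. The `...(cond && { ... })` idiom merges the OpenAI-Organization header only when the environment variable is set, because spreading a falsy value into an object literal adds no properties. A standalone sketch of the pattern (header values are illustrative):

const headers = {
  'Content-Type': 'application/json',
  // When OPENAI_ORGANIZATION is unset, the && evaluates to undefined
  // and the spread contributes nothing to the object.
  ...(process.env.OPENAI_ORGANIZATION && {
    'OpenAI-Organization': process.env.OPENAI_ORGANIZATION,
  }),
};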
@@ -45,7 +45,7 @@ export const OpenAIStream = async (
       },
       ...messages,
     ],
-    max_tokens: 1000,
+    max_tokens: 2000,
     temperature: 1,
     stream: true,
   }),
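max_tokens caps the completion, and it moves from 1000 to 2000 in step with the reserve in the handler hunk above; if the two values drift apart, a request can exceed the model's context window. One way to keep them in sync would be a shared constant (a sketch, not part of this commit; the constant name and module are hypothetical):

// utils/app/const.ts (hypothetical location, alongside DEFAULT_SYSTEM_PROMPT)
export const MAX_COMPLETION_TOKENS = 2000;

// handler: reserve headroom for the completion
//   if (tokenCount + tokens.length + MAX_COMPLETION_TOKENS > model.tokenLimit) break;

// OpenAIStream request body: ask for at most that many completion tokens
//   max_tokens: MAX_COMPLETION_TOKENS,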