@@ -29,7 +29,7 @@ const track = async (userId: string, model: string) => {
2929 }
3030 )
3131}
32- type LLM = 'openai/gpt-4' | 'openai/gpt-3.5-turbo-16k' | 'tiiuae/falcon-7b' | 'google/bison' | 'bigscience/bloomz-7b1'
32+ type LLM = 'openai/gpt-4' | 'openai/gpt-3.5-turbo' | 'openai/gpt-3.5-turbo-16k' | 'tiiuae/falcon-7b' | 'google/bison' | 'bigscience/bloomz-7b1'
3333
3434interface RequestPayload {
3535 prompt : string
@@ -95,7 +95,7 @@ const getUserId = async (apiKey) => {
9595
9696const handler = async ( req : Request , res : Response ) : Promise < Response > => {
9797 let { prompt, history, system, model, stream, max_new_tokens, stop } = ( await req . json ( ) ) as RequestPayload
98- if ( ! model ) model = 'openai/gpt-3.5-turbo-16k '
98+ if ( ! model ) model = 'openai/gpt-3.5-turbo'
9999 if ( stream === undefined ) stream = true
100100 if ( ! prompt ) {
101101 return new Response ( JSON . stringify ( { error : 'No prompt in the request' } ) , {
@@ -116,7 +116,7 @@ const handler = async (req: Request, res: Response): Promise<Response> => {
116116
117117 //3. pass in the history of the conversation as well as the context (which is included in the prompt)
118118 const payload : OpenAIPayload = {
119- model : 'gpt-3.5-turbo-16k ' ,
119+ model : 'gpt-3.5-turbo' ,
120120 messages,
121121 stream : true ,
122122 }
@@ -230,6 +230,25 @@ const handler = async (req: Request, res: Response): Promise<Response> => {
230230 } )
231231 }
232232 readableStream = await OpenAIStream ( payload )
233+ } else if ( model === 'openai/gpt-3.5-turbo-16k' ) {
234+ payload . model = 'gpt-3.5-turbo-16k'
235+ if ( ! stream ) {
236+ payload . stream = stream
237+ const res = await fetch ( 'https://api.openai.com/v1/chat/completions' , {
238+ headers : {
239+ 'Content-Type' : 'application/json' ,
240+ Authorization : `Bearer ${ process . env . OPENAI_API_KEY ?? '' } ` ,
241+ } ,
242+ method : 'POST' ,
243+ body : JSON . stringify ( payload ) ,
244+ } ) . then ( ( res ) => res . json ( ) )
245+ return new Response ( JSON . stringify ( {
245+ generated_text : res ?. choices ?. [ 0 ] ?. message ?. content || ''
247+ } ) , {
248+ status : 200 ,
249+ } )
250+ }
251+ readableStream = await OpenAIStream ( payload )
233252 } else {
234253 if ( ! stream ) {
235254 payload . stream = stream
0 commit comments