#!/usr/bin/env -S npm run tsn -T

import OpenAI from 'openai';
import { Stream } from 'openai/streaming';

// gets API Key from environment variable OPENAI_API_KEY
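// (You can also pass the key explicitly, e.g. `new OpenAI({ apiKey: '...' })`.)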
const openai = new OpenAI();

async function main() {
  // ---------------- Explicit non-streaming params ------------

  const params: OpenAI.Chat.CompletionCreateParams = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test!' }],
  };
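  // With no `stream` flag, the result below is a plain `ChatCompletion` (not a stream).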
  const completion = await openai.chat.completions.create(params);
  console.log(completion.choices[0]?.message?.content);

  // ---------------- Explicit streaming params ----------------

  const streamingParams: OpenAI.Chat.CompletionCreateParams = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test!' }],
    stream: true,
  };

  const stream = await openai.chat.completions.create(streamingParams);
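  // With `stream: true`, the result is an async-iterable stream of chunks rather than a single completion.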
  for await (const chunk of stream) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }
  process.stdout.write('\n');

  // ---------------- Explicit (non)streaming types ----------------

  const params1: OpenAI.Chat.CompletionCreateParamsNonStreaming = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test!' }],
  };

  const params2: OpenAI.Chat.CompletionCreateParamsStreaming = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test!' }],
    stream: true,
  };
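  // (Sketch) Passing these explicitly typed params mirrors the calls above:
  // `params1` resolves to a `ChatCompletion`, `params2` to a `Stream`
  // (the `completion1` / `stream1` names are just for this illustration).
  const completion1 = await openai.chat.completions.create(params1);
  console.log(completion1.choices[0]?.message?.content);

  const stream1 = await openai.chat.completions.create(params2);
  for await (const chunk of stream1) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }
  process.stdout.write('\n');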

  // ---------------- Implicit params type -------------------

  // Note: the `as const` is required here so that TS can properly infer
  // the right params type.
  //
  // If you didn't include it, you'd also get an error saying that
  // `role: string` is not assignable.
  const streamingParams2 = {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say this is a test!' }],
    stream: true,
  } as const;

  // TS knows `stream2` is a `Stream` instance.
  const stream2 = await openai.chat.completions.create(streamingParams2);
  for await (const chunk of stream2) {
    process.stdout.write(chunk.choices[0]?.delta?.content || '');
  }
  process.stdout.write('\n');

  // Without `as const`, `stream` is inferred as `boolean` rather than the literal `true`.
  const streamingParams3 = {
    model: 'gpt-4',
    messages: [{ role: 'user' as const, content: 'Say this is a test!' }],
    stream: true,
  };

  // TS doesn't know whether `response` is a `Stream` or a direct `ChatCompletion`.
  const response = await openai.chat.completions.create(streamingParams3);
  if (response instanceof Stream) {
    // here TS knows the response type is a `Stream`
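    // For example (sketch), the stream can be consumed here just as above:
    for await (const chunk of response) {
      process.stdout.write(chunk.choices[0]?.delta?.content || '');
    }
    process.stdout.write('\n');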
  } else {
    // here TS knows the response type is a `ChatCompletion`
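    // For example (sketch), read the completion's content directly:
    console.log(response.choices[0]?.message?.content);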
  }

  // ---------------- Dynamic params type -------------------

  // TS knows `streamFromFn` is a `Stream`.
  const streamParamsFromFn = await createCompletionParams(true);
  const streamFromFn = await openai.chat.completions.create(streamParamsFromFn);
  console.log(streamFromFn);

  // TS knows `completionFromFn` is a `ChatCompletion`.
  const paramsFromFn = await createCompletionParams(false);
  const completionFromFn = await openai.chat.completions.create(paramsFromFn);
  console.log(completionFromFn);
}

// Dynamically construct the params object while retaining whether or
// not the response will be streamed.
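// The overload signatures below map `stream: true` to the streaming params
// type and `stream: false` to the non-streaming one, so the caller's
// `create(...)` result is narrowed accordingly (as shown in `main` above).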
export async function createCompletionParams(
  stream: true,
): Promise<OpenAI.Chat.CompletionCreateParamsStreaming>;
export async function createCompletionParams(
  stream: false,
): Promise<OpenAI.Chat.CompletionCreateParamsNonStreaming>;
export async function createCompletionParams(stream: boolean): Promise<OpenAI.Chat.CompletionCreateParams> {
  const params = {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user' as const, content: 'Hello!' }],
    stream: stream,
  };

  // <your logic here>

  return params;
}

main();