Skip to content

Commit

Permalink
Merge pull request #5 from grafana/add-proxy-submit-method
Browse files Browse the repository at this point in the history
Added an extra button for submitting non-streaming requests
  • Loading branch information
edwardcqian authored Sep 26, 2023
2 parents 6989294 + 04eee2e commit afabe03
Showing 1 changed file with 45 additions and 27 deletions.
72 changes: 45 additions & 27 deletions llmexamples-app/src/pages/ExamplePage.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ export function ExamplePage() {
// The latest reply from the LLM.
const [reply, setReply] = useState('');

const [useStream, setUseStream] = useState(false);

const [started, setStarted] = useState(false);
const [finished, setFinished] = useState(true);

Expand All @@ -30,32 +32,47 @@ export function ExamplePage() {

setStarted(true);
setFinished(false);
// Stream the completions. Each element is the next stream chunk.
const stream = llms.openai.streamChatCompletions({
model: 'gpt-3.5-turbo',
messages: [
{ role: 'system', content: 'You are a cynical assistant.' },
{ role: 'user', content: message },
],
}).pipe(
// Accumulate the stream content into a stream of strings, where each
// element contains the accumulated message so far.
llms.openai.accumulateContent(),
// The stream is just a regular Observable, so we can use standard rxjs
// functionality to update state, e.g. recording when the stream
// has completed.
// The operator decision tree on the rxjs website is a useful resource:
// https://rxjs.dev/operator-decision-tree.
finalize(() => {
setStarted(false);
setFinished(true);
})
);
// Subscribe to the stream and update the state for each returned value.
return {
enabled,
stream: stream.subscribe(setReply),
};
if (!useStream) {
// Make a single request to the LLM.
const response = await llms.openai.chatCompletions({
model: 'gpt-3.5-turbo',
messages: [
{ role: 'system', content: 'You are a cynical assistant.' },
{ role: 'user', content: message },
],
});
setReply(response.choices[0].message.content);
setStarted(false);
setFinished(true);
return { enabled, response };
} else {
// Stream the completions. Each element is the next stream chunk.
const stream = llms.openai.streamChatCompletions({
model: 'gpt-3.5-turbo',
messages: [
{ role: 'system', content: 'You are a cynical assistant.' },
{ role: 'user', content: message },
],
}).pipe(
// Accumulate the stream content into a stream of strings, where each
// element contains the accumulated message so far.
llms.openai.accumulateContent(),
// The stream is just a regular Observable, so we can use standard rxjs
// functionality to update state, e.g. recording when the stream
// has completed.
// The operator decision tree on the rxjs website is a useful resource:
// https://rxjs.dev/operator-decision-tree.
finalize(() => {
setStarted(false);
setFinished(true);
})
);
// Subscribe to the stream and update the state for each returned value.
return {
enabled,
stream: stream.subscribe(setReply),
};
}
}, [message]);

if (error) {
Expand All @@ -73,7 +90,8 @@ export function ExamplePage() {
placeholder="Enter a message"
/>
<br />
<Button type="submit" onClick={() => setMessage(input)}>Submit</Button>
<Button type="submit" onClick={() => {setMessage(input); setUseStream(true);}}>Submit Stream</Button>
<Button type="submit" onClick={() => {setMessage(input); setUseStream(false);}}>Submit Request</Button>
<br />
<div>{loading ? <Spinner /> : reply}</div>
<div>{started ? "Response is started" : "Response is not started"}</div>
Expand Down

0 comments on commit afabe03

Please sign in to comment.