# Delete `aiconfig_complete` stream response, replace with `aiconfig`
Previously we did not support streaming, so when we returned `aiconfig` it came from a blocking operation. That meant we had to set the prompt's `isRunning` state to true while we waited. Now that all run events are returned in a streaming response format, even for non-streaming models (#806), that is no longer necessary.
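
A minimal sketch of what this simplification looks like on the client (the `output_chunk` member, the stand-in types, and the handler wiring here are assumed for illustration, not the actual editor code):

```typescript
// Sketch: `aiconfig` is now the single terminal event for both streaming
// and non-streaming models, so no separate `aiconfig_complete` event (and
// no pinned `isRunning: true` while blocking) is needed.
type AIConfig = Record<string, unknown>; // stand-in for the real type

type RunPromptStreamEvent =
  | { type: "output_chunk"; data: unknown } // assumed partial-output member
  | { type: "aiconfig"; data: AIConfig };

function handleStreamEvent(event: RunPromptStreamEvent) {
  if (event.type === "aiconfig") {
    // Final consolidated config; the run is done for every model type.
    console.log("run complete", event.data);
  } else {
    // Partial output while a streaming model is still generating.
    console.log("chunk", event.data);
  }
}
```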

We also no longer use the `streamingApi` helper, since everything now goes through `streamingApiChain`, which was added in #789.
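
For comparison, a rough sketch of the two call shapes (the import path, endpoint, and event names are illustrative assumptions, not the editor's actual wiring):

```typescript
import { streamingApiChain } from "./utils/oboeHelpers"; // path illustrative

// Old helper (deleted below): fixed positional (node, callback) pairs,
// capped at three:
// streamingApi(headers, "output_chunk", onChunk, "aiconfig", onDone, "error", onErr);

// New helper: a single object maps oboe node names to callbacks, so any
// number of event types can be registered.
async function runPrompt(promptName: string) {
  return streamingApiChain(
    {
      url: "/api/run", // endpoint name assumed for illustration
      method: "POST",
      body: JSON.stringify({ prompt_name: promptName }),
    },
    {
      output_chunk: (data) => console.log("chunk", data),
      aiconfig: (data) => console.log("final config", data),
      error: (data) => console.error("server error", data),
    }
  );
}
```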

Finally, for more background on how the streaming pieces are connected, see #910, a teaching guide I built that explains how the code fits together.

## Test Plan
Both streaming and non-streaming models work as before
Rossdan Craig [email protected] committed Jan 13, 2024
1 parent e47a9b7 commit 7139c4a
Showing 5 changed files with 8 additions and 118 deletions.
75 changes: 6 additions & 69 deletions cookbooks/Gradio/huggingface.aiconfig.json

Large diffs are not rendered by default.

3 changes: 0 additions & 3 deletions python/src/aiconfig/editor/client/src/LocalEditor.tsx
@@ -101,9 +101,6 @@ export default function Editor() {
        aiconfig: (data) => {
          onStream({ type: "aiconfig", data: data as AIConfig });
        },
-       aiconfig_complete: (data) => {
-         onStream({ type: "aiconfig_complete", data: data as AIConfig });
-       },
        error: (data) => {
          onError({
            type: "error",
@@ -71,10 +71,6 @@ export type RunPromptStreamEvent =
  | {
      type: "aiconfig";
      data: AIConfig;
-   }
- | {
-     type: "aiconfig_complete";
-     data: AIConfig;
    };

export type RunPromptStreamErrorEvent = {
@@ -641,17 +637,9 @@ export default function EditorContainer({
type: "CONSOLIDATE_AICONFIG",
action: {
...action,
// Ensure we keep the prompt in a running state since this is an in-progress update
isRunning: true,
},
config: event.data,
});
} else if (event.type === "aiconfig_complete") {
dispatch({
type: "CONSOLIDATE_AICONFIG",
action,
config: event.data,
});
}
},
(event) => {
34 changes: 1 addition & 33 deletions python/src/aiconfig/editor/client/src/utils/oboeHelpers.ts
@@ -1,41 +1,9 @@
import oboe, { Options } from "oboe";

// Promisify Oboe - similar to this: https://stackoverflow.com/questions/54855494/rewrite-fetch-call-to-oboe-for-json-streams-with-typescript
-// Except it allows to use .node('*', fn) & only resolves on done
+// Except it allows to use .node('keyname', fn) & only resolves on done
// See https://medium.com/@amberlamps84/oboe-js-mongodb-express-node-js-and-the-beauty-of-streams-4a90fad5414 on using oboe vs raw streams
// (multiple chunks can be sent in single response & we only want valid json ones)
-export async function streamingApi<T>(
-  headers: Options,
-  on: string = "*",
-  fn: (data: unknown) => void,
-  on2?: string,
-  fn2?: (data: unknown) => void,
-  on3?: string,
-  fn3?: (data: unknown) => void
-): Promise<T> {
-  return new Promise((resolve, reject) => {
-    if (fn2 && on2 && fn3 && on3) {
-      oboe(headers)
-        .node(on, fn)
-        .node(on2, fn2)
-        .node(on3, fn3)
-        .done((data) => resolve(data))
-        .fail((err) => reject(err.jsonBody));
-    } else if (fn2 && on2) {
-      oboe(headers)
-        .node(on, fn)
-        .node(on2, fn2)
-        .done((data) => resolve(data))
-        .fail((err) => reject(err.jsonBody));
-    } else {
-      oboe(headers)
-        .node(on, fn)
-        .done((data) => resolve(data))
-        .fail((err) => reject(err.jsonBody));
-    }
-  });
-}
-
export async function streamingApiChain<T>(
headers: Options,
chain: { [on: string]: (data: unknown) => void }
2 changes: 1 addition & 1 deletion python/src/aiconfig/editor/server/server.py
@@ -348,7 +348,7 @@ def kill_thread(thread_id: int | None):

aiconfig_json = aiconfig.model_dump(exclude=EXCLUDE_OPTIONS) if aiconfig is not None else None
yield "["
yield json.dumps({"aiconfig_complete": aiconfig_json})
yield json.dumps({"aiconfig": aiconfig_json})
yield "]"

try:
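For context, a sketch of the response body this yields for a non-streaming model (field values are illustrative): because it is a one-element JSON array keyed by `aiconfig`, the client's oboe-based stream parser consumes it the same way as a streaming response.

```typescript
// Illustrative body yielded by the server for a non-streaming model:
// a one-element JSON array whose object carries the "aiconfig" key that
// oboe's .node("aiconfig", callback) matches once the object is complete.
const exampleResponseBody = `[{"aiconfig": {"name": "demo", "prompts": []}}]`;
console.log(JSON.parse(exampleResponseBody)[0].aiconfig);
```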
