[do not land][teaching] Proof of concept diff to show how streaming events get mapped from oboe helper for us to process #910

Closed · wants to merge 2 commits
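Before the diff itself, the mechanism being demonstrated in one sentence: server.py streams a JSON array whose elements are single-key objects named after the event, the oboe helper registers one callback per key via `.node(key, fn)`, and LocalEditor wraps each callback's payload in a typed `RunPromptStreamEvent`. A hedged sketch of that contract (types simplified; `oboe` is the real streaming-JSON parser, everything else here is illustrative):

```ts
import oboe from "oboe";

// Illustrative event type, modeled on RunPromptStreamEvent in this diff.
type StreamEvent =
  | { type: "output_chunk"; data: unknown }
  | { type: "aiconfig"; data: unknown };

// The server yields, e.g., {"output_chunk": {...}} — the string passed to
// .node() must match that JSON key exactly, or the callback never fires.
function listen(url: string, onStream: (event: StreamEvent) => void) {
  oboe(url)
    .node("output_chunk", (data) => {
      onStream({ type: "output_chunk", data });
    })
    .node("aiconfig", (data) => {
      onStream({ type: "aiconfig", data });
    });
}
```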
6 changes: 5 additions & 1 deletion python/src/aiconfig/editor/client/src/LocalEditor.tsx
```diff
@@ -95,8 +95,12 @@ export default function Editor() {
       },
     },
     {
+      // If you change the `output_chunk` var below on L102,
+      // you will notice that streaming no longer works because there
+      // isn't a key that matches any of the json keys we pass back from
+      // server.py
       output_chunk: (data) => {
-        onStream({ type: "output_chunk", data: data as Output });
+        onStream({ type: "this_can_be_anything", data: data as Output });
      },
      aiconfig: (data) => {
        onStream({ type: "aiconfig", data: data as AIConfig });
```
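The comment in this hunk is the crux of the teaching diff: the chain key `output_chunk` is a wire-level contract with server.py, while the event `type` string is client-internal and can be renamed freely (hence `this_can_be_anything` still streams correctly). A sketch of the failure mode if the key itself were renamed (`my_renamed_key` is hypothetical):

```ts
// Two illustrative chains showing which side of the contract each name is on.
const working = {
  // Matches json.dumps({"output_chunk": ...}) from server.py → fires per chunk.
  output_chunk: (data: unknown) => console.log("chunk", data),
};

const broken = {
  // The server never emits a "my_renamed_key" node, so this callback never
  // fires and the UI sees no streaming output at all.
  my_renamed_key: (data: unknown) => console.log("never reached", data),
};
```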
```diff
@@ -65,7 +65,7 @@ type Props = {
 
 export type RunPromptStreamEvent =
   | {
-      type: "output_chunk";
+      type: "this_can_be_anything";
       data: Output;
     }
   | {
@@ -630,7 +630,7 @@ export default function EditorContainer({
       const serverConfigResponse = await runPromptCallback(
         promptName,
         (event) => {
-          if (event.type === "output_chunk") {
+          if (event.type === "this_can_be_anything") {
            dispatch({
              type: "STREAM_OUTPUT_CHUNK",
              id: promptId,
@@ -828,6 +828,8 @@ export default function EditorContainer({
     return () => clearInterval(interval);
   }, [getServerStatusCallback, serverStatus]);
 
+  const runningPromptId: string | undefined = aiconfigState._ui.runningPromptId;
+
   return (
     <AIConfigContext.Provider value={contextValue}>
       <Notifications />
@@ -907,6 +909,8 @@ export default function EditorContainer({
           />
         </div>
         {aiconfigState.prompts.map((prompt: ClientPrompt, i: number) => {
+          const isAnotherPromptRunning =
+            runningPromptId !== undefined && runningPromptId !== prompt._ui.id;
           return (
             <Stack key={prompt._ui.id}>
               <Flex mt="md">
@@ -925,6 +929,7 @@ export default function EditorContainer({
                 onUpdateModelSettings={onUpdatePromptModelSettings}
                 onUpdateParameters={onUpdatePromptParameters}
                 defaultConfigModelName={aiconfigState.metadata.default_model}
+                isRunButtonDisabled={isAnotherPromptRunning}
               />
             </Flex>
             <div className={classes.addPromptRow}>
```
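The net effect of the EditorContainer changes: while any prompt is running, every other prompt's run button is disabled, but the running prompt keeps its own button active so it can still render the cancel state. A minimal sketch of the predicate, assuming the simplified UI state shape from this diff:

```ts
type UIState = { runningPromptId?: string };

// Disabled only when a *different* prompt is mid-run.
function isRunButtonDisabled(ui: UIState, promptId: string): boolean {
  return ui.runningPromptId !== undefined && ui.runningPromptId !== promptId;
}
```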
```diff
@@ -181,22 +181,38 @@ function reduceConsolidateAIConfig(
       );
     }
     case "RUN_PROMPT": {
-      return reduceReplacePrompt(state, action.id, (prompt) => {
-        const responsePrompt = responseConfig.prompts.find(
-          (resPrompt) => resPrompt.name === prompt.name
-        );
-
-        const outputs = responsePrompt?.outputs ?? prompt.outputs;
-
-        return {
-          ...prompt,
-          _ui: {
-            ...prompt._ui,
-            isRunning: action.isRunning ?? false,
-          },
-          outputs,
-        };
-      });
+      // Note: We are relying on the callsite to explicitly set `isRunning`
+      // to true when it's needed. If isRunning is not explicitly defined
+      // in the action, and we have called consolidate aiconfig, then we
+      // assume that we are finished running the prompt.
+      const isRunning = action.isRunning ?? false;
+      const stateWithUpdatedRunningPromptId = {
+        ...state,
+        _ui: {
+          ...state._ui,
+          runningPromptId: isRunning ? action.id : undefined,
+        },
+      };
+      return reduceReplacePrompt(
+        stateWithUpdatedRunningPromptId,
+        action.id,
+        (prompt) => {
+          const responsePrompt = responseConfig.prompts.find(
+            (resPrompt) => resPrompt.name === prompt.name
+          );
+
+          const outputs = responsePrompt?.outputs ?? prompt.outputs;
+
+          return {
+            ...prompt,
+            _ui: {
+              ...prompt._ui,
+              isRunning,
+            },
+            outputs,
+          };
+        }
+      );
     }
     case "UPDATE_PROMPT_INPUT": {
       return reduceReplacePrompt(state, action.id, consolidatePrompt);
```
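The `action.isRunning ?? false` default encodes a small protocol: streaming callsites dispatch consolidation with `isRunning: true` per chunk, and the final consolidation simply omits the flag, which clears both the prompt's `isRunning` and the top-level `runningPromptId` in one pass. A hedged sketch of the two dispatch shapes (action and field names from this diff, payloads simplified):

```ts
type ConsolidateRunPromptAction = {
  type: "RUN_PROMPT";
  id: string;
  isRunning?: boolean; // absent means "run finished"
};

// Mid-stream chunk: keep the prompt (and runningPromptId) marked as running.
const midStream: ConsolidateRunPromptAction = {
  type: "RUN_PROMPT",
  id: "prompt-1",
  isRunning: true,
};

// Final response: flag omitted, so `action.isRunning ?? false` resolves to
// false and runningPromptId resets to undefined.
const finished: ConsolidateRunPromptAction = { type: "RUN_PROMPT", id: "prompt-1" };
```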
```diff
@@ -226,19 +242,19 @@ export default function aiconfigReducer(
       const prompts = state.prompts.map((prompt) => {
         if (prompt.outputs) {
           return {
-            ...prompt,
-            outputs: undefined
-          }
+            ...prompt,
+            outputs: undefined,
+          };
         } else {
           return prompt;
         }
       });
 
-
-      for (const prompt of prompts) {
-        if (prompt.outputs) {
-          delete prompt.outputs;
-        }}
+      for (const prompt of prompts) {
+        if (prompt.outputs) {
+          delete prompt.outputs;
+        }
+      }
 
       return {
         ...dirtyState,
```
```diff
@@ -254,7 +270,14 @@ export default function aiconfigReducer(
       };
     }
     case "RUN_PROMPT": {
-      return reduceReplacePrompt(dirtyState, action.id, (prompt) => ({
+      const runningState = {
+        ...dirtyState,
+        _ui: {
+          ...dirtyState._ui,
+          runningPromptId: action.id,
+        },
+      };
+      return reduceReplacePrompt(runningState, action.id, (prompt) => ({
         ...prompt,
         _ui: {
           ...prompt._ui,
@@ -264,7 +287,14 @@ export default function aiconfigReducer(
       }));
     }
     case "RUN_PROMPT_ERROR": {
-      return reduceReplacePrompt(dirtyState, action.id, (prompt) => ({
+      const nonRunningState = {
+        ...dirtyState,
+        _ui: {
+          ...dirtyState._ui,
+          runningPromptId: undefined,
+        },
+      };
+      return reduceReplacePrompt(nonRunningState, action.id, (prompt) => ({
         ...prompt,
         _ui: {
           ...prompt._ui,
```
```diff
@@ -30,6 +30,7 @@ type Props = {
     newParameters: Record<string, unknown>
   ) => void;
   defaultConfigModelName?: string;
+  isRunButtonDisabled?: boolean;
 };
 
 export default memo(function PromptContainer({
@@ -43,6 +44,7 @@ export default memo(function PromptContainer({
   onUpdateModel,
   onUpdateModelSettings,
   onUpdateParameters,
+  isRunButtonDisabled = false,
 }: Props) {
   const promptId = prompt._ui.id;
   const onChangeInput = useCallback(
@@ -119,6 +121,7 @@ export default memo(function PromptContainer({
           onCancelRun={onCancelRun}
           onRunPrompt={runPrompt}
           isRunning={prompt._ui.isRunning}
+          isRunButtonDisabled={isRunButtonDisabled}
         />
         <PromptOutputBar />
         {prompt.outputs && <PromptOutputsRenderer outputs={prompt.outputs} />}
```
```diff
@@ -18,6 +18,7 @@ type Props = {
   onCancelRun: () => Promise<void>;
   onRunPrompt: () => Promise<void>;
   isRunning?: boolean;
+  isRunButtonDisabled?: boolean;
 };
 
 type ErrorFallbackProps = {
@@ -75,6 +76,7 @@ export default memo(function PromptInputRenderer({
   onCancelRun,
   onRunPrompt,
   isRunning = false,
+  isRunButtonDisabled = false,
 }: Props) {
   const { classes } = useStyles();
 
@@ -93,6 +95,7 @@ export default memo(function PromptInputRenderer({
       <div className={classes.promptInputButtonWrapper}>
         <RunPromptButton
           isRunning={isRunning}
+          disabled={isRunButtonDisabled}
           cancel={onCancelRun}
           runPrompt={onRunPrompt}
         />
```
1 change: 1 addition & 0 deletions python/src/aiconfig/editor/client/src/shared/types.ts
```diff
@@ -21,6 +21,7 @@ export type ClientAIConfig = Omit<AIConfig, "prompts"> & {
   prompts: ClientPrompt[];
   _ui: {
     isDirty?: boolean;
+    runningPromptId?: string;
   };
 };
```
3 changes: 3 additions & 0 deletions python/src/aiconfig/editor/client/src/utils/oboeHelpers.ts
```diff
@@ -44,6 +44,9 @@ export async function streamingApiChain<T>(
   let oboeInstance = oboe(headers);
   Object.keys(chain).forEach((on) => {
     const fn = chain[on];
+    // We need the `on` key to match what's passed from server.py,
+    // while the function callback (defined in LocalEditor) takes
+    // the data which matches that key in that JSON object
     oboeInstance = oboeInstance.node(on, fn);
   });
```
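For clarity, the loop above is just programmatic chaining: each key in the chain record becomes one `.node()` registration. A sketch of what the `forEach` unrolls to for the chain LocalEditor passes in (the request options here are assumed; the real helper builds them from its arguments):

```ts
import oboe from "oboe";

// Assumed request options; "/api/run_prompt" is a hypothetical endpoint.
const headers = { url: "/api/run_prompt", method: "POST" };

const instance = oboe(headers)
  .node("output_chunk", (data) => {
    /* LocalEditor's output_chunk callback runs here */
  })
  .node("aiconfig", (data) => {
    /* LocalEditor's aiconfig callback runs here */
  });
```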
1 change: 1 addition & 0 deletions python/src/aiconfig/editor/server/server.py
```diff
@@ -334,6 +334,7 @@ def kill_thread(thread_id: int | None):
                 }  # type: ignore
             )
             yield "["
+            # Below is where we set the "output_chunk" key in the JSON response
             yield json.dumps({"output_chunk": accumulated_output.to_json()})
             yield "]"
```
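Putting the server side together: the endpoint emits one JSON array, flushed incrementally between the yielded `"["` and `"]"`, where each element is a single-key object naming the event. A sketch of the resulting wire shape as a TypeScript type (the exact set of keys beyond `output_chunk` and `aiconfig` is assumed from the client chain in LocalEditor):

```ts
// One element per yielded json.dumps(...) call; oboe fires the matching
// .node(key, fn) callback as each element's key arrives on the wire.
type StreamedItem =
  | { output_chunk: unknown } // accumulated output so far
  | { aiconfig: unknown }; // final config once the run completes

type RunPromptWireFormat = StreamedItem[];
```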