# Implement STREAM_AICONFIG_CHUNK action
This is the biggest part of this diff stack. The functionality is still exactly the same as before, but now we can dispatch `"STREAM_AICONFIG_CHUNK"` directly instead of as a sub-action within `"CONSOLIDATE_AICONFIG"`. I think this is much cleaner.

Another benefit is that this removes the need to check the `isRunning` flags within the consolidated `"RUN_PROMPT"` sub-action, since we now know that the only times it can be called are at the beginning of a run and at the (non-streaming) end. In the next PR I am going to split this into two separate actions (sketched right after this list):
1. `"RUN_PROMPT_START"`
2. `"RUN_PROMPT_SUCCESS"`


## Test Plan
Streaming and non-streaming models still work. Cancelling a run still stops it and finalizes the state as expected.

https://github.com/lastmile-ai/aiconfig/assets/151060367/cf29ee99-9ede-4c5e-99a7-a7f7816adfe1
Rossdan Craig <[email protected]> committed on Jan 14, 2024 (commit fbfddb6, 1 parent: 3269e8a).
Showing 2 changed files with 24 additions and 72 deletions.
In `EditorContainer`, the `aiconfig_chunk` event handler now dispatches `STREAM_AICONFIG_CHUNK` directly:

```diff
@@ -638,15 +638,9 @@ export default function EditorContainer({
         });
       } else if (event.type === "aiconfig_chunk") {
         dispatch({
-          type: "CONSOLIDATE_AICONFIG",
-          action: {
-            type: "STREAM_AICONFIG_CHUNK",
-            id: promptId,
-            cancellationToken,
-            // Keep the prompt running state until the end of streaming
-            isRunning: true,
-          },
+          type: "STREAM_AICONFIG_CHUNK",
           config: event.data,
+          cancellationToken,
         });
       } else if (event.type === "stop_streaming") {
         // Pass this event at the end of streaming to signal
```
`python/src/aiconfig/editor/client/src/components/aiconfigReducer.ts` (22 additions, 64 deletions):
```diff
@@ -29,7 +29,6 @@ export type MutateAIConfigAction =
 
 export type ConsolidateAIConfigSubAction =
   | AddPromptAction
   | RunPromptAction
-  | StreamAIConfigChunkAction
   | UpdatePromptInputAction;
 
 export type ConsolidateAIConfigAction = {
@@ -82,9 +81,8 @@ export type SetNameAction = {
 
 export type StreamAIConfigChunkAction = {
   type: "STREAM_AICONFIG_CHUNK";
-  id: string;
+  config: AIConfig;
   cancellationToken?: string;
-  isRunning?: boolean;
 };
 
 export type StreamOutputChunkAction = {
```
```diff
@@ -201,57 +199,16 @@ function reduceConsolidateAIConfig(
         consolidatePrompt
       );
     }
+    // Next PR: Split "RUN_PROMPT" into two actions:
+    // 1) "RUN_PROMPT_START"
+    // 2) "RUN_PROMPT_SUCCESS"
+    // 3) (Already exists) "RUN_PROMPT_ERROR"
     case "RUN_PROMPT": {
-      // Note: If we are calling "RUN_PROMPT" directly as a dispatched event
-      // type, we automatically set the state there to `isRunning` for that
-      // prompt. That logic does not happen here, it happens in
-      // `aiconfigReducer`.
-      // If we are calling "RUN_PROMPT" indirectly via the action of a
-      // "CONSOLIDATE_AICONFIG" dispatch, we end up here. We need to check
-      // if we actually want to set the prompt state to `isRunning`
-      const isRunning = action.isRunning ?? false;
-      const stateWithUpdatedRunningPromptId = {
-        ...state,
-        _ui: {
-          ...state._ui,
-          runningPromptId: isRunning ? action.id : undefined,
-        },
-      };
-      return reduceReplacePrompt(
-        stateWithUpdatedRunningPromptId,
-        action.id,
-        (prompt) => {
-          const responsePrompt = responseConfig.prompts.find(
-            (resPrompt) => resPrompt.name === prompt.name
-          );
-
-          const outputs = responsePrompt?.outputs ?? prompt.outputs;
-
-          return {
-            ...prompt,
-            _ui: {
-              ...prompt._ui,
-              isRunning,
-            },
-            outputs,
-          };
-        }
-      );
-    }
-    case "STREAM_AICONFIG_CHUNK": {
-      // Note: If we are calling "RUN_PROMPT" directly as a dispatched event
-      // type, we automatically set the state there to `isRunning` for that
-      // prompt. That logic does not happen here, it happens in
-      // `aiconfigReducer`.
-      // If we are calling "RUN_PROMPT" indirectly via the action of a
-      // "CONSOLIDATE_AICONFIG" dispatch, we end up here. We need to check
-      // if we actually want to set the prompt state to `isRunning`
-      const isRunning = action.isRunning ?? false;
       const stateWithUpdatedRunningPromptId = {
         ...state,
         _ui: {
           ...state._ui,
-          runningPromptId: isRunning ? action.id : undefined,
+          runningPromptId: undefined,
         },
       };
       return reduceReplacePrompt(
@@ -268,7 +225,7 @@
           ...prompt,
           _ui: {
             ...prompt._ui,
-            isRunning,
+            isRunning: false,
           },
           outputs,
         };
```
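
With streaming pulled out, the consolidated `RUN_PROMPT` branch above is only reached when a non-streaming run finishes, which is why it can unconditionally clear `runningPromptId` and set `isRunning: false`. A sketch of the kind of dispatch that lands there, following the shape the old `EditorContainer` code used (the actual call site is outside this diff, so treat the details as assumptions):

```typescript
dispatch({
  type: "CONSOLIDATE_AICONFIG",
  // The sub-action no longer needs an isRunning flag.
  action: { type: "RUN_PROMPT", id: promptId },
  config: event.data, // final AIConfig from the completed run
});
```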
```diff
@@ -393,21 +350,22 @@ export default function aiconfigReducer(
       };
     }
     case "STREAM_AICONFIG_CHUNK": {
-      const runningState = {
-        ...dirtyState,
-        _ui: {
-          ...dirtyState._ui,
-          runningPromptId: action.id,
-        },
+      const replaceOutput = (statePrompt: ClientPrompt) => {
+        const responsePrompt = action.config.prompts.find(
+          (resPrompt) => resPrompt.name === statePrompt.name
+        );
+        return {
+          // Note: Don't need to set `isRunning` or `cancellationToken`
+          // because we already call RUN_PROMPT earlier in `onRunPrompt`
+          ...statePrompt,
+          outputs: responsePrompt?.outputs,
+        } as ClientPrompt;
       };
-      return reduceReplacePrompt(runningState, action.id, (prompt) => ({
-        ...prompt,
-        _ui: {
-          ...prompt._ui,
-          cancellationToken: action.cancellationToken,
-          isRunning: true,
-        },
-      }));
+      return reduceReplacePrompt(
+        dirtyState,
+        dirtyState._ui.runningPromptId as string,
+        replaceOutput
+      );
     }
     case "STREAM_OUTPUT_CHUNK": {
       return reduceReplacePrompt(dirtyState, action.id, (prompt) => ({
```
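
To see the whole flow, here is a minimal sketch of the reducer across a streamed run. `initialState`, `promptId`, `cancellationToken`, and `chunkConfig` are placeholders, and the exact `RUN_PROMPT` action fields are assumptions based on the comments above. The point is that the chunk action no longer names a prompt: the reducer finds it via `_ui.runningPromptId`, and the `as string` cast encodes the assumption that a chunk can only arrive while a prompt is running:

```typescript
// 1) onRunPrompt dispatches RUN_PROMPT directly, which marks the prompt
//    as running and records _ui.runningPromptId.
let state = aiconfigReducer(initialState, {
  type: "RUN_PROMPT",
  id: promptId,
  cancellationToken,
});

// 2) Each "aiconfig_chunk" event then just swaps in the latest outputs
//    for the running prompt; no id or isRunning bookkeeping needed.
state = aiconfigReducer(state, {
  type: "STREAM_AICONFIG_CHUNK",
  config: chunkConfig, // AIConfig snapshot carried by the chunk event
});
```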
