Improvements and bug fixes: added support for o1-mini, o3-mini, o1, o1-preview
Paul-Borisov committed Feb 8, 2025
1 parent e3ffbcd commit d53df17
Showing 18 changed files with 164 additions and 46 deletions.
8 changes: 8 additions & 0 deletions README.md
@@ -77,6 +77,14 @@ The integrations available in this release include:
- Parallel function calling, image generation and recognition are seamlessly supported for new models.
- Updated documentation with these new models.

- Version 1.3.2 released on Feb 08, 2025.
- I added support for two newer language models, o1-mini and o3-mini, which became available in Azure OpenAI.
- These Azure OpenAI models did not support streaming options; I added exemption logic for them (summarised in the sketch after this file's diff).
- I added support for the regular OpenAI language models o1-mini, o1-preview, o1, o1-2024-12-17, and o3-mini.
- The full-scale models o1 and o1-2024-12-17 did not support streaming options either; the same exemption logic covers them.
- Added an informational message to the progress shimmer: it displays a header when streaming is enabled but unsupported by the selected model.
- Bug fix: voice output played the first entry instead of the selected one.

### Full-Scale Setup

![Data access diagram](docs/data-access-diagram.png "Data access diagram")
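The streaming behaviour described in the 1.3.2 notes can be summarised as a small lookup. This is an illustration only, not code shipped in this commit; the actual guard is `ChatHelper.isStreamingSupported`, added in the ChatHelper.ts diff further down, and the Azure column simply reflects that guard, which disables streaming for every o-series model on Azure endpoints.

```typescript
// Streaming support as of February 2025, per the changelog above (illustrative summary, not shipped code).
// true = streamed responses worked; false = the web part falls back to a non-streamed request.
const streamingSupport: { [model: string]: { azureOpenAi: boolean; nativeOpenAi: boolean } } = {
  'o1-mini':       { azureOpenAi: false, nativeOpenAi: true },
  'o3-mini':       { azureOpenAi: false, nativeOpenAi: true },
  'o1-preview':    { azureOpenAi: false, nativeOpenAi: true },
  'o1':            { azureOpenAi: false, nativeOpenAi: false },
  'o1-2024-12-17': { azureOpenAi: false, nativeOpenAi: false },
};

streamingSupport['o3-mini'].nativeOpenAi; // true: streams against native OpenAI, but not against Azure OpenAI
```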
2 changes: 1 addition & 1 deletion spfx-latest/config/package-solution.json
@@ -4,7 +4,7 @@
"name": "azure-openai-chat",
"title": "Azure OpenAI Chat Web Part",
"id": "64e358ba-f88c-4fee-9a8a-6db06299d90a",
"version": "1.3.1.0",
"version": "1.3.2.0",
"includeClientSideAssets": true,
"skipFeatureDeployment": true,
"isDomainIsolated": false,
16 changes: 8 additions & 8 deletions spfx-latest/package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion spfx-latest/package.json
@@ -1,6 +1,6 @@
{
"name": "azure-openai-chat",
"version": "1.3.1",
"version": "1.3.2",
"private": true,
"engines": {
"node": ">=16.13.0 <17.0.0 || >=18.17.1 <19.0.0"
4 changes: 3 additions & 1 deletion spfx-latest/src/components/AzureOpenAiChatLoader.ts
@@ -196,7 +196,7 @@ export default class AzureOpenAiChatLoader extends BaseClientSideWebPart<IAzureO
}

protected get dataVersion(): Version {
return Version.parse('1.0');
return Version.parse('1.3.2');
}

protected getPropertyPaneConfiguration(): IPropertyPaneConfiguration {
@@ -274,6 +274,8 @@ export default class AzureOpenAiChatLoader extends BaseClientSideWebPart<IAzureO
{ key: 'gpt-4-1106-preview', text: `${strings.TextGpt4Turbo} (${strings.TextPreview})` },
{ key: 'gpt-4o-mini', text: strings.TextGpt4oMini },
{ key: 'gpt-4o', text: strings.TextGpt4o },
{ key: 'o1-mini', text: strings.TextO1Mini },
{ key: 'o3-mini', text: strings.TextO3Mini },
],
properties: this.properties,
}),
22 changes: 17 additions & 5 deletions spfx-latest/src/components/ContentPanel.tsx
@@ -314,7 +314,15 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
setModel
);

const panelContentPane = elements.getPanelContentPane(refContentPane, chatHistory, isCustomPanelOpen, rows, isProgress);
const panelContentPane = elements.getPanelContentPane(
refContentPane,
chatHistory,
isCustomPanelOpen,
rows,
isProgress,
props,
model
);

const promptContainer = elements.getPromptContainer(
refPromptArea,
@@ -407,6 +415,9 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
const maxHeight = 100; // px
const padding = 15; // px

// Strip trailing newline chars (added on copy-paste)
e.target.value = e.target.value.replace(/\n+$/, '');

if (!e.target.value) {
e.target.style.height = `${minHeight}px`;
setIsSubmitDisabled(true);
@@ -569,7 +580,7 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
};

if (props.apiService.isConfigured()) {
if (!props.streaming) {
if (!ChatHelper.isStreamingSupported(payload.model, props)) {
props.apiService.callQueryText(payload).then((response) => {
unstable_batchedUpdates(() => {
handleResponse(response);
@@ -714,8 +725,9 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
? `.${styles.customPanel} div[id='${chatMessageId}']`
: `div[id='${chatMessageId}']`;

const inputText = HtmlHelper.stripHtml(r.content);
const getAudio =
isAi && props.voiceOutput && ChatHelper.supportsTextToSpeech(props)
isAi && props.voiceOutput && ChatHelper.supportsTextToSpeech(props) //&& inputText.length <= 4096 // tts input supports max 4096 chars
? (text: string) => new SpeechService(props.apiService).callTextToSpeech(text)
: undefined;

@@ -804,8 +816,8 @@ const ContentPanel: FunctionComponent<IContentPanelProps> = ({ props }) => {
<>
{isAi && props.voiceOutput ? (
<VoiceOutput
querySelector={chatMessageIdSelector}
text={HtmlHelper.stripHtml(r.content)}
//querySelector={chatMessageIdSelector}
text={inputText}
tooltip={strings.TextVoiceOutput}
getAudio={getAudio}
/>
44 changes: 34 additions & 10 deletions spfx-latest/src/components/ContentPanelElements.tsx
@@ -13,6 +13,7 @@ import { IChatHistory } from 'shared/model/IChat';
import SessionStorageService from 'shared/services/SessionStorageService';
import { IChatProps } from './Chat';
import styles from './Chat.module.scss';
import { IAzureOpenAiChatProps } from './IAzureOpenAiChatProps';
import * as Icons from './Icons';
import Prompts from './Prompts';
import UploadFiles from './UploadFiles';
@@ -86,7 +87,7 @@ export default class ContentPanelElements {
const isVisionSupported = props.vision;
const isPdfSupported = true;
// Upload button should be visible only if Enable integrations is turned on in web part settings.
return props.functions && (isVisionSupported || isPdfSupported) ? (
return isVisionSupported || isPdfSupported ? (
<>
{getSimpleDialog(strings.TextUpload, strings.TextUploadFiles, showUploadDialog, setShowUploadDialog, [
<UploadFiles
@@ -127,20 +128,31 @@ export default class ContentPanelElements {
return star ? Icons.getStarIcon() : Icons.getLighteningIcon();
}

private getModelText(languageModel: string, defaultModelText?: string) {
const lcLanguageModel = languageModel.toLocaleLowerCase();
const modelTexts = {
'4o': strings.TextGpt4o,
'4o-mini': strings.TextGpt4oMini,
'o1-mini': strings.TextO1Mini,
'o1-preview': strings.TextO1Preview,
o1: strings.TextO1,
'o3-mini': strings.TextO3Mini,
};
for (const key of Object.keys(modelTexts)) {
if (lcLanguageModel.endsWith(key)) return modelTexts[key];
}
return defaultModelText ?? languageModel;
}

private getLanguageModelText(languageModel: string, isGpt3: boolean, isGpt4: boolean, isGpt4Turbo: boolean): string {
if (isGpt3) {
return strings.TextGpt35;
} else if (isGpt4Turbo) {
return strings.TextGpt4Turbo;
} else if (isGpt4) {
if (languageModel.toLocaleLowerCase().endsWith('4o')) {
return strings.TextGpt4o;
} else if (languageModel.toLocaleLowerCase().endsWith('4o-mini')) {
return strings.TextGpt4oMini;
}
return strings.TextGpt4;
return this.getModelText(languageModel, strings.TextGpt4);
} else {
return languageModel;
return this.getModelText(languageModel);
}
}

@@ -199,11 +211,23 @@
chatHistory: IChatHistory[],
isCustomPanelOpen: boolean,
rows: JSX.Element[],
isProgress: boolean
isProgress: boolean,
contentProps: IAzureOpenAiChatProps,
model: string
): JSX.Element {
const props = this.props;

const noUpperLanguageSelector = !props.promptAtBottom && !(props.languageModels?.length > 1);
let shimmerInformationalHeader: React.ReactNode;
if (contentProps.streaming && !ChatHelper.isStreamingSupported(model, contentProps)) {
shimmerInformationalHeader = (
<div>
<strong>
{model}: {strings.TextStreamingUnsupported}
</strong>
</div>
);
}
return (
<div
ref={refContentPane}
Expand All @@ -218,7 +242,7 @@ export default class ContentPanelElements {
>
<div className={styles.responseRowsContainer}>
{rows}
{isProgress && <CustomShimmer />}
{isProgress && <CustomShimmer header={shimmerInformationalHeader} />}
</div>
</div>
);
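The new private getModelText helper resolves display labels by matching known suffixes of the deployment name, so Azure deployments with custom prefixes still map to a friendly label. Below is a standalone sketch of that idea, with hard-coded labels in place of the localized strings resource and a hypothetical deployment name:

```typescript
// Standalone sketch of the suffix-matching used by getModelText above (not the component's API);
// labels are hard-coded here instead of coming from the strings resource.
const labels: { [suffix: string]: string } = {
  '4o': 'GPT-4o',
  '4o-mini': 'GPT-4o Mini',
  'o1-mini': 'O1 Mini',
  'o1-preview': 'O1 Preview',
  'o1': 'O1',
  'o3-mini': 'O3 Mini',
};

function resolveModelLabel(languageModel: string, defaultLabel?: string): string {
  const lcModel = languageModel.toLocaleLowerCase();
  for (const suffix of Object.keys(labels)) {
    if (lcModel.endsWith(suffix)) return labels[suffix];
  }
  return defaultLabel ?? languageModel;
}

resolveModelLabel('contoso-gpt-o3-mini'); // 'O3 Mini' (hypothetical deployment name; prefix ignored, suffix matched)
resolveModelLabel('my-custom-model');     // 'my-custom-model' (falls back to the raw name)
```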
39 changes: 32 additions & 7 deletions spfx-latest/src/helpers/ChatHelper.ts
@@ -34,29 +34,33 @@ export default class ChatHelper {
const largeContentDeduction = 1500;

let returnValue = Math.floor((4 * 1024 - responseTokens) * averageCharsPerToken);
if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model)) {
if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model) || /o1-mini|o1-preview/i.test(model)) {
returnValue = Math.floor((128 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
} else if (/32k/i.test(model)) {
returnValue = Math.floor((32 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
} else if (/16k|-1106/i.test(model)) {
returnValue = Math.floor((16 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
} else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
returnValue = Math.floor((8 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
} else if (/o\d/i.test(model)) {
returnValue = Math.floor((200 * 1024 - responseTokens) * averageCharsPerToken) - largeContentDeduction;
}
return returnValue - 200; // 200 extra chars reserved for service needs (redundancy).
}

public static maxRequestLength(model: string, responseTokens: number, chatHistoryLength: number): number {
// maxRequestLength = max allowed number of characters in the prompt.
let maxCharacters = 4000; // GPT-35-turbo, 4k
if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model)) {
maxCharacters = 125000; // ~ (128 * 1024 * 3.6) / 3.75 long questions - answers.
let maxCharacters = 4_000; // GPT-35-turbo, 4k
if (/4o/i.test(model) || /4-(1106|turbo|vision)/i.test(model) || /o1-mini|o1-preview/i.test(model)) {
maxCharacters = 125_000; // ~ (128 * 1024 * 3.6) / 3.75 long questions - answers.
} else if (/32k/i.test(model)) {
maxCharacters = 30000; // ~ (32 * 1024 * 3.6) / 3.75 long questions - answers.
maxCharacters = 30_000; // ~ (32 * 1024 * 3.6) / 3.75 long questions - answers.
} else if (/16k|-1106/i.test(model)) {
maxCharacters = 15000; // ~ (16 * 1024 * 3.6) / 3.75 long questions - answers.
maxCharacters = 15_000; // ~ (16 * 1024 * 3.6) / 3.75 long questions - answers.
} else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
maxCharacters = 7500; // ~ (8 * 1024 * 3.6) / 3.75 long questions - answers.
maxCharacters = 7_500; // ~ (8 * 1024 * 3.6) / 3.75 long questions - answers.
} else if (/o\d/i.test(model)) {
maxCharacters = 195_000; // ~ (200 * 1024 * 3.6) / 3.75 long questions - answers.
}
const maxLength = this.maxContentLength(model, responseTokens);
const allowedLength = maxLength - chatHistoryLength;
@@ -74,6 +78,12 @@
returnValue = defaultResponseTokens;
} else if (/8k/i.test(model) || /gpt-4/i.test(model)) {
returnValue = defaultResponseTokens;
} else if (/o\d-mini/i.test(model)) {
returnValue = 65_536;
} else if (/o1-preview/i.test(model)) {
returnValue = 32_768;
} else if (/o\d/i.test(model)) {
returnValue = 100_000;
}
return returnValue;
}
@@ -137,6 +147,21 @@
}
}

public static isStreamingSupported = (model: string, props: IAzureOpenAiChatProps) => {
// As of February 2025, the model o1-mini of Azure OpenAI did not support streaming and function calling.
// - At the same time, native OpenAI models o1-mini, o3-mini, o1-preview supported streaming, but did not support function calling.
// - The full-scale native OpenAI models o1 and o1-2024-12-17 did not support streaming and function calling.
// On attempts to use streaming outputs, they have thrown errors. Note that this behaviour might change later.
if (/^o\d$|o\d-\d{4}-\d{2}-\d{2}/i.test(model?.toLocaleLowerCase())) return false;

return (
props.streaming &&
(!/o\d/.test(model) ||
props.apiService?.isNative(props.endpointBaseUrlForOpenAi) ||
props.apiService?.isOpenAiNativeUrl(props.endpointBaseUrlForOpenAi))
);
};

public static formatDate(date: string | Date, locale: string): string {
if (typeof date === 'string') date = new Date(date);
return new Date().getFullYear() !== date.getFullYear()
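A hedged usage sketch of the new ChatHelper.isStreamingSupported guard follows; the import paths and the props value are assumptions, and the commented results follow the regexes in the hunk above:

```typescript
import ChatHelper from 'helpers/ChatHelper'; // import path assumed from the repo layout
import { IAzureOpenAiChatProps } from 'components/IAzureOpenAiChatProps'; // assumed path

// 'props' stands for the configured web part properties with streaming enabled; only the
// fields used by the guard (streaming, apiService, endpointBaseUrlForOpenAi) matter here.
declare const props: IAzureOpenAiChatProps;

ChatHelper.isStreamingSupported('o1', props);            // false: full-scale o1 models never stream
ChatHelper.isStreamingSupported('o1-2024-12-17', props); // false: dated full-scale snapshots never stream
ChatHelper.isStreamingSupported('o3-mini', props);       // true only if the configured endpoint is native OpenAI
ChatHelper.isStreamingSupported('gpt-4o', props);        // follows props.streaming, as before this commit
```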
5 changes: 5 additions & 0 deletions spfx-latest/src/loc/en-us.js
@@ -73,6 +73,10 @@ define([], function () {
TextGpt4o: 'GPT-4o',
TextGpt4oMini: 'GPT-4o Mini',
TextGpt4Turbo: 'GPT-4 Turbo',
TextO1Mini: 'O1 Mini',
TextO1Preview: 'O1 Preview',
TextO1: 'O1',
TextO3Mini: 'O3 Mini',
TextHideMySharedChats: 'Hide my chats',
TextInvalidListUrl: 'Invalid list URL. List with the same name already exists at the site',
TextLanguage: 'Language',
@@ -102,6 +106,7 @@ define([], function () {
TextSharedChats: 'Shared chats',
TextShareWith: 'Specific people to share with (if you want to limit access to this chat, max 15 persons)',
TextStop: 'Stop',
TextStreamingUnsupported: 'streaming option is unsupported for this model',
TextSubmit: 'Submit',
TextSummarizePdf: 'Summarise PDF content',
TextUndeterminedError: 'Unexpected error',
5 changes: 5 additions & 0 deletions spfx-latest/src/loc/fi-fi.js
@@ -72,6 +72,10 @@ define([], function () {
TextGpt4o: 'GPT-4o',
TextGpt4oMini: 'GPT-4o Mini',
TextGpt4Turbo: 'GPT-4 Turbo',
TextO1Mini: 'O1 Mini',
TextO1Preview: 'O1 Preview',
TextO1: 'O1',
TextO3Mini: 'O3 Mini',
TextHideMySharedChats: 'Piilota chattini',
TextInvalidListUrl: 'Virheellinen luettelon URL-osoite. Luettelo samalla nimella on jo olemassa sivustolla',
TextLanguage: 'Kieli',
@@ -101,6 +105,7 @@ define([], function () {
TextSharedChats: 'Jaetut chatit',
TextShareWith: 'Tietyt ihmiset, joiden kanssa jakaa (jos haluat rajoittaa pääsyä tähän keskusteluun, enintään 15 henkilöä)',
TextStop: 'Lopeta',
TextStreamingUnsupported: 'suoratoisto-ominaisuutta ei tueta tässä mallissa',
TextSubmit: 'Lähetä',
TextSummarizePdf: 'Tee yhteenveto PDF-sisällöstä',
TextUndeterminedError: 'Odottamaton virhe',
5 changes: 5 additions & 0 deletions spfx-latest/src/loc/mystrings.d.ts
@@ -71,6 +71,10 @@ declare interface IAzureOpenAiChatWebPartStrings {
TextGpt4o: string;
TextGpt4oMini: string;
TextGpt4Turbo: string;
TextO1Mini: string;
TextO1Preview: string;
TextO1: string;
TextO3Mini: string;
TextHideMySharedChats: string;
TextInvalidListUrl: string;
TextLanguage: string;
@@ -100,6 +104,7 @@ declare interface IAzureOpenAiChatWebPartStrings {
TextSharedChats: string;
TextShareWith: string;
TextStop: string;
TextStreamingUnsupported: string;
TextSubmit: string;
TextSummarizePdf: string;
TextUndeterminedError: string;
5 changes: 5 additions & 0 deletions spfx-latest/src/loc/nb-no.js
@@ -73,6 +73,10 @@ define([], function () {
TextGpt4o: 'GPT-4o',
TextGpt4oMini: 'GPT-4o Mini',
TextGpt4Turbo: 'GPT-4 Turbo',
TextO1Mini: 'O1 Mini',
TextO1Preview: 'O1 Preview',
TextO1: 'O1',
TextO3Mini: 'O3 Mini',
TextHideMySharedChats: 'Skjul chattene mine',
TextInvalidListUrl: 'Ugyldig liste-URL. Liste med samme navn finnes allerede på nettstedet',
TextLanguage: 'Språk',
@@ -102,6 +106,7 @@ define([], function () {
TextSharedChats: 'Delte chatter',
TextShareWith: 'Spesifikke personer å dele med (hvis du vil begrense tilgangen til denne chatten, maks 15 personer)',
TextStop: 'Stopp',
TextStreamingUnsupported: 'strømmingsalternativet støttes ikke for denne modellen',
TextSubmit: 'Send',
TextSummarizePdf: 'Oppsummer PDF-innhold',
TextUndeterminedError: 'Uventet feil',