Skip to content

Commit

Permalink
Added UI logic for displaying whether a model is downloaded in the setting…
Browse files Browse the repository at this point in the history
… model dropdown
  • Loading branch information
npmSteven committed Feb 10, 2024
1 parent 766f2c8 commit c1516ad
Show file tree
Hide file tree
Showing 2 changed files with 59 additions and 4 deletions.
32 changes: 28 additions & 4 deletions src/components/SettingsModal.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { useCallback, useEffect, useState } from "react";
import { Dispatch, SetStateAction, useCallback, useEffect, useState } from "react";
import { useAtom } from "jotai";
import {
Modal,
Expand All @@ -14,12 +14,13 @@ import {
Tooltip,
} from "@mantine/core";
import {
ModelOption,
whisperModelSizes,
whisperModelsBase,
whisperModelsQuantized,
} from "../whisper-utils";
import { currentModel, currentSettings, fetchTimestamp } from "../state/main";
import { loadOrGetModel } from "../utils/model-data";
import { getDownloadedModels, loadOrGetModel } from "../utils/model-data";
import { WhisperModelName } from "../types";
import { IconQuestionMark } from "@tabler/icons-react";

Expand All @@ -45,6 +46,8 @@ export function SettingsModal({
const whisperModelUpdated = useCallback((_newModel: Uint8Array) => {
setNewModel(_newModel);
}, []);
const [whisperModelsBaseState, setWhisperModelsBaseState] = useState(whisperModelsBase);
const [whisperModelsQuantizedState, setWhisperModelsQuantizedState] = useState(whisperModelsQuantized);

useEffect(() => {
setNewModel(model);
Expand All @@ -66,6 +69,27 @@ export function SettingsModal({
loadingProgress,
} = useModelData(selectedModel, whisperModelUpdated);

// Annotate the model dropdown options with a " Downloaded" suffix for every
// model already cached in IndexedDB. Re-runs when `whisperModelLoaded`
// changes so a freshly downloaded model is flagged immediately.
useEffect(() => {
// Builds a relabeled copy of `models` and pushes it into component state.
// Note: this always maps over the constant base lists (not prior state),
// so the " Downloaded" suffix cannot accumulate across re-runs.
const updateModels = async (models: ModelOption[], setStateCallback: Dispatch<SetStateAction<ModelOption[]>>) => {
const downloadedModels = await getDownloadedModels();
const updatedModels = models.map((item) => {
const isDownloaded = downloadedModels.includes(item.value);
return {
...item,
label: isDownloaded ? `${item.label} Downloaded` : item.label
};
});
setStateCallback(updatedModels);
};

// Fire-and-forget IIFE: both lists refresh in parallel. NOTE(review): a
// rejection from getDownloadedModels is unhandled here — confirm intended.
(async () => {
await Promise.all([
updateModels(whisperModelsBase, setWhisperModelsBaseState),
updateModels(whisperModelsQuantized, setWhisperModelsQuantizedState),
])
})();
}, [whisperModelLoaded]);

return (
<Modal
opened={opened}
Expand Down Expand Up @@ -130,11 +154,11 @@ export function SettingsModal({
{ label: "None", value: "" },
{
group: "Raw",
items: whisperModelsBase,
items: whisperModelsBaseState,
},
{
group: "Quantized",
items: whisperModelsQuantized,
items: whisperModelsQuantizedState,
},
]}
onChange={(e) => {
Expand Down
31 changes: 31 additions & 0 deletions src/utils/model-data.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,37 @@ const dbName = "whisperModels";
const modelBaseUrl =
"https://link.storjshare.io/s/jueavj4qtolpgszkbp5awref22da/models";

/**
 * Lists the keys of all Whisper models already cached in IndexedDB, so the
 * settings UI can flag which models are available offline.
 *
 * @returns promise resolving to the cached model keys ([] when none exist,
 *          including the case where the "models" store has not been created
 *          yet); rejects if the database or the key read fails.
 */
export function getDownloadedModels(): Promise<string[]> {
  return new Promise((resolve, reject) => {
    // Informational only — storage estimation has no bearing on reading
    // IndexedDB, so we log and continue either way.
    // (Fixed copy-pasted "loadRemote:" prefix in the message.)
    if (!navigator.storage || !navigator.storage.estimate) {
      console.log("getDownloadedModels: navigator.storage.estimate() is not supported");
    }

    const openRequest = indexedDB.open(dbName, dbVersion);

    openRequest.onsuccess = function () {
      const db = openRequest.result;

      let tx: IDBTransaction;
      try {
        // transaction() throws a synchronous NotFoundError DOMException when
        // the "models" store does not exist yet (e.g. a fresh database created
        // by this very open() call, before any model was downloaded). The
        // original code let that exception escape, leaving the promise pending
        // forever; no store simply means no models are downloaded.
        tx = db.transaction(["models"], "readonly");
      } catch {
        db.close();
        resolve([]);
        return;
      }

      const objectStore = tx.objectStore("models");
      const localFilesRequest = objectStore.getAllKeys();

      localFilesRequest.onsuccess = function () {
        // Keys in this store are the model names (strings); fall back to []
        // defensively.
        resolve((localFilesRequest.result as string[]) || []);
        // Release the connection — the original leaked it on every call.
        db.close();
      };

      localFilesRequest.onerror = function () {
        console.error("Failed to fetch models");
        reject(new Error("Failed to fetch models"));
        db.close();
      };
    };

    openRequest.onerror = function () {
      console.error("Failed to open request to indexedDB");
      reject(new Error("Failed to open request to indexedDB"));
    };
  });
}

// TODO: this method seems to leak memory when changing models
export function loadOrGetModel(
selectedModel: keyof typeof whisperModelSizes | "" | undefined,
Expand Down

0 comments on commit c1516ad

Please sign in to comment.