Commit

Fix tauri build

JinayJain committed Apr 3, 2024
1 parent 41a87b7 commit 83790be
Showing 6 changed files with 557 additions and 353 deletions.
4 changes: 2 additions & 2 deletions package.json
@@ -26,9 +26,9 @@
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
     "react-icons": "^4.8.0",
-    "react-markdown": "^9.0.1",
+    "react-markdown": "^8.0.x",
     "react-syntax-highlighter": "^15.5.0",
-    "rehype-katex": "^7.0.0",
+    "rehype-katex": "^6.0.x",
     "remark-breaks": "^4.0.0",
     "remark-math": "^6.0.0",
     "tauri-plugin-store-api": "https://github.com/tauri-apps/tauri-plugin-store"
3 changes: 2 additions & 1 deletion src/components/Search.tsx
@@ -89,10 +89,11 @@ function Search({
   }, []);

   useEffect(() => {
-    console.log("model changed", model);
     const updateModel = async () => {
       if (model) {
         await store.set(STORE_KEY.MODEL, model);
+        await store.save();
+        console.log("model changed", model);
       }
     };

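The set/save pair above is the usual tauri-plugin-store-api persistence pattern: set() only updates the in-memory store, while save() flushes it to the store file on disk. A minimal sketch of that pattern, assuming a hypothetical ".settings.dat" store file and a plain "model" key:

import { Store } from "tauri-plugin-store-api";

const store = new Store(".settings.dat");

async function persistModel(model: string) {
  // set() mutates only the in-memory copy of the store.
  await store.set("model", model);
  // save() writes the store to disk so the value survives an app restart.
  await store.save();
}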
76 changes: 31 additions & 45 deletions src/pages/App.tsx
@@ -34,35 +34,21 @@ function App() {

   const handleGenerate = useCallback(
     async (prompt: string, temperature = 1.0) => {
-      addPrompt(prompt);
       if (isLoading || !prompt) {
         return;
       }

-      const NUM_WORDS = 50;
+      setIsLoading(true);
+      setError(null);

-      // if (prompt) {
-      //   const chatHistory: ChatMessage[] = [
-      //     ...chatLog,
-      //     { role: "user", text: prompt },
-      //   ];
-      //   addUser(prompt);
-      //   setError(null);
-      //   setIsLoading(true);
-      //   try {
-      //     const response = await chatComplete({
-      //       chat: chatHistory,
-      //       onChunk(chunk) {},
-      //       apiParams: {
-      //         temperature,
-      //       },
-      //     });
-      //     addAssistant(response);
-      //   } catch (e) {
-      //     if (e instanceof Error) {
-      //       setError(e);
-      //     }
-      //     console.log(e);
-      //   }
-      //   setIsLoading(false);
-      // }
+      try {
+        await addPrompt(prompt, temperature);
+      } catch (e) {
+        if (e instanceof Error) {
+          setError(e);
+        }
+      }
+      setIsLoading(false);
     },
     [addPrompt]
   );
@@ -85,24 +85,24 @@ function App() {
       />

       <Box overflowY="auto" maxH="100%">
-        {/* {error && (
-          <Box
-            as={motion.div}
-            initial={{ opacity: 0 }}
-            animate={{ opacity: 1 }}
-            exit={{ opacity: 0 }}
-            mb={2}
-            rounded="md"
-            overflow="hidden"
-            background="blackAlpha.800"
-          >
-            {error.message === "Unauthorized" ? (
-              <UnauthorizedErrorBox />
-            ) : (
-              <ErrorBox error={error} />
-            )}
-          </Box>
-        )} */}
+        {error && (
+          <Box
+            as={motion.div}
+            initial={{ opacity: 0 }}
+            animate={{ opacity: 1 }}
+            exit={{ opacity: 0 }}
+            mb={2}
+            rounded="md"
+            overflow="hidden"
+            background="blackAlpha.800"
+          >
+            {error.message === "Unauthorized" ? (
+              <UnauthorizedErrorBox />
+            ) : (
+              <ErrorBox error={error} />
+            )}
+          </Box>
+        )}

         <Messages messages={messages} />

2 changes: 1 addition & 1 deletion src/util/ai.ts
@@ -18,7 +18,7 @@ export function useChat() {
   const [messages, setMessages] = useState<Message[]>([]);

   const addPrompt = useCallback(
-    async (prompt: string) => {
+    async (prompt: string, temperature: number = 1.0) => {
       const model = (await store.get(STORE_KEY.MODEL)) as keyof typeof MODELS;
       const responseId = generateId();
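With the new optional parameter, callers of addPrompt can override the sampling temperature per request or rely on the 1.0 default. A hypothetical call site (the hook's return shape is assumed, not shown in this diff):

const { messages, addPrompt } = useChat();

// Uses the default temperature of 1.0.
await addPrompt("Summarize the conversation so far");

// Overrides it for a more deterministic reply.
await addPrompt("Summarize the conversation so far", 0.2);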
15 changes: 12 additions & 3 deletions src/util/llm.ts
@@ -35,10 +35,16 @@ async function processLine(line: string) {
 type SendRequestFn = (
   chat: Message[],
   controller: AbortController,
-  model: string
+  model: string,
+  temperature?: number
 ) => Promise<Response>;

-const sendOpenAiApiRequest: SendRequestFn = async (chat, controller, model) => {
+const sendOpenAiApiRequest: SendRequestFn = async (
+  chat,
+  controller,
+  model,
+  temperature
+) => {
   const apiKey = await store.get(STORE_KEY.OPENAI_API_KEY);
   const max_tokens =
     Number(await store.get(STORE_KEY.MAX_TOKENS)) || DEFAULT_MAX_TOKENS;
@@ -69,6 +75,7 @@ const sendOpenAiApiRequest: SendRequestFn = async (chat, controller, model) => {
       ],
       stream: true,
       max_tokens,
+      temperature,
     }),
   });
 };
@@ -168,10 +175,12 @@ async function chatComplete({
   chat,
   onChunk,
   model,
+  temperature = 1.0,
 }: {
   chat: Message[];
   onChunk: (message: string) => void;
   model: string;
+  temperature?: number;
 }) {
   const controller = new AbortController();

@@ -182,7 +191,7 @@ async function chatComplete({
     controller.abort();
   }, timeoutSec * 1000);

-  const res = await sendOpenAiApiRequest(chat, controller, model);
+  const res = await sendOpenAiApiRequest(chat, controller, model, temperature);

   if (!res.ok) {
     if (res.status === 401) {
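Taken together, these hunks thread temperature from chatComplete's options object through sendOpenAiApiRequest and into the OpenAI request body. A hedged sketch of a call site; the message shape follows the commented-out code removed from App.tsx, and the model name is purely illustrative:

const response = await chatComplete({
  chat: [{ role: "user", text: "What changed in this commit?" }],
  onChunk: (chunk) => console.log(chunk),
  model: "gpt-4",
  temperature: 0.2, // forwarded verbatim as the API's sampling control
});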