Skip to content

Commit

Permalink
Add Ollama native API to support keep alive parameters (#748)
Browse files Browse the repository at this point in the history
* Add Chinese translation.

* Optimize style.

* Add Ollama native API to support keep alive parameters.

* Optimized popup page style.

* fix: Fixed data type for Ollama keep_alive parameter forever
  • Loading branch information
lzskyline authored Jul 30, 2024
1 parent 9ad6698 commit 4881700
Show file tree
Hide file tree
Showing 13 changed files with 241 additions and 5 deletions.
11 changes: 10 additions & 1 deletion src/_locales/zh-hans/main.json
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
"ChatGPT (GPT-4-8k)": "ChatGPT (GPT-4-8k)",
"ChatGPT (GPT-4-32k)": "ChatGPT (GPT-4-32k)",
"GPT-3.5": "GPT-3.5",
"Ollama API": "Ollama API",
"Custom Model": "自定义模型",
"Balanced": "平衡",
"Creative": "有创造力",
Expand Down Expand Up @@ -142,5 +143,13 @@
"Icon": "图标",
"Prompt Template": "提示模板",
"Explain this: {{selection}}": "解释这个: {{selection}}",
"New": "新建"
"New": "新建",
"DisplayMode": "显示方式",
"Display in sidebar": "在侧边栏显示",
"Display in floating toolbar": "在浮动工具栏显示",
"Temperature": "温度",
"keep-alive Time": "保活时间",
"5m": "5分钟",
"30m": "半小时",
"Forever": "永久"
}
10 changes: 10 additions & 0 deletions src/background/index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
generateAnswersWithGptCompletionApi,
} from '../services/apis/openai-api'
import { generateAnswersWithCustomApi } from '../services/apis/custom-api.mjs'
import { generateAnswersWithOllamaApi } from '../services/apis/ollama-api.mjs'
import { generateAnswersWithAzureOpenaiApi } from '../services/apis/azure-openai-api.mjs'
import { generateAnswersWithClaudeApi } from '../services/apis/claude-api.mjs'
import { generateAnswersWithChatGLMApi } from '../services/apis/chatglm-api.mjs'
Expand All @@ -25,6 +26,7 @@ import {
claudeWebModelKeys,
moonshotWebModelKeys,
customApiModelKeys,
ollamaApiModelKeys,
defaultConfig,
getUserConfig,
githubThirdPartyApiModelKeys,
Expand Down Expand Up @@ -124,6 +126,14 @@ async function executeApi(session, port, config) {
config.customApiKey,
config.customModelName,
)
} else if (ollamaApiModelKeys.includes(session.modelName)) {
await generateAnswersWithOllamaApi(
port,
session.question,
session,
config.ollamaApiKey,
config.ollamaModelName,
)
} else if (azureOpenAiApiModelKeys.includes(session.modelName)) {
await generateAnswersWithAzureOpenaiApi(port, session.question, session)
} else if (claudeApiModelKeys.includes(session.modelName)) {
Expand Down
2 changes: 1 addition & 1 deletion src/components/ConversationItem/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import ReadButton from '../ReadButton'
import PropTypes from 'prop-types'
import MarkdownRender from '../MarkdownRender/markdown.jsx'
import { useTranslation } from 'react-i18next'
import { isUsingCustomModel } from '../../config/index.mjs'
import { isUsingCustomModel, isUsingOllamaModel } from '../../config/index.mjs'
import { useConfig } from '../../hooks/use-config.mjs'

function AnswerTitle({ descName, modelName }) {
Expand Down
11 changes: 11 additions & 0 deletions src/config/index.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ export const chatgptApiModelKeys = [
'chatgptApi4_128k_0125_preview',
]
export const customApiModelKeys = ['customModel']
export const ollamaApiModelKeys = ['ollamaModel']
export const azureOpenAiApiModelKeys = ['azureOpenAi']
export const claudeApiModelKeys = [
'claude12Api',
Expand Down Expand Up @@ -163,6 +164,7 @@ export const Models = {
gptApiDavinci: { value: 'text-davinci-003', desc: 'GPT-3.5' },

customModel: { value: '', desc: 'Custom Model' },
ollamaModel: { value: '', desc: 'Ollama API' },
azureOpenAi: { value: '', desc: 'ChatGPT (Azure)' },
waylaidwandererApi: { value: '', desc: 'Waylaidwanderer API (Github)' },

Expand Down Expand Up @@ -249,6 +251,10 @@ export const defaultConfig = {
customModelName: 'gpt-3.5-turbo',
githubThirdPartyUrl: 'http://127.0.0.1:3000/conversation',

ollamaEndpoint: 'http://127.0.0.1:11434',
ollamaModelName: 'gemma2',
keepAliveTime: '5m',

// advanced

maxResponseTokenLength: 1000,
Expand Down Expand Up @@ -281,6 +287,7 @@ export const defaultConfig = {
'moonshotWebFree',
'chatglmTurbo',
'customModel',
'ollamaModel',
'azureOpenAi',
],
activeSelectionTools: ['translate', 'summary', 'polish', 'code', 'ask'],
Expand Down Expand Up @@ -381,6 +388,10 @@ export function isUsingCustomModel(configOrSession) {
return customApiModelKeys.includes(configOrSession.modelName)
}

/**
 * Returns true when the active model is the native Ollama API model.
 * @param {object} configOrSession - a user config or a chat session; only `modelName` is read
 * @returns {boolean} whether `modelName` is one of `ollamaApiModelKeys`
 */
export function isUsingOllamaModel(configOrSession) {
  return ollamaApiModelKeys.includes(configOrSession.modelName)
}

export function isUsingChatGLMApi(configOrSession) {
return chatglmApiModelKeys.includes(configOrSession.modelName)
}
Expand Down
2 changes: 1 addition & 1 deletion src/content-script/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ async function mountComponent(siteConfig, userConfig) {
})

const position = {
x: window.innerWidth - 300 - (Math.floor((20 / 100) * window.innerWidth)),
x: window.innerWidth - 300 - Math.floor((20 / 100) * window.innerWidth),
y: window.innerHeight / 2 - 200,
}
const toolbarContainer = createElementAtPosition(position.x, position.y)
Expand Down
2 changes: 1 addition & 1 deletion src/manifest.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "ChatGPTBox",
"description": "Integrating ChatGPT into your browser deeply, everything you need is here",
"version": "2.5.6",
"version": "2.5.7",
"manifest_version": 3,
"icons": {
"16": "logo.png",
Expand Down
2 changes: 1 addition & 1 deletion src/manifest.v2.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "ChatGPTBox",
"description": "Integrating ChatGPT into your browser deeply, everything you need is here",
"version": "2.5.6",
"version": "2.5.7",
"manifest_version": 2,
"icons": {
"16": "logo.png",
Expand Down
45 changes: 45 additions & 0 deletions src/popup/sections/AdvancedPart.jsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import '../styles.scss'
import { useTranslation } from 'react-i18next'
import { parseFloatWithClamp, parseIntWithClamp } from '../../utils/index.mjs'
import { isUsingOllamaModel } from '../../config/index.mjs'
import PropTypes from 'prop-types'
import { Tab, TabList, TabPanel, Tabs } from 'react-tabs'
import Browser from 'webextension-polyfill'
Expand Down Expand Up @@ -56,6 +58,49 @@ function ApiParams({ config, updateConfig }) {
}}
/>
</label>
{isUsingOllamaModel(config) && (
<label>
{`${t('keep-alive Time')}: `}
<div className="label-group">
<label>
<input
type="radio"
name="keepAliveTime"
value="5m"
checked={config.keepAliveTime === '5m'}
onChange={(e) => {
updateConfig({ keepAliveTime: e.target.value })
}}
/>
{t('5m')}
</label>
<label>
<input
type="radio"
name="keepAliveTime"
value="30m"
checked={config.keepAliveTime === '30m'}
onChange={(e) => {
updateConfig({ keepAliveTime: e.target.value })
}}
/>
{t('30m')}
</label>
<label>
<input
type="radio"
name="keepAliveTime"
value="-1"
checked={config.keepAliveTime === '-1'}
onChange={(e) => {
updateConfig({ keepAliveTime: e.target.value })
}}
/>
{t('Forever')}
</label>
</div>
</label>
)}
</>
)
}
Expand Down
36 changes: 36 additions & 0 deletions src/popup/sections/GeneralPart.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
isUsingClaudeApi,
isUsingCustomModel,
isUsingCustomNameOnlyModel,
isUsingOllamaModel,
isUsingGithubThirdPartyApi,
isUsingMultiModeModel,
ModelMode,
Expand Down Expand Up @@ -163,6 +164,7 @@ export function GeneralPart({ config, updateConfig }) {
isUsingOpenAiApiKey(config) ||
isUsingMultiModeModel(config) ||
isUsingCustomModel(config) ||
isUsingOllamaModel(config) ||
isUsingAzureOpenAi(config) ||
isUsingClaudeApi(config) ||
isUsingCustomNameOnlyModel(config) ||
Expand Down Expand Up @@ -271,6 +273,18 @@ export function GeneralPart({ config, updateConfig }) {
}}
/>
)}
{isUsingOllamaModel(config) && (
<input
style="width: 50%;"
type="text"
value={config.ollamaModelName}
placeholder={t('Model Name')}
onChange={(e) => {
const ollamaModelName = e.target.value
updateConfig({ ollamaModelName: ollamaModelName })
}}
/>
)}
{isUsingAzureOpenAi(config) && (
<input
type="password"
Expand Down Expand Up @@ -354,6 +368,28 @@ export function GeneralPart({ config, updateConfig }) {
}}
/>
)}
{isUsingOllamaModel(config) && (
<input
type="text"
value={config.ollamaEndpoint}
placeholder={t('Ollama Endpoint')}
onChange={(e) => {
const value = e.target.value
updateConfig({ ollamaEndpoint: value })
}}
/>
)}
{isUsingOllamaModel(config) && (
<input
type="password"
value={config.ollamaApiKey}
placeholder={t('API Key')}
onChange={(e) => {
const apiKey = e.target.value
updateConfig({ ollamaApiKey: apiKey })
}}
/>
)}
{isUsingAzureOpenAi(config) && (
<input
type="password"
Expand Down
15 changes: 15 additions & 0 deletions src/popup/styles.scss
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,11 @@
--active-color: #eaecf0;
}

// Cap the popup width and center it so the settings page stays readable
// when it is opened as a full browser tab instead of the small popup.
#app {
  max-width: 600px;
  margin: 0 auto;
}

.container-page-mode {
display: flex;
flex-direction: column;
Expand Down Expand Up @@ -58,6 +63,7 @@
}

.footer {
max-width: 580px;
width: 90%;
position: fixed;
bottom: 10px;
Expand Down Expand Up @@ -88,3 +94,12 @@
background: var(--active-color);
}
}

// Lay a group of radio/checkbox labels out on one horizontal row,
// vertically centered, with a small gap after each option.
.label-group {
  display: flex;
  align-items: center;

  label {
    margin-right: 10px;
  }
}
82 changes: 82 additions & 0 deletions src/services/apis/ollama-api.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
// ollama api version

// There is a lot of duplicated code across these API modules, and it would be
// easy to refactor. It is deliberately left as-is for now: keeping each module
// self-contained makes targeted changes simple, and so far the duplication has
// not hurt maintainability. If that changes, it will be refactored.

import { getUserConfig } from '../../config/index.mjs'
import { fetchSSE } from '../../utils/fetch-ollama.mjs'
import { getConversationPairs } from '../../utils/get-conversation-pairs.mjs'
import { isEmpty } from 'lodash-es'
import { pushRecord, setAbortController } from './shared.mjs'

/**
* @param {Browser.Runtime.Port} port
* @param {string} question
* @param {Session} session
* @param {string} apiKey
* @param {string} modelName
*/
/**
 * Streams a chat completion from an Ollama server via its native /api/chat endpoint.
 *
 * Partial answers are posted to `port` as they arrive; on completion the full
 * question/answer pair is appended to `session.conversationRecords`.
 *
 * @param {Browser.Runtime.Port} port - extension port used to stream answers back to the UI
 * @param {string} question - the user's new question
 * @param {Session} session - conversation session whose records provide context and receive the result
 * @param {string} apiKey - bearer token sent in the Authorization header (useful behind an authenticating proxy)
 * @param {string} modelName - name of the Ollama model to run, e.g. 'gemma2'
 */
export async function generateAnswersWithOllamaApi(port, question, session, apiKey, modelName) {
  const { controller, messageListener, disconnectListener } = setAbortController(port)

  const config = await getUserConfig()
  // Build the chat history, trimmed to the configured context-window length.
  const prompt = getConversationPairs(
    session.conversationRecords.slice(-config.maxConversationContextLength),
    false,
  )
  prompt.push({ role: 'user', content: question })
  const apiUrl = config.ollamaEndpoint

  let answer = ''
  let finished = false
  const finish = () => {
    finished = true
    pushRecord(session, question, answer)
    console.debug('conversation history', { content: session.conversationRecords })
    port.postMessage({ answer: null, done: true, session: session })
  }
  await fetchSSE(`${apiUrl}/api/chat`, {
    method: 'POST',
    signal: controller.signal,
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({
      messages: prompt,
      model: modelName,
      stream: true,
      // Ollama accepts a duration string (e.g. '5m') or the number -1 for
      // "keep the model loaded forever"; '-1' must be sent as a number.
      keep_alive: config.keepAliveTime === '-1' ? -1 : config.keepAliveTime,
    }),
    onMessage(message) {
      console.debug('sse message', message)
      if (finished) return
      const data = message
      const delta = data.message?.content
      if (delta) {
        answer += delta
        port.postMessage({ answer: answer, done: false, session: null })
      }
      // The final streamed object is flagged with `done: true`. `done_reason`
      // is only present on newer Ollama versions, so checking it alone could
      // skip finish() and drop the conversation record.
      if (data.done || data.done_reason) {
        finish()
        return
      }
    },
    async onStart() {},
    async onEnd() {
      port.postMessage({ done: true })
      port.onMessage.removeListener(messageListener)
      port.onDisconnect.removeListener(disconnectListener)
    },
    async onError(resp) {
      port.onMessage.removeListener(messageListener)
      port.onDisconnect.removeListener(disconnectListener)
      if (resp instanceof Error) throw resp
      // Surface the server's JSON error body when present, else the HTTP status.
      const error = await resp.json().catch(() => ({}))
      throw new Error(!isEmpty(error) ? JSON.stringify(error) : `${resp.status} ${resp.statusText}`)
    },
  })
}
Loading

0 comments on commit 4881700

Please sign in to comment.