Skip to content

Commit

Permalink
4.7 (#50)
Browse files Browse the repository at this point in the history
* fix docker-compose download url (labring#994)

The original code used a broken URL that returned '404 NOT FOUND'.
fix docker-compose download url, add 'v' before docker-compose version

* Update ai_settings.md (labring#1000)

* Update configuration.md

* Update configuration.md

* Fix history in classifyQuestion and extract modules (labring#1012)

* Fix history in classifyQuestion and extract modules

* Add chatValue2RuntimePrompt import and update text formatting

* flow controller to packages

* fix: rerank select

* modal ui

* perf: modal code path

* point not sufficient

* feat: http url support variable

* fix http key

* perf: prompt

* perf: ai setting modal

* simple edit ui

---------

Co-authored-by: entorick <[email protected]>
Co-authored-by: liujianglc <[email protected]>
Co-authored-by: Fengrui Liu <[email protected]>
  • Loading branch information
4 people committed Mar 20, 2024
1 parent 695db95 commit 0261bc2
Show file tree
Hide file tree
Showing 146 changed files with 1,914 additions and 2,150 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -122,9 +122,9 @@ fastgpt.run 域名会弃用。

## 🏘️ 社区交流群

添加 wx 小助手加入
wx 扫一下加入

![](https://otnvvf-imgs.oss.laf.run/wx300.jpg)
![](https://oss.laf.run/htr4n1-images/fastgpt-qr-code.jpg)

<a href="#readme">
<img src="https://img.shields.io/badge/-返回顶部-7d09f1.svg" alt="#" align="right">
Expand Down
10 changes: 4 additions & 6 deletions README_en.md
Original file line number Diff line number Diff line change
Expand Up @@ -116,14 +116,12 @@ Project tech stack: NextJs + TS + ChakraUI + Mongo + Postgres (Vector plugin)
- [Configuring Multiple Models](https://doc.fastgpt.in/docs/installation/reference/models)
- [Version Updates & Upgrades](https://doc.fastgpt.in/docs/installation/upgrading)

<!-- ## :point_right: RoadMap
- [FastGPT RoadMap](https://kjqvjse66l.feishu.cn/docx/RVUxdqE2WolDYyxEKATcM0XXnte) -->

<!-- ## 🏘️ Community
## 🏘️ Community

| Community Group | Assistant |
| ------------------------------------------------- | ---------------------------------------------- |
| ![](https://otnvvf-imgs.oss.laf.run/wxqun300.jpg) | ![](https://otnvvf-imgs.oss.laf.run/wx300.jpg) | -->
| Community Group |
| ------------------------------------------------- |
| ![](https://oss.laf.run/htr4n1-images/fastgpt-qr-code.jpg) |

<a href="#readme">
<img src="https://img.shields.io/badge/-Back_to_Top-7d09f1.svg" alt="#" align="right">
Expand Down
2 changes: 1 addition & 1 deletion packages/global/common/error/code/team.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ export enum TeamErrEnum {
const teamErr = [
{ statusText: TeamErrEnum.teamOverSize, message: 'error.team.overSize' },
{ statusText: TeamErrEnum.unAuthTeam, message: '无权操作该团队' },
{ statusText: TeamErrEnum.aiPointsNotEnough, message: 'AI积分已用完~' },
{ statusText: TeamErrEnum.aiPointsNotEnough, message: '' },
{ statusText: TeamErrEnum.datasetSizeNotEnough, message: '知识库容量不足,请先扩容~' },
{ statusText: TeamErrEnum.datasetAmountNotEnough, message: '知识库数量已达上限~' },
{ statusText: TeamErrEnum.appAmountNotEnough, message: '应用数量已达上限~' },
Expand Down
2 changes: 1 addition & 1 deletion packages/global/common/error/utils.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { replaceSensitiveText } from '../string/tools';

/**
 * Normalize an arbitrary error value into a displayable message string.
 *
 * @param err - the caught error; may be a string, an Error-like object, or anything else
 * @param def - fallback message when `err` carries no message of its own
 * @returns the message with sensitive fragments removed
 */
export const getErrText = (err: any, def = '') => {
  // `??` (not `||`) so an explicitly empty err.message is kept instead of
  // being clobbered by the default. (The scraped diff showed two consecutive
  // `const msg` declarations — old and new diff lines; only the new one belongs.)
  const msg: string = typeof err === 'string' ? err : err?.message ?? def;
  // Log only non-empty messages to avoid console noise.
  msg && console.log('error =>', msg);
  // NOTE(review): replaceSensitiveText presumably redacts internal details
  // before the message reaches users — confirm in ../string/tools.
  return replaceSensitiveText(msg);
};
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { PromptTemplateItem } from '@fastgpt/global/core/ai/type.d';
import { PromptTemplateItem } from '../type.d';

export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,5 +58,3 @@ Human:"{{question}}"
类型ID=
`;

export const Prompt_QuestionGuide = `我不太清楚问你什么问题,请帮我生成 3 个问题,引导我继续提问。问题的长度应小于20个字符,按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
15 changes: 1 addition & 14 deletions packages/global/core/app/type.d.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import type { AppTTSConfigType, ModuleItemType, VariableItemType } from '../module/type.d';
import { AppTypeEnum } from './constants';
import { PermissionTypeEnum } from '../../support/permission/constant';
import type { AIChatModuleProps, DatasetModuleProps } from '../module/node/type.d';
import type { DatasetModuleProps } from '../module/node/type.d';
import { VariableInputEnum } from '../module/constants';
import { SelectedDatasetType } from '../module/api';
import { DatasetSearchModeEnum } from '../dataset/constants';
Expand Down Expand Up @@ -36,19 +36,6 @@ export type AppDetailType = AppSchema & {
canWrite: boolean;
};

// export type AppSimpleEditFormType = {
// aiSettings: AIChatModuleProps;
// dataset: DatasetModuleProps & {
// searchEmptyText: string;
// };
// userGuide: {
// welcomeText: string;
// variables: VariableItemType[];
// questionGuide: boolean;
// tts: AppTTSConfigType;
// };
// };
// Since useForm cannot infer enumeration types, all enumeration keys must be declared manually
export type AppSimpleEditFormType = {
// templateId: string;
aiSettings: {
Expand Down
5 changes: 4 additions & 1 deletion packages/global/core/app/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,10 @@ export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
};

modules.forEach((module) => {
if (module.flowType === FlowNodeTypeEnum.chatNode) {
if (
module.flowType === FlowNodeTypeEnum.chatNode ||
module.flowType === FlowNodeTypeEnum.tools
) {
defaultAppForm.aiSettings.model = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.aiModel
Expand Down
1 change: 1 addition & 0 deletions packages/global/core/module/node/constant.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export enum FlowNodeInputTypeEnum {

// ai model select
selectLLMModel = 'selectLLMModel',
settingLLMModel = 'settingLLMModel',

// dataset special input
selectDataset = 'selectDataset',
Expand Down
6 changes: 6 additions & 0 deletions packages/global/core/module/node/type.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,12 @@ export type EditNodeFieldType = {
};

/* ------------- item type --------------- */
/**
 * AI-model settings bundle for a flow node.
 * Companion to `FlowNodeInputTypeEnum.settingLLMModel` — presumably edited
 * through the model-settings modal rather than a plain select (confirm in UI code).
 */
export type SettingAIDataType = {
model: string; // LLM model identifier
temperature: number; // sampling temperature
maxToken: number; // response token limit
isResponseAnswerText?: boolean; // whether the answer is emitted as text — TODO confirm semantics
};
/* ai chat modules props */
export type AIChatModuleProps = {
[ModuleInputKeyEnum.aiModel]: string;
Expand Down
11 changes: 10 additions & 1 deletion packages/global/core/module/template/input.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ export const Input_Template_DynamicInput: FlowNodeInputItemType = {
hideInApp: true
};

export const Input_Template_AiModel: FlowNodeInputItemType = {
export const Input_Template_SelectAIModel: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
label: 'core.module.input.label.aiModel',
Expand All @@ -68,6 +68,15 @@ export const Input_Template_AiModel: FlowNodeInputItemType = {
showTargetInApp: false,
showTargetInPlugin: false
};
// Input template for choosing the AI model via the full settings UI
// (`settingLLMModel`), in contrast to `Input_Template_SelectAIModel`,
// which uses the plain `selectLLMModel` dropdown with the same key/label.
export const Input_Template_SettingAiModel: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.settingLLMModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false, // not exposed as a connection target in apps
showTargetInPlugin: false // nor in plugins
};

export const Input_Template_System_Prompt: FlowNodeInputItemType = {
key: ModuleInputKeyEnum.aiSystemPrompt,
Expand Down
12 changes: 2 additions & 10 deletions packages/global/core/module/template/system/aiChat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import {
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_SettingAiModel,
Input_Template_Dataset_Quote,
Input_Template_History,
Input_Template_Switch,
Expand All @@ -32,7 +32,7 @@ export const AiChatModule: FlowNodeTemplateType = {
isTool: true,
inputs: [
Input_Template_Switch,
Input_Template_AiModel,
Input_Template_SettingAiModel,
// --- settings modal
{
key: ModuleInputKeyEnum.aiChatTemperature,
Expand Down Expand Up @@ -83,14 +83,6 @@ export const AiChatModule: FlowNodeTemplateType = {
showTargetInApp: false,
showTargetInPlugin: false
},
{
key: ModuleInputKeyEnum.aiChatSettingModal,
type: FlowNodeInputTypeEnum.aiSettings,
label: '',
valueType: ModuleIOValueTypeEnum.any,
showTargetInApp: false,
showTargetInPlugin: false
},
// settings modal ---
{
...Input_Template_System_Prompt,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import {
FlowNodeTemplateTypeEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_SelectAIModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput
Expand All @@ -30,7 +30,7 @@ export const ClassifyQuestionModule: FlowNodeTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.classify
},
{
Expand Down
8 changes: 6 additions & 2 deletions packages/global/core/module/template/system/contextExtract.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,11 @@ import {
ModuleOutputKeyEnum,
FlowNodeTemplateTypeEnum
} from '../../constants';
import { Input_Template_AiModel, Input_Template_History, Input_Template_Switch } from '../input';
import {
Input_Template_SelectAIModel,
Input_Template_History,
Input_Template_Switch
} from '../input';
import { LLMModelTypeEnum } from '../../../ai/constants';

export const ContextExtractModule: FlowNodeTemplateType = {
Expand All @@ -25,7 +29,7 @@ export const ContextExtractModule: FlowNodeTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.extractFields
},
{
Expand Down
4 changes: 2 additions & 2 deletions packages/global/core/module/template/system/queryExtension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import {
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput,
Input_Template_AiModel
Input_Template_SelectAIModel
} from '../input';
import { Output_Template_UserChatInput } from '../output';
import { LLMModelTypeEnum } from '../../../ai/constants';
Expand All @@ -31,7 +31,7 @@ export const AiQueryExtension: FlowNodeTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SelectAIModel,
llmModelType: LLMModelTypeEnum.queryExtension
},
{
Expand Down
37 changes: 33 additions & 4 deletions packages/global/core/module/template/system/tools.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,17 @@
import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from '../../node/constant';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowNodeTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleOutputKeyEnum,
FlowNodeTemplateTypeEnum
FlowNodeTemplateTypeEnum,
ModuleInputKeyEnum
} from '../../constants';
import {
Input_Template_AiModel,
Input_Template_SettingAiModel,
Input_Template_History,
Input_Template_Switch,
Input_Template_System_Prompt,
Expand All @@ -27,9 +32,33 @@ export const ToolModule: FlowNodeTemplateType = {
inputs: [
Input_Template_Switch,
{
...Input_Template_AiModel,
...Input_Template_SettingAiModel,
llmModelType: LLMModelTypeEnum.toolCall
},
{
key: ModuleInputKeyEnum.aiChatTemperature,
type: FlowNodeInputTypeEnum.hidden, // Set in the pop-up window
label: '',
value: 0,
valueType: ModuleIOValueTypeEnum.number,
min: 0,
max: 10,
step: 1,
showTargetInApp: false,
showTargetInPlugin: false
},
{
key: ModuleInputKeyEnum.aiChatMaxToken,
type: FlowNodeInputTypeEnum.hidden, // Set in the pop-up window
label: '',
value: 2000,
valueType: ModuleIOValueTypeEnum.number,
min: 100,
max: 4000,
step: 50,
showTargetInApp: false,
showTargetInPlugin: false
},
{
...Input_Template_System_Prompt,
label: 'core.ai.Prompt',
Expand Down
3 changes: 2 additions & 1 deletion packages/global/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@
"js-tiktoken": "^1.0.7",
"openai": "4.28.0",
"nanoid": "^4.0.1",
"timezones-list": "^3.0.2"
"timezones-list": "^3.0.2",
"next": "13.5.2"
},
"devDependencies": {
"@types/node": "^20.8.5"
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { POST } from './plusRequest';

export const postTextCensor = (data: { text: string }) =>
POST<{ code?: number; message: string }>('/common/censor/text_baidu', data)
Expand Down
Loading

0 comments on commit 0261bc2

Please sign in to comment.