-
Notifications
You must be signed in to change notification settings - Fork 121
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
abab5b9
commit 5b109ad
Showing
20 changed files
with
525 additions
and
30 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent for question requests
 *
 * The AI agent used to handle queries.
 */
export interface AiAgentAsk {
  /**
   * The type of AI agent used to handle queries.
   * Example: ai_agent_ask
   */
  type: 'ai_agent_ask';
  /** Tool configuration used for long-text queries. */
  long_text?: schemas.AiAgentLongTextTool;
  /** Tool configuration used for basic-text queries. */
  basic_text?: schemas.AiAgentBasicTextToolAsk;
  // NOTE(review): the *_multi variants presumably apply when querying multiple
  // items at once — confirm against the upstream API specification.
  long_text_multi?: schemas.AiAgentLongTextTool;
  basic_text_multi?: schemas.AiAgentBasicTextToolAsk;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent basic text generation tool
 *
 * AI agent basic tool used to generate text.
 * Inherits the model/prompt/endpoint fields from `AiAgentLongTextTool`.
 */
export interface AiAgentBasicGenTool extends schemas.AiAgentLongTextTool {
  /**
   * How the content should be included in a request to the LLM.
   * When passing this parameter, you must include `{content}`.
   * Example: ---{content}---
   */
  content_template?: string;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent basic text tool
 *
 * AI agent tool used to handle basic text for question (ask) requests.
 */
export interface AiAgentBasicTextToolAsk {
  /**
   * The model used for the AI Agent for basic text.
   * Example: openai__gpt_3_5_turbo
   */
  model?: string;
  /**
   * System messages try to help the LLM "understand" its role and what it is supposed to do.
   * Example: You are a helpful travel assistant specialized in budget travel
   */
  system_message?: string;
  /**
   * The prompt template contains contextual information of the request and the user prompt.
   *
   * When passing `prompt_template` parameters, you **must include** inputs for `{current_date}`, `{user_question}`, and `{content}`.
   * Example: It is `{current_date}`, and I have $8000 and want to spend a week in the Azores. What should I see?
   */
  prompt_template?: string;
  /**
   * The number of tokens for completion.
   * Example: 8400
   */
  num_tokens_for_completion?: number;
  /**
   * The parameters for the LLM endpoint specific to OpenAI / Google models.
   * Discriminated by the `type` field of each variant.
   */
  llm_endpoint_params?: schemas.AiLlmEndpointParamsOpenAi | schemas.AiLlmEndpointParamsGoogle;
}
36 changes: 36 additions & 0 deletions
36
src/schemas/ai-agent-basic-text-tool-text-gen.generated.ts
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent basic text tool
 *
 * AI agent tool used to handle basic text for text-generation requests.
 */
export interface AiAgentBasicTextToolTextGen {
  /**
   * The model to be used for the AI Agent for basic text.
   * Example: openai__gpt_3_5_turbo
   */
  model?: string;
  /**
   * System messages try to help the LLM "understand" its role and what it is supposed to do.
   * This parameter requires using `{current_date}`.
   * Example: You are a helpful travel assistant specialized in budget travel
   */
  system_message?: string;
  /**
   * The prompt template contains contextual information of the request and the user prompt.
   *
   * When using the `prompt_template` parameter, you **must include** input for `{user_question}`.
   * Inputs for `{current_date}` and `{content}` are optional, depending on the use.
   * Example: It is `{current_date}`, and I have $8000 and want to spend a week in the Azores. What should I see?
   */
  prompt_template?: string;
  /**
   * The number of tokens for completion.
   * Example: 8400
   */
  num_tokens_for_completion?: number;
  /**
   * The parameters for the LLM endpoint specific to OpenAI / Google models.
   * Discriminated by the `type` field of each variant.
   */
  llm_endpoint_params?: schemas.AiLlmEndpointParamsOpenAi | schemas.AiLlmEndpointParamsGoogle;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,10 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent long text tool
 *
 * AI agent tool used to handle longer text.
 * Inherits the model/prompt/endpoint fields from `AiAgentBasicTextToolTextGen`.
 */
export interface AiAgentLongTextTool
  extends schemas.AiAgentBasicTextToolTextGen {
  // Embeddings configuration. The shape is not modeled here (left as `object`)
  // — presumably generated from an untyped upstream schema; confirm before
  // tightening the type.
  embeddings?: object;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI agent for text generation requests
 *
 * The AI agent used for generating text.
 */
export interface AiAgentTextGen {
  /**
   * The type of AI agent used for generating text.
   * Example: ai_agent_text_gen
   */
  type: 'ai_agent_text_gen';
  /** Tool configuration used for basic text generation. */
  basic_gen?: schemas.AiAgentBasicGenTool;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,28 @@ | ||
import * as schemas from '.'; | ||
/**
 * The citation of the LLM's answer reference
 *
 * The citation of the LLM's answer reference.
 */
export interface AiCitation {
  /**
   * The specific content from where the answer was referenced.
   * Example: Public APIs are key drivers of innovation and growth.
   */
  content?: string;
  /**
   * The id of the item.
   * Example: 123
   */
  id?: string;
  /**
   * The type of the item. Only `file` citations are modeled here.
   * Example: file
   */
  type?: 'file';
  /**
   * The name of the item.
   * Example: The importance of public APIs.pdf
   */
  name?: string;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
import * as schemas from '.'; | ||
/**
 * Dialogue history
 *
 * A context object that can hold prior prompts and answers.
 */
export interface AiDialogueHistory {
  /**
   * The prompt previously provided by the client and answered by the LLM.
   * Example: Make my email about public APIs sound more professional.
   */
  prompt?: string;
  /**
   * The answer previously provided by the LLM.
   * Example: Here is the first draft of your professional email about public APIs.
   */
  answer?: string;
  /**
   * The ISO date formatted timestamp of when the previous answer to the prompt was created.
   * Example: 2012-12-12T10:53:43-08:00
   */
  created_at?: string;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
import * as schemas from '.'; | ||
/**
 * AI LLM endpoint params Google
 *
 * AI LLM endpoint params Google object.
 * One variant of the `llm_endpoint_params` union; discriminated by `type`.
 */
export interface AiLlmEndpointParamsGoogle {
  /**
   * The type of the AI LLM endpoint params object for Google.
   * This parameter is **required**.
   * Example: google_params
   */
  type: 'google_params';
  /**
   * The temperature is used for sampling during response generation, which occurs when `top-P` and `top-K` are applied.
   * Temperature controls the degree of randomness in token selection.
   */
  temperature?: number;
  /**
   * `Top-P` changes how the model selects tokens for output. Tokens are selected from the most (see `top-K`) to least probable
   * until the sum of their probabilities equals the `top-P` value.
   * Example: 1
   */
  top_p?: number;
  /**
   * `Top-K` changes how the model selects tokens for output. A `top-K` of 1 means the next selected token is
   * the most probable among all tokens in the model's vocabulary (also called greedy decoding),
   * while a `top-K` of 3 means that the next token is selected from among the three most probable tokens by using temperature.
   * Example: 1
   */
  top_k?: number;
}
Oops, something went wrong.