diff --git a/src/commands/config.ts b/src/commands/config.ts
index c45e2f2f..0b508a44 100644
--- a/src/commands/config.ts
+++ b/src/commands/config.ts
@@ -76,6 +76,16 @@ export const MODEL_LIST = {
     'gemini-1.0-pro',
     'gemini-pro-vision',
     'text-embedding-004'
+  ],
+
+  groq: [
+    'llama3-70b-8192', // Meta Llama 3 70B (default one, no daily token limit and 14 400 reqs/day)
+    'llama3-8b-8192', // Meta Llama 3 8B
+    'llama-guard-3-8b', // Llama Guard 3 8B
+    'llama-3.1-8b-instant', // Llama 3.1 8B (Preview)
+    'llama-3.1-70b-versatile', // Llama 3.1 70B (Preview)
+    'gemma-7b-it', // Gemma 7B
+    'gemma2-9b-it' // Gemma 2 9B
   ]
 };
 
@@ -87,6 +97,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.anthropic[0];
     case 'gemini':
       return MODEL_LIST.gemini[0];
+    case 'groq':
+      return MODEL_LIST.groq[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -241,7 +253,7 @@ export const configValidators = {
    validateConfig(
      CONFIG_KEYS.OCO_AI_PROVIDER,
-      ['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise'].includes(
+      ['openai', 'anthropic', 'gemini', 'azure', 'test', 'flowise', 'groq'].includes(
        value
      ) || value.startsWith('ollama'),
      `${value} is not supported yet, use 'ollama', 'anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
    );
@@ -288,7 +300,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   GEMINI = 'gemini',
   AZURE = 'azure',
   TEST = 'test',
-  FLOWISE = 'flowise'
+  FLOWISE = 'flowise',
+  GROQ = 'groq',
 }
 
 export type ConfigType = {
@@ -388,7 +401,7 @@ const getEnvConfig = (envPath: string) => {
     OCO_EMOJI: parseConfigVarValue(process.env.OCO_EMOJI),
     OCO_LANGUAGE: process.env.OCO_LANGUAGE,
     OCO_MESSAGE_TEMPLATE_PLACEHOLDER:
-    process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
+      process.env.OCO_MESSAGE_TEMPLATE_PLACEHOLDER,
     OCO_PROMPT_MODULE: process.env.OCO_PROMPT_MODULE as OCO_PROMPT_MODULE_ENUM,
     OCO_ONE_LINE_COMMIT: parseConfigVarValue(process.env.OCO_ONE_LINE_COMMIT),
     OCO_TEST_MOCK_TYPE: process.env.OCO_TEST_MOCK_TYPE,
@@ -445,9 +458,9 @@ interface GetConfigOptions {
 }
 
 export const getConfig = ({
-  envPath = defaultEnvPath,
-  globalPath = defaultConfigPath
-}: GetConfigOptions = {}): ConfigType => {
+    envPath = defaultEnvPath,
+    globalPath = defaultConfigPath
+  }: GetConfigOptions = {}): ConfigType => {
   const envConfig = getEnvConfig(envPath);
   const globalConfig = getGlobalConfig(globalPath);
 
diff --git a/src/engine/groq.ts b/src/engine/groq.ts
new file mode 100644
index 00000000..baa6410c
--- /dev/null
+++ b/src/engine/groq.ts
@@ -0,0 +1,10 @@
+import { OpenAiConfig, OpenAiEngine } from './openAi';
+
+interface GroqConfig extends OpenAiConfig {}
+
+export class GroqEngine extends OpenAiEngine {
+  constructor(config: GroqConfig) {
+    config.baseURL = 'https://api.groq.com/openai/v1';
+    super(config);
+  }
+}
\ No newline at end of file
diff --git a/src/engine/openAi.ts b/src/engine/openAi.ts
index 2f231eed..ea5d9e9a 100644
--- a/src/engine/openAi.ts
+++ b/src/engine/openAi.ts
@@ -4,7 +4,7 @@ import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitD
 import { tokenCount } from '../utils/tokenCount';
 import { AiEngine, AiEngineConfig } from './Engine';
 
-interface OpenAiConfig extends AiEngineConfig {}
+export interface OpenAiConfig extends AiEngineConfig {}
 
 export class OpenAiEngine implements AiEngine {
   config: OpenAiConfig;
@@ -12,7 +12,12 @@ export class OpenAiEngine implements AiEngine {
 
   constructor(config: OpenAiConfig) {
     this.config = config;
-    this.client = new OpenAI({ apiKey: config.apiKey });
+
+    if (!config.baseURL) {
+      this.client = new OpenAI({ apiKey: config.apiKey });
+    } else {
+      this.client = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
+    }
   }
 
   public generateCommitMessage = async (
diff --git a/src/utils/engine.ts b/src/utils/engine.ts
index f3b3ae0a..5930c2ff 100644
--- a/src/utils/engine.ts
+++ b/src/utils/engine.ts
@@ -7,6 +7,7 @@ import { GeminiEngine } from '../engine/gemini';
 import { OllamaEngine } from '../engine/ollama';
 import { OpenAiEngine } from '../engine/openAi';
 import { TestAi, TestMockType } from '../engine/testAi';
+import { GroqEngine } from '../engine/groq';
 
 export function getEngine(): AiEngine {
   const config = getConfig();
@@ -39,6 +40,9 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.FLOWISE:
       return new FlowiseEngine(DEFAULT_CONFIG);
 
+    case OCO_AI_PROVIDER_ENUM.GROQ:
+      return new GroqEngine(DEFAULT_CONFIG);
+
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }