
Commit

Merge pull request #144 from olasunkanmi-SE/ai-agents
Improve type safety and reusability in BaseEmitter
olasunkanmi-SE authored Jan 28, 2025

Verified: this commit was created on GitHub.com and signed with GitHub's verified signature.
2 parents cafb87c + 0084bef commit 56ee9d5
Showing 12 changed files with 321 additions and 84 deletions.
2 changes: 1 addition & 1 deletion src/agents/orchestrator.ts
@@ -7,7 +7,7 @@ export class Orchestrator implements vscode.Disposable {

constructor(private readonly aiAgent: BaseAiAgent) {
this.disposables.push(
this.aiAgent.onStatus(this.handleStatus.bind(this)),
this.aiAgent.onStatusChange(this.handleStatus.bind(this)),
this.aiAgent.onError(this.handleError.bind(this)),
);
}
82 changes: 61 additions & 21 deletions src/commands/event-generator.ts
@@ -3,7 +3,11 @@ import Anthropic from "@anthropic-ai/sdk";
import { GenerativeModel, GoogleGenerativeAI } from "@google/generative-ai";
import Groq from "groq-sdk";
import * as vscode from "vscode";
import { APP_CONFIG, COMMON, generativeAiModels } from "../application/constant";
import {
APP_CONFIG,
COMMON,
generativeAiModels,
} from "../application/constant";
import { AnthropicWebViewProvider } from "../providers/anthropic";
import { GeminiWebViewProvider } from "../providers/gemini";
import { GroqWebViewProvider } from "../providers/groq";
@@ -38,7 +42,7 @@ export abstract class EventGenerator implements IEventGenerator {
constructor(
private readonly action: string,
_context: vscode.ExtensionContext,
errorMessage?: string
errorMessage?: string,
) {
this.context = _context;
this.error = errorMessage;
@@ -68,21 +72,23 @@ export abstract class EventGenerator implements IEventGenerator {
return getConfigValue(configKey);
}

protected createModel(): { generativeAi: string; model: any; modelName: string } | undefined {
protected createModel():
| { generativeAi: string; model: any; modelName: string }
| undefined {
try {
let model;
let modelName = "";
if (!this.generativeAi) {
vscodeErrorMessage(
"Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name"
"Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name",
);
}
if (this.generativeAi === generativeAiModels.GROQ) {
const apiKey = this.groqApiKey;
modelName = this.groqModel;
if (!apiKey || !modelName) {
vscodeErrorMessage(
"Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name"
"Configuration not found. Go to settings, search for Your coding buddy. Fill up the model and model name",
);
}
model = this.createGroqModel(apiKey);
@@ -108,7 +114,9 @@ export abstract class EventGenerator implements IEventGenerator {
return { generativeAi: this.generativeAi, model, modelName };
} catch (error) {
console.error("Error creating model:", error);
vscode.window.showErrorMessage("An error occurred while creating the model. Please try again.");
vscode.window.showErrorMessage(
"An error occurred while creating the model. Please try again.",
);
}
}

@@ -145,7 +153,9 @@ export abstract class EventGenerator implements IEventGenerator {
return new Groq({ apiKey });
}

protected async generateModelResponse(text: string): Promise<string | Anthropic.Messages.Message | undefined> {
protected async generateModelResponse(
text: string,
): Promise<string | Anthropic.Messages.Message | undefined> {
try {
const activeModel = this.createModel();
if (!activeModel) {
@@ -182,7 +192,7 @@ export abstract class EventGenerator implements IEventGenerator {

if (!response) {
throw new Error(
"Could not generate response. Check your settings, ensure the API keys and Model Name is added properly."
"Could not generate response. Check your settings, ensure the API keys and Model Name is added properly.",
);
}
if (this.action.includes("chart")) {
@@ -191,7 +201,9 @@ export abstract class EventGenerator implements IEventGenerator {
return response;
} catch (error) {
console.error("Error generating response:", error);
vscode.window.showErrorMessage("An error occurred while generating the response. Please try again.");
vscode.window.showErrorMessage(
"An error occurred while generating the response. Please try again.",
);
}
}

@@ -202,12 +214,19 @@ export abstract class EventGenerator implements IEventGenerator {
return inputString;
}

async generateGeminiResponse(model: any, text: string): Promise<string | undefined> {
async generateGeminiResponse(
model: any,
text: string,
): Promise<string | undefined> {
const result = await model.generateContent(text);
return result ? await result.response.text() : undefined;
}

private async anthropicResponse(model: Anthropic, generativeAiModel: string, userPrompt: string) {
private async anthropicResponse(
model: Anthropic,
generativeAiModel: string,
userPrompt: string,
) {
try {
const response = await model.messages.create({
model: generativeAiModel,
@@ -218,14 +237,22 @@ export abstract class EventGenerator implements IEventGenerator {
return response.content[0].text;
} catch (error) {
console.error("Error generating response:", error);
vscode.window.showErrorMessage("An error occurred while generating the response. Please try again.");
vscode.window.showErrorMessage(
"An error occurred while generating the response. Please try again.",
);
return;
}
}

private async groqResponse(model: Groq, prompt: string, generativeAiModel: string): Promise<string | undefined> {
private async groqResponse(
model: Groq,
prompt: string,
generativeAiModel: string,
): Promise<string | undefined> {
try {
const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [];
const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY)
? Memory.get(COMMON.GROQ_CHAT_HISTORY)
: [];
const params = {
messages: [
...chatHistory,
@@ -237,11 +264,14 @@ export abstract class EventGenerator implements IEventGenerator {
model: generativeAiModel,
};

const completion: Groq.Chat.ChatCompletion = await model.chat.completions.create(params);
const completion: Groq.Chat.ChatCompletion =
await model.chat.completions.create(params);
return completion.choices[0]?.message?.content ?? undefined;
} catch (error) {
console.error("Error generating response:", error);
vscode.window.showErrorMessage("An error occurred while generating the response. Please try again.");
vscode.window.showErrorMessage(
"An error occurred while generating the response. Please try again.",
);
return;
}
}
@@ -250,7 +280,9 @@ export abstract class EventGenerator implements IEventGenerator {

abstract createPrompt(text?: string): any;

async generateResponse(message?: string): Promise<string | Anthropic.Messages.Message | undefined> {
async generateResponse(
message?: string,
): Promise<string | Anthropic.Messages.Message | undefined> {
this.showInformationMessage();
let prompt;
const selectedCode = this.getSelectedWindowArea();
@@ -262,7 +294,9 @@ export abstract class EventGenerator implements IEventGenerator {
if (message && selectedCode) {
prompt = await this.createPrompt(`${message} \n ${selectedCode}`);
} else {
message ? (prompt = await this.createPrompt(message)) : (prompt = await this.createPrompt(selectedCode));
message
? (prompt = await this.createPrompt(message))
: (prompt = await this.createPrompt(selectedCode));
}

if (!prompt) {
@@ -345,19 +379,25 @@ export abstract class EventGenerator implements IEventGenerator {
placeHolder: "Enter instructions for CodeBuddy",
ignoreFocusOut: true,
validateInput: (text) => {
return text === "" ? "Enter instructions for CodeBuddy or press Escape to close chat box" : null;
return text === ""
? "Enter instructions for CodeBuddy or press Escape to close chat box"
: null;
},
});
return userPrompt;
} catch (error) {
vscode.window.showInformationMessage(`Error occured while getting user prompt`);
vscode.window.showInformationMessage(
`Error occured while getting user prompt`,
);
console.log(error);
}
}

async execute(message?: string): Promise<void> {
let prompt: string | undefined;
const response = (await this.generateResponse(prompt ? prompt : message)) as string;
const response = (await this.generateResponse(
prompt ? prompt : message,
)) as string;
if (!response) {
vscode.window.showErrorMessage("model not reponding, try again later");
return;
6 changes: 5 additions & 1 deletion src/emitter/agent-emitter.ts
@@ -8,7 +8,7 @@ import {
import * as vscode from "vscode";

export class AgentEventEmitter extends BaseEmitter<IAgentEventMap> {
onStatus: vscode.Event<IStatusEvent> = this.createEvent("onStatus");
onStatusChange: vscode.Event<IStatusEvent> = this.createEvent("onStatus");
onError: vscode.Event<IErrorEvent> = this.createEvent("onError");

public emitError(message: string, code: string) {
@@ -28,4 +28,8 @@ export class AgentEventEmitter extends BaseEmitter<IAgentEventMap> {
timestamp: Date.now(),
});
}

public dispose(): void {
super.dispose();
}
}
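
For context, a minimal consumer sketch (not part of this commit) showing how the renamed onStatusChange event is wired up alongside onError, mirroring the Orchestrator change above; the AgentStatusBar class name is hypothetical, and the event payloads are logged opaquely because their full shapes are not shown here:

import * as vscode from "vscode";
import { AgentEventEmitter } from "./agent-emitter";

export class AgentStatusBar implements vscode.Disposable {
  private readonly disposables: vscode.Disposable[] = [];

  constructor(private readonly agentEvents: AgentEventEmitter) {
    // Each vscode.Event subscription returns a Disposable that must be tracked.
    this.disposables.push(
      this.agentEvents.onStatusChange((event) => {
        console.log(`Agent status: ${JSON.stringify(event)}`);
      }),
      this.agentEvents.onError((event) => {
        vscode.window.showErrorMessage(`Agent error: ${JSON.stringify(event)}`);
      }),
    );
  }

  dispose(): void {
    this.disposables.forEach((disposable) => disposable.dispose());
  }
}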
22 changes: 19 additions & 3 deletions src/emitter/emitter.ts
@@ -1,26 +1,39 @@
import * as vscode from "vscode";
import { Logger } from "../infrastructure/logger/logger";
export class BaseEmitter<EventMap> {
export class BaseEmitter<EventMap extends Record<string, any>> {
protected logger: Logger;
constructor() {
this.logger = new Logger("BaseEmitter");
}
private readonly emitters: Map<keyof EventMap, vscode.EventEmitter<any>> =
new Map();

/**
* Creates a new event for the given event name, reusing an existing emitter if one is already registered.
* @param name The name of the event to create.
* @returns The event that was created or retrieved.
*/
protected createEvent<K extends keyof EventMap>(
name: K,
): vscode.Event<EventMap[K]> {
try {
const emitter = new vscode.EventEmitter<EventMap[K]>();
this.emitters.set(name, emitter);
let emitter = this.emitters.get(name);
if (!emitter) {
emitter = new vscode.EventEmitter<EventMap[K]>();
this.emitters.set(name, emitter);
}
return emitter.event;
} catch (error) {
this.logger.error("Error generating embeddings", error);
throw new Error("Failed to generate embeddings");
}
}

/**
* Emits the given event with the provided data, if an emitter exists for the event name.
* @param name The name of the event to emit.
* @param data The data to emit with the event.
*/
protected emit<K extends keyof EventMap>(name: K, data: EventMap[K]): void {
try {
const emitter = this.emitters.get(name);
@@ -31,6 +44,9 @@ export class BaseEmitter<EventMap> {
}
}

/**
* Disposes of all stored event emitters, freeing up any system resources they were using.
*/
public dispose(): void {
this.emitters.forEach((emitter) => emitter.dispose());
}
114 changes: 92 additions & 22 deletions src/extension.ts
@@ -1,5 +1,10 @@
import * as vscode from "vscode";
import { APP_CONFIG, generativeAiModels, OLA_ACTIONS, USER_MESSAGE } from "./application/constant";
import {
APP_CONFIG,
generativeAiModels,
OLA_ACTIONS,
USER_MESSAGE,
} from "./application/constant";
import { Comments } from "./commands/comment";
import { ExplainCode } from "./commands/explain";
import { FixError } from "./commands/fixError";
@@ -22,14 +27,28 @@ import { FileUploader } from "./services/file-uploader";
import { setUpGenerativeAiModel } from "./services/generative-ai-model-manager";
import { getConfigValue } from "./utils/utils";
import { Memory } from "./memory/base";
import { AgentEventEmitter } from "./emitter/agent-emitter";

const { geminiKey, geminiModel, groqApiKey, groqModel, anthropicApiKey, anthropicModel, grokApiKey, grokModel } =
APP_CONFIG;
const {
geminiKey,
geminiModel,
groqApiKey,
groqModel,
anthropicApiKey,
anthropicModel,
grokApiKey,
grokModel,
} = APP_CONFIG;

const connectDB = async () => {
await dbManager.connect("file:/Users/olasunkanmi/Documents/Github/codebuddy/patterns/dev.db");
await dbManager.connect(
"file:/Users/olasunkanmi/Documents/Github/codebuddy/patterns/dev.db",
);
};

let quickFixCodeAction: vscode.Disposable;
let agentEventEmmitter: AgentEventEmitter;

export async function activate(context: vscode.ExtensionContext) {
try {
Memory.getInstance();
@@ -65,19 +84,52 @@ export async function activate(context: vscode.ExtensionContext) {
generateCodeChart,
inlineChat,
} = OLA_ACTIONS;
const getComment = new Comments(`${USER_MESSAGE} generates the code comments...`, context);
const getInLineChat = new InLineChat(`${USER_MESSAGE} generates a response...`, context);
const generateOptimizeCode = new OptimizeCode(`${USER_MESSAGE} optimizes the code...`, context);
const generateRefactoredCode = new RefactorCode(`${USER_MESSAGE} refactors the code...`, context);
const explainCode = new ExplainCode(`${USER_MESSAGE} explains the code...`, context);
const generateReview = new ReviewCode(`${USER_MESSAGE} reviews the code...`, context);
const codeChartGenerator = new CodeChartGenerator(`${USER_MESSAGE} creates the code chart...`, context);
const getComment = new Comments(
`${USER_MESSAGE} generates the code comments...`,
context,
);
const getInLineChat = new InLineChat(
`${USER_MESSAGE} generates a response...`,
context,
);
const generateOptimizeCode = new OptimizeCode(
`${USER_MESSAGE} optimizes the code...`,
context,
);
const generateRefactoredCode = new RefactorCode(
`${USER_MESSAGE} refactors the code...`,
context,
);
const explainCode = new ExplainCode(
`${USER_MESSAGE} explains the code...`,
context,
);
const generateReview = new ReviewCode(
`${USER_MESSAGE} reviews the code...`,
context,
);
const codeChartGenerator = new CodeChartGenerator(
`${USER_MESSAGE} creates the code chart...`,
context,
);
const codePattern = fileUpload;
const knowledgeBase = new ReadFromKnowledgeBase(`${USER_MESSAGE} generate your code pattern...`, context);
const generateCommitMessage = new GenerateCommitMessage(`${USER_MESSAGE} generates a commit message...`, context);
const generateInterviewQuestions = new InterviewMe(`${USER_MESSAGE} generates interview questions...`, context);
const knowledgeBase = new ReadFromKnowledgeBase(
`${USER_MESSAGE} generate your code pattern...`,
context,
);
const generateCommitMessage = new GenerateCommitMessage(
`${USER_MESSAGE} generates a commit message...`,
context,
);
const generateInterviewQuestions = new InterviewMe(
`${USER_MESSAGE} generates interview questions...`,
context,
);

const generateUnitTests = new GenerateUnitTest(`${USER_MESSAGE} generates unit tests...`, context);
const generateUnitTests = new GenerateUnitTest(
`${USER_MESSAGE} generates unit tests...`,
context,
);

const actionMap = {
[comment]: () => getComment.execute(),
@@ -87,7 +139,11 @@ export async function activate(context: vscode.ExtensionContext) {
[interviewMe]: () => generateInterviewQuestions.execute(),
[generateUnitTest]: () => generateUnitTests.execute(),
[fix]: (errorMessage: string) =>
new FixError(`${USER_MESSAGE} finds a solution to the error...`, context, errorMessage).execute(errorMessage),
new FixError(
`${USER_MESSAGE} finds a solution to the error...`,
context,
errorMessage,
).execute(errorMessage),
[explain]: () => explainCode.execute(),
[pattern]: () => codePattern.uploadFileHandler(),
[knowledge]: () => knowledgeBase.execute(),
@@ -96,18 +152,20 @@ export async function activate(context: vscode.ExtensionContext) {
[inlineChat]: () => getInLineChat.execute(),
};

const subscriptions: vscode.Disposable[] = Object.entries(actionMap).map(([action, handler]) =>
vscode.commands.registerCommand(action, handler)
const subscriptions: vscode.Disposable[] = Object.entries(actionMap).map(
([action, handler]) => vscode.commands.registerCommand(action, handler),
);

const selectedGenerativeAiModel = getConfigValue("generativeAi.option");

const quickFix = new CodeActionsProvider();
const quickFixCodeAction: vscode.Disposable = vscode.languages.registerCodeActionsProvider(
quickFixCodeAction = vscode.languages.registerCodeActionsProvider(
{ scheme: "file", language: "*" },
quickFix
quickFix,
);

agentEventEmmitter = new AgentEventEmitter();

const modelConfigurations: {
[key: string]: {
key: string;
@@ -139,15 +197,27 @@ export async function activate(context: vscode.ExtensionContext) {
if (selectedGenerativeAiModel in modelConfigurations) {
const modelConfig = modelConfigurations[selectedGenerativeAiModel];
const { key, model, webviewProviderClass } = modelConfig;
setUpGenerativeAiModel(context, model, key, webviewProviderClass, subscriptions, quickFixCodeAction);
setUpGenerativeAiModel(
context,
model,
key,
webviewProviderClass,
subscriptions,
quickFixCodeAction,
agentEventEmmitter,
);
}
} catch (error) {
Memory.clear();
vscode.window.showErrorMessage("An Error occured while setting up generative AI model");
vscode.window.showErrorMessage(
"An Error occured while setting up generative AI model",
);
console.log(error);
}
}

export function deactivate(context: vscode.ExtensionContext) {
quickFixCodeAction.dispose();
agentEventEmmitter.dispose();
context.subscriptions.forEach((subscription) => subscription.dispose());
}
29 changes: 29 additions & 0 deletions src/infrastructure/storage/local-storage.ts
@@ -0,0 +1,29 @@
import * as vscode from "vscode";
import { Logger } from "../logger/logger";

export class LocalStorageManager {
private readonly localStorage: vscode.SecretStorage;
private readonly logger: Logger;
constructor(context: vscode.ExtensionContext) {
this.localStorage = context.secrets;
this.logger = new Logger("localStorageManager");
this.localStorage.onDidChange(this.handleOnChange.bind(this));
}

async add(key: string, value: string): Promise<void> {
await this.localStorage.store(key, value);
}

async get(key: string): Promise<string | undefined> {
return await this.localStorage.get(key);
}

async delete(key: string) {
await this.localStorage.delete(key);
}

async handleOnChange(event: vscode.SecretStorageChangeEvent) {
const value = await this.localStorage.get(event.key);
this.logger.info(`Key: ${event.key}, Value: ${value}`);
}
}
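
A minimal usage sketch (hypothetical key name and call site; not part of the commit) for the new LocalStorageManager, which wraps the extension's SecretStorage and logs changes through its onDidChange handler:

import * as vscode from "vscode";
import { LocalStorageManager } from "./infrastructure/storage/local-storage";

export async function rememberLastModel(context: vscode.ExtensionContext): Promise<void> {
  const storage = new LocalStorageManager(context);

  // Hypothetical key; storing a value triggers the manager's onDidChange logging.
  await storage.add("codebuddy.lastModel", "gemini");

  const lastModel = await storage.get("codebuddy.lastModel");
  console.log(`Last selected model: ${lastModel ?? "none"}`);

  await storage.delete("codebuddy.lastModel");
}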
11 changes: 3 additions & 8 deletions src/memory/base.ts
@@ -1,12 +1,7 @@
import { MEMORY_CACHE_OPTIONS } from "../application/constant";

interface ICacheEntry {
value: any;
expiry: number;
}

export class Memory {
private static bank: Map<string, ICacheEntry>;
private static bank: Map<string, any>;
private static instance: Memory;

constructor() {
@@ -20,7 +15,7 @@ export class Memory {
return Memory.instance;
}

static set(key: string, value: any): Map<string, ICacheEntry> {
static set(key: string, value: any): Map<string, any> {
const expiry = Date.now() + MEMORY_CACHE_OPTIONS.sessionTTL;
return Memory.bank.set(key, { value, expiry });
}
@@ -45,7 +40,7 @@ export class Memory {
return Array.from(Memory.bank.keys());
}

static values(): ICacheEntry[] {
static values(): any[] {
return Array.from(Memory.bank.values());
}

47 changes: 37 additions & 10 deletions src/providers/anthropic.ts
@@ -1,8 +1,16 @@
import * as vscode from "vscode";
import { BaseWebViewProvider } from "./base";
import { COMMON, generativeAiModels, GROQ_CONFIG } from "../application/constant";
import {
COMMON,
generativeAiModels,
GROQ_CONFIG,
} from "../application/constant";
import Anthropic from "@anthropic-ai/sdk";
import { createAnthropicClient, getGenerativeAiModel, getXGroKBaseURL } from "../utils/utils";
import {
createAnthropicClient,
getGenerativeAiModel,
getXGroKBaseURL,
} from "../utils/utils";
import { Memory } from "../memory/base";

type Role = "user" | "assistant";
@@ -18,12 +26,15 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider {
apiKey: string,
generativeAiModel: string,
context: vscode.ExtensionContext,
protected baseUrl?: string
protected baseUrl?: string,
) {
super(extensionUri, apiKey, generativeAiModel, context);
}

public async sendResponse(response: string, currentChat: string): Promise<boolean | undefined> {
public async sendResponse(
response: string,
currentChat: string,
): Promise<boolean | undefined> {
try {
const type = currentChat === "bot" ? "bot-response" : "user-input";
if (currentChat === "bot") {
@@ -39,8 +50,13 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider {
}

if (this.chatHistory.length === 2) {
const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : [];
Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]);
const chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY)
? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY)
: [];
Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, [
...chatHistory,
...this.chatHistory,
]);
}
return await this.currentWebView?.webview.postMessage({
type,
@@ -51,14 +67,23 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider {
}
}

async generateResponse(message: string, apiKey?: string, name?: string): Promise<string | undefined> {
async generateResponse(
message: string,
apiKey?: string,
name?: string,
): Promise<string | undefined> {
try {
const { max_tokens } = GROQ_CONFIG;
if (getGenerativeAiModel() === generativeAiModels.GROK) {
this.baseUrl = getXGroKBaseURL();
}
const anthropic: Anthropic = createAnthropicClient(this.apiKey, this.baseUrl);
let chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY) ? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY) : [];
const anthropic: Anthropic = createAnthropicClient(
this.apiKey,
this.baseUrl,
);
let chatHistory = Memory.has(COMMON.ANTHROPIC_CHAT_HISTORY)
? Memory.get(COMMON.ANTHROPIC_CHAT_HISTORY)
: [];

if (chatHistory?.length) {
chatHistory = [...chatHistory, { role: "user", content: message }];
@@ -83,7 +108,9 @@ export class AnthropicWebViewProvider extends BaseWebViewProvider {
} catch (error) {
console.error(error);
Memory.set(COMMON.ANTHROPIC_CHAT_HISTORY, []);
vscode.window.showErrorMessage("Model not responding, please resend your question");
vscode.window.showErrorMessage(
"Model not responding, please resend your question",
);
}
}
}
35 changes: 28 additions & 7 deletions src/providers/gemini.ts
@@ -12,11 +12,19 @@ export interface IHistory {

export class GeminiWebViewProvider extends BaseWebViewProvider {
chatHistory: IHistory[] = [];
constructor(extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext) {
constructor(
extensionUri: vscode.Uri,
apiKey: string,
generativeAiModel: string,
context: vscode.ExtensionContext,
) {
super(extensionUri, apiKey, generativeAiModel, context);
}

async sendResponse(response: string, currentChat: string): Promise<boolean | undefined> {
async sendResponse(
response: string,
currentChat: string,
): Promise<boolean | undefined> {
try {
const type = currentChat === "bot" ? "bot-response" : "user-input";
if (currentChat === "bot") {
@@ -31,8 +39,13 @@ export class GeminiWebViewProvider extends BaseWebViewProvider {
});
}
if (this.chatHistory.length === 2) {
const chatHistory = Memory.has(COMMON.GEMINI_CHAT_HISTORY) ? Memory.get(COMMON.GEMINI_CHAT_HISTORY) : [];
Memory.set(COMMON.GEMINI_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]);
const chatHistory = Memory.has(COMMON.GEMINI_CHAT_HISTORY)
? Memory.get(COMMON.GEMINI_CHAT_HISTORY)
: [];
Memory.set(COMMON.GEMINI_CHAT_HISTORY, [
...chatHistory,
...this.chatHistory,
]);
}
return await this.currentWebView?.webview.postMessage({
type,
@@ -44,11 +57,17 @@ export class GeminiWebViewProvider extends BaseWebViewProvider {
}
}

async generateResponse(apiKey: string, name: string, message: string): Promise<string | undefined> {
async generateResponse(
apiKey: string,
name: string,
message: string,
): Promise<string | undefined> {
try {
const genAi = new GoogleGenerativeAI(apiKey);
const model = genAi.getGenerativeModel({ model: name });
let chatHistory = Memory.has(COMMON.GEMINI_CHAT_HISTORY) ? Memory.get(COMMON.GEMINI_CHAT_HISTORY) : [];
let chatHistory = Memory.has(COMMON.GEMINI_CHAT_HISTORY)
? Memory.get(COMMON.GEMINI_CHAT_HISTORY)
: [];

if (chatHistory?.length) {
chatHistory = [
@@ -89,7 +108,9 @@ export class GeminiWebViewProvider extends BaseWebViewProvider {
return response.text();
} catch (error) {
Memory.set(COMMON.GEMINI_CHAT_HISTORY, []);
vscode.window.showErrorMessage("Model not responding, please resend your question");
vscode.window.showErrorMessage(
"Model not responding, please resend your question",
);
console.error(error);
return;
}
35 changes: 28 additions & 7 deletions src/providers/groq.ts
@@ -12,11 +12,19 @@ export interface IHistory {

export class GroqWebViewProvider extends BaseWebViewProvider {
chatHistory: IHistory[] = [];
constructor(extensionUri: vscode.Uri, apiKey: string, generativeAiModel: string, context: vscode.ExtensionContext) {
constructor(
extensionUri: vscode.Uri,
apiKey: string,
generativeAiModel: string,
context: vscode.ExtensionContext,
) {
super(extensionUri, apiKey, generativeAiModel, context);
}

public async sendResponse(response: string, currentChat: string): Promise<boolean | undefined> {
public async sendResponse(
response: string,
currentChat: string,
): Promise<boolean | undefined> {
try {
const type = currentChat === "bot" ? "bot-response" : "user-input";
if (currentChat === "bot") {
@@ -31,8 +39,13 @@ export class GroqWebViewProvider extends BaseWebViewProvider {
});
}
if (this.chatHistory.length === 2) {
const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [];
Memory.set(COMMON.GROQ_CHAT_HISTORY, [...chatHistory, ...this.chatHistory]);
const chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY)
? Memory.get(COMMON.GROQ_CHAT_HISTORY)
: [];
Memory.set(COMMON.GROQ_CHAT_HISTORY, [
...chatHistory,
...this.chatHistory,
]);
}
// Once the agent task is done, map the memory into the llm brain.
// Send the final answer to the webview here.
@@ -45,14 +58,20 @@ export class GroqWebViewProvider extends BaseWebViewProvider {
}
}

async generateResponse(message: string, apiKey?: string, name?: string): Promise<string | undefined> {
async generateResponse(
message: string,
apiKey?: string,
name?: string,
): Promise<string | undefined> {
try {
const { temperature, max_tokens, top_p, stop } = GROQ_CONFIG;
const groq = new Groq({
apiKey: this.apiKey,
});

let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY) ? Memory.get(COMMON.GROQ_CHAT_HISTORY) : [];
let chatHistory = Memory.has(COMMON.GROQ_CHAT_HISTORY)
? Memory.get(COMMON.GROQ_CHAT_HISTORY)
: [];

if (chatHistory?.length) {
chatHistory = [...chatHistory, { role: "user", content: message }];
@@ -80,7 +99,9 @@ export class GroqWebViewProvider extends BaseWebViewProvider {
} catch (error) {
console.error(error);
Memory.set(COMMON.GROQ_CHAT_HISTORY, []);
vscode.window.showErrorMessage("Model not responding, please resend your question");
vscode.window.showErrorMessage(
"Model not responding, please resend your question",
);
return;
}
}
3 changes: 3 additions & 0 deletions src/services/generative-ai-model-manager.ts
@@ -1,6 +1,7 @@
import * as vscode from "vscode";
import { getConfigValue } from "../utils/utils";
import { ChatManager } from "./chat-manager";
import { AgentEventEmitter } from "../emitter/agent-emitter";

export const setUpGenerativeAiModel = (
context: vscode.ExtensionContext,
@@ -9,6 +10,7 @@ export const setUpGenerativeAiModel = (
webViewProviderClass: any,
subscriptions: vscode.Disposable[],
quickFixCodeAction: vscode.Disposable,
agentEventEmmitter: AgentEventEmitter,
) => {
try {
const apiKey = getConfigValue(key);
@@ -34,6 +36,7 @@ export const setUpGenerativeAiModel = (
quickFixCodeAction,
registerWebViewProvider,
chatWithCodeBuddy,
agentEventEmmitter,
);
} catch (error) {
vscode.window.showErrorMessage(
19 changes: 15 additions & 4 deletions src/utils/utils.ts
@@ -1,6 +1,10 @@
import * as markdownit from "markdown-it";
import * as vscode from "vscode";
import { APP_CONFIG, COMMON, generativeAiModels } from "../application/constant";
import {
APP_CONFIG,
COMMON,
generativeAiModels,
} from "../application/constant";
import Anthropic from "@anthropic-ai/sdk";
import { Memory } from "../memory/base";

@@ -14,7 +18,9 @@ export const formatText = (text?: string): string => {
return "";
};

export const getConfigValue: GetConfigValueType<any> = <T>(key: string): T | undefined => {
export const getConfigValue: GetConfigValueType<any> = <T>(
key: string,
): T | undefined => {
return vscode.workspace.getConfiguration().get<T>(key);
};

@@ -66,7 +72,11 @@ export const getGenerativeAiModel = (): string | undefined => {
return getConfigValue("generativeAi.option");
};

export function getUri(webview: vscode.Webview, extensionUri: vscode.Uri, pathList: string[]) {
export function getUri(
webview: vscode.Webview,
extensionUri: vscode.Uri,
pathList: string[],
) {
return webview.asWebviewUri(vscode.Uri.joinPath(extensionUri, ...pathList));
}

@@ -75,7 +85,8 @@ export function getUri(webview: vscode.Webview, extensionUri: vscode.Uri, pathLi
// and ensure script integrity when using Content Security Policy (CSP)
export const getNonce = () => {
let text = "";
const possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
const possible =
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
for (let i = 0; i < 32; i++) {
text += possible.charAt(Math.floor(Math.random() * possible.length));
}
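
A minimal sketch (the HTML template and policy below are illustrative, not taken from this file) of the typical way getNonce is used: the same nonce is placed in the webview's Content-Security-Policy and on the script tag so that only that script is allowed to run:

import * as vscode from "vscode";
import { getNonce } from "./utils/utils";

export function renderWebviewHtml(scriptUri: vscode.Uri): string {
  const nonce = getNonce();
  // Only scripts carrying this nonce are permitted by the CSP below.
  return `<!DOCTYPE html>
<html>
  <head>
    <meta http-equiv="Content-Security-Policy"
      content="default-src 'none'; script-src 'nonce-${nonce}';">
  </head>
  <body>
    <script nonce="${nonce}" src="${scriptUri}"></script>
  </body>
</html>`;
}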
