feat: create some abstraction in llms
mattzcarey committed Jul 17, 2023
1 parent 4a976b0 commit 0a64bc4
Showing 11 changed files with 162 additions and 89 deletions.
17 changes: 14 additions & 3 deletions README.md
@@ -93,14 +93,25 @@ Run `npm i code-review-gpt && npx code-review-gpt` in the root directory of a gi

Run `code-review-gpt` in the root directory of a git repository.

### Commands

- `code-review-gpt review` - Runs the code review on the staged files.
- `code-review-gpt configure` - Runs a setup tool to configure the application.

### Options

- `--ci` - Runs the application in CI mode. In CI mode, the BASE_SHA and GITHUB_SHA environment variables determine which files to review, and the GITHUB_TOKEN environment variable is used to post the review results as a comment on the pull request (see the sketch after this list).

- `--model` - The model to use for the review. Defaults to `gpt-4`. You can use any OpenAI model you have access to.
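
For context on the `--ci` option above, here is a minimal, hypothetical sketch of how those environment variables might be read. The repository's actual helper (`getGitHubEnvVariables` in `src/config.ts`) is not shown in this diff and may differ:

```ts
// Hypothetical sketch only: the real getGitHubEnvVariables in src/config.ts is
// not part of this diff and may differ.
export const getGitHubEnvVariables = (): {
  baseSha: string;
  githubSha: string;
  githubToken: string;
} => {
  const baseSha = process.env.BASE_SHA;
  const githubSha = process.env.GITHUB_SHA;
  const githubToken = process.env.GITHUB_TOKEN;

  if (!baseSha || !githubSha || !githubToken) {
    throw new Error(
      "BASE_SHA, GITHUB_SHA and GITHUB_TOKEN must be set when running with --ci"
    );
  }

  return { baseSha, githubSha, githubToken };
};
```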

## Roadmap

- [ ] Build a smarter way to find the exact code to review
- [ ] VSCode extension
- [ ] Use some embeddings and vector store to build a knowledge graph
- [ ] Use some embeddings and vector store to build a knowledge graph of the repo to make better suggestions
- [ ] Prompt engineering to refine the prompt
- [ ] Support different LLMs
- [ ] Cash in on the cloud offering
- [ ] Support different LLMs... Private, HuggingFace, Azure etc.
- [ ] Build out the cloud offering

## Sponsors ❤️

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "code-review-gpt",
"version": "0.0.20",
"version": "0.0.21",
"description": "Your AI code reviewer",
"bin": {
"code-review-gpt": "./dist/index.js"
49 changes: 28 additions & 21 deletions src/args.ts
@@ -3,44 +3,51 @@ import dotenv from "dotenv";

dotenv.config();

const handleNoCommand = async () => {
const inquirer = await import("inquirer");
const questions = [
{
type: "list",
name: "command",
message: "What do you want to do?",
choices: [
{ name: "Review the staged files", value: "review" },
{
name: "Configure the script (Recommended for first time use)",
value: "configure",
},
],
},
];

const answers = await inquirer.default.prompt(questions);
return answers.command;
};

export const getYargs = async () => {
const argv = yargs
.option("ci", {
description: "Indicate that the script is running on a CI environment",
type: "boolean",
default: false,
})
.option("model", {
description: "The model to use for generating the review",
type: "string",
default: "gpt-4",
})
.command("review", "Review the pull request")
.command("configure", "Configure the script")
.parseSync();

if (argv.ci) {
argv._[0] = "review";
console.info(
"Running in CI mode, defaulting to review."
);
console.info("Running in CI mode, defaulting to review.");
return argv;
}

if (!argv._[0]) {
const inquirer = await import("inquirer");
const questions = [
{
type: "list",
name: "command",
message: "What do you want to do?",
choices: [
{ name: "Review the staged files", value: "review" },
{
name: "Configure the script (Recommended for first time use)",
value: "configure",
},
],
},
];

const answers = await inquirer.default.prompt(questions);
argv._[0] = answers.command; // Update the command based on user's choice
argv._[0] = await handleNoCommand();
}

return argv;
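
For orientation, a hedged sketch of how the refactored `getYargs` might be consumed by a CLI entry point. The actual `src/index.ts` is not part of this commit, so the dispatch below is an assumption:

```ts
// Hypothetical entry point; the real src/index.ts is not shown in this commit.
import { getYargs } from "./args";
import { review } from "./review";

const main = async (): Promise<void> => {
  const argv = await getYargs();

  switch (argv._[0]) {
    case "review":
      await review(argv);
      break;
    case "configure":
      // The configure flow is not shown in this diff.
      console.info("Running configuration...");
      break;
    default:
      console.error(`Unknown command: ${String(argv._[0])}`);
      process.exitCode = 1;
  }
};

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```
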
51 changes: 0 additions & 51 deletions src/review/askAI.ts

This file was deleted.

4 changes: 2 additions & 2 deletions src/review/commentOnPR.ts → src/review/ci/commentOnPR.ts
@@ -1,6 +1,6 @@
import { context, getOctokit } from "@actions/github";
import { getGitHubEnvVariables } from "../config";
import { signOff } from "./constants";
import { getGitHubEnvVariables } from "../../config";
import { signOff } from "../constants";

const getToken = () => {
const { githubToken } = getGitHubEnvVariables();
13 changes: 7 additions & 6 deletions src/review/index.ts
@@ -1,8 +1,7 @@
import { Argv } from "yargs";
import { askAI } from "./askAI";
import { commentOnPR } from "./commentOnPR";
import { constructPromptsArray } from "./constructPrompt";
import { getFileNames } from "./getFileNames";
import { commentOnPR } from "./ci/commentOnPR";
import { constructPromptsArray } from "./prompt/constructPrompt";
import { getFileNames } from "./prompt/getFileNames";
import { askAi } from "./llm/askAi";

interface ReviewArgs {
[x: string]: unknown;
@@ -13,10 +12,12 @@ interface ReviewArgs {

export const review = async (yargs: ReviewArgs) => {
const isCi = yargs.ci;
const modelName = yargs.model as string;

const fileNames = await getFileNames(isCi);
const prompts = await constructPromptsArray(fileNames);
const response = await askAI(prompts);

const response = await askAi(prompts, modelName);

if (isCi) {
await commentOnPR(response);
25 changes: 25 additions & 0 deletions src/review/llm/AIModel.ts
@@ -0,0 +1,25 @@
import { OpenAIChat } from "langchain/llms/openai";

interface IAIModel {
  modelName: string;
  temperature: number;
  apiKey: string;
}

class AIModel {
  private model: OpenAIChat;

  constructor(options: IAIModel) {
    this.model = new OpenAIChat({
      openAIApiKey: options.apiKey,
      modelName: options.modelName,
      temperature: options.temperature,
    });
  }

  public async callModel(prompt: string): Promise<string> {
    return this.model.call(prompt);
  }
}

export default AIModel;
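
`AIModel` wraps LangChain's `OpenAIChat` behind a single `callModel` method. As a hedged illustration of where this abstraction could go (per the "Support different LLMs" roadmap item), callers could depend on a small interface rather than the concrete class — this sketch is not part of the commit:

```ts
// Hypothetical sketch; not part of this commit. Any backend that can turn a prompt
// into a string (Azure OpenAI, a self-hosted model, etc.) could satisfy this contract.
export interface ILanguageModel {
  callModel(prompt: string): Promise<string>;
}

// A trivial stand-in implementation, useful for local testing without API calls.
export class EchoModel implements ILanguageModel {
  public async callModel(prompt: string): Promise<string> {
    return `Echoing ${prompt.length} characters of prompt.`;
  }
}
```
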
22 changes: 22 additions & 0 deletions src/review/llm/askAI.ts
@@ -0,0 +1,22 @@
import { createSummary, processFeedbacks } from "./feedbackProcessor";
import AIModel from "./AIModel";
import { openAIApiKey } from "../../config";

export const askAi = async (
  prompts: string[],
  modelName: string
): Promise<string> => {
  console.info("Asking the experts...");

  const model = new AIModel({
    modelName: modelName,
    temperature: 0.0,
    apiKey: openAIApiKey(),
  });

  const feedbacks = await processFeedbacks(model, prompts);

  const summary = await createSummary(model, feedbacks);

  return `${feedbacks.join("\n---\n")}\n\n---\n\n${summary}`;
};
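
A hedged usage sketch for the `askAi` function above. The prompt strings and model name are illustrative; the only assumption is that `openAIApiKey()` can resolve a valid OpenAI key from the environment:

```ts
// Hypothetical usage; prompt contents are illustrative only.
import { askAi } from "./askAI";

const prompts = [
  "Review the following diff and point out bugs:\n<diff for file A>",
  "Review the following diff and point out bugs:\n<diff for file B>",
];

askAi(prompts, "gpt-4")
  .then((review) => console.log(review))
  .catch((error) => console.error("Review failed:", error));
```
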
58 changes: 58 additions & 0 deletions src/review/llm/feedbackProcessor.ts
@@ -0,0 +1,58 @@
import AIModel from "./AIModel";
import { completionPrompt } from "../constants";

interface IFeedback {
  feedback: string;
}

const collectAndLogFeedback = async (
  feedbackPromise: Promise<string>
): Promise<IFeedback> => {
  try {
    const feedback = await feedbackPromise;
    console.log(feedback);
    return { feedback };
  } catch (error) {
    console.error(`Error in processing prompt`, error);
    throw error;
  }
};

const createSummary = async (
  model: AIModel,
  feedbacks: string[]
): Promise<string> => {
  const finalPrompt = completionPrompt.replace(
    "{feedback}",
    feedbacks.join("\n---\n")
  );

  const summary = await model.callModel(finalPrompt);
  console.log(summary);
  return summary;
};

const processFeedbacks = async (
  model: AIModel,
  prompts: string[]
): Promise<string[]> => {
  const feedbackPromises = prompts.map((prompt) => model.callModel(prompt));

  const feedbackResults = await Promise.allSettled(
    feedbackPromises.map(collectAndLogFeedback)
  );

  const feedbacks = feedbackResults.reduce(
    (accumulatedFeedbacks, feedbackResult) => {
      if (feedbackResult.status === "fulfilled") {
        accumulatedFeedbacks.push(feedbackResult.value.feedback);
      }
      return accumulatedFeedbacks;
    },
    [] as string[]
  );

  return feedbacks;
};

export { createSummary, processFeedbacks };
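
Because `processFeedbacks` wraps each model call in `Promise.allSettled`, a single failing prompt does not abort the whole review: the error is logged by `collectAndLogFeedback` and the reduce keeps only fulfilled results. A small hedged sketch with a stubbed model (not part of the commit) illustrates this:

```ts
// Hypothetical stub illustrating the Promise.allSettled behaviour described above.
import AIModel from "./AIModel";
import { processFeedbacks } from "./feedbackProcessor";

const flakyModel = {
  callModel: async (prompt: string): Promise<string> => {
    if (prompt.includes("boom")) {
      throw new Error("model call failed");
    }
    return `LGTM: ${prompt}`;
  },
} as unknown as AIModel;

processFeedbacks(flakyModel, ["fine prompt", "boom prompt"]).then((feedbacks) => {
  // Only the successful prompt's feedback survives; the failure was logged and skipped.
  console.log(feedbacks); // ["LGTM: fine prompt"]
});
```
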
src/review/constructPrompt.ts → src/review/prompt/constructPrompt.ts
@@ -3,7 +3,7 @@ import {
filePromptTemplate,
maxPromptLength,
continuationPrompt,
} from "./constants";
} from "../constants";
import { readFile } from "fs/promises";

const appendToLastPrompt = (prompts: string[], text: string): string[] => {
src/review/getFileNames.ts → src/review/prompt/getFileNames.ts
@@ -1,14 +1,14 @@
import { exec } from "child_process";
import { extname, join } from "path";
import { supportedFiles } from "./constants";
import { getGitHubEnvVariables } from "../config";
import { supportedFiles } from "../constants";
import { getGitHubEnvVariables } from "../../config";

const gitCommand = async (isCi: boolean): Promise<string> => {
if (isCi) {
const { githubSha, baseSha } = getGitHubEnvVariables();
return `git diff --name-only --diff-filter=ACMRT ${baseSha} ${githubSha}`;
return `git diff --name-only --diff-filter=AMT ${baseSha} ${githubSha}`;
} else {
return "git diff --name-only --diff-filter=ACMRT";
return "git diff --name-only --diff-filter=AMT --cached";
}
};

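The remainder of this file is collapsed in the diff view. As a hedged sketch (the actual implementation is not shown and may differ), the command string returned by `gitCommand` could be executed and split into file names roughly like this:

```ts
// Hypothetical sketch, assumed to sit alongside gitCommand in the same module.
import { exec } from "child_process";
import { promisify } from "util";

const execAsync = promisify(exec);

// e.g. const fileNames = await runGitCommand(await gitCommand(isCi));
const runGitCommand = async (command: string): Promise<string[]> => {
  const { stdout } = await execAsync(command);

  // The real getFileNames presumably also filters by supportedFiles and joins paths.
  return stdout
    .split("\n")
    .map((fileName) => fileName.trim())
    .filter((fileName) => fileName.length > 0);
};
```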
