From 316110fb9951721a3041a618a86d348eecd8e672 Mon Sep 17 00:00:00 2001
From: Nyaundi Brian
Date: Wed, 20 Mar 2024 20:57:33 +0300
Subject: [PATCH] feat: support for gpt-4-1106-preview (#325)

---
 packages/code-review-gpt/package.json                        | 2 +-
 packages/code-review-gpt/src/common/utils/parseAttributes.ts | 2 +-
 packages/code-review-gpt/src/review/constants.ts             | 4 ++++
 3 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/packages/code-review-gpt/package.json b/packages/code-review-gpt/package.json
index b4ee493a..f00a5cb4 100644
--- a/packages/code-review-gpt/package.json
+++ b/packages/code-review-gpt/package.json
@@ -1,6 +1,6 @@
 {
   "name": "code-review-gpt",
-  "version": "0.1.5",
+  "version": "0.1.6",
   "description": "Your AI code reviewer. Improve code quality and catch bugs before you break production",
   "bin": {
     "code-review-gpt": "./dist/index.js"
diff --git a/packages/code-review-gpt/src/common/utils/parseAttributes.ts b/packages/code-review-gpt/src/common/utils/parseAttributes.ts
index 99db5131..be391b65 100644
--- a/packages/code-review-gpt/src/common/utils/parseAttributes.ts
+++ b/packages/code-review-gpt/src/common/utils/parseAttributes.ts
@@ -30,7 +30,7 @@ const isIFeedbackArray = (input: unknown): input is IFeedback[] =>
   Array.isArray(input) && input.every((entry) => isIFeedback(entry));
 
 export const parseAttributes = (jsonString: string): IFeedback[] => {
-  let encodedJsonString = jsonString;
+  let encodedJsonString = jsonString.trim().startsWith('```json') ? jsonString.trim().slice(8, -4) : jsonString.trim();
   encodedJsonString = encodeDetails(encodedJsonString);
 
   // Parse the JSON string
diff --git a/packages/code-review-gpt/src/review/constants.ts b/packages/code-review-gpt/src/review/constants.ts
index e431e4bb..7696b046 100644
--- a/packages/code-review-gpt/src/review/constants.ts
+++ b/packages/code-review-gpt/src/review/constants.ts
@@ -2,6 +2,10 @@ export const signOff =
   "#### Powered by [Code Review GPT](https://github.com/mattzcarey/code-review-gpt)";
 
 export const modelInfo = [
+  {
+    model: "gpt-4-1106-preview",
+    maxPromptLength: 128000, //128k tokens
+  },
   {
     model: "gpt-4",
     maxPromptLength: 21000, //8k tokens