tested interactive pause and heal plugin, replaced gpt model
DavertMik committed Jul 1, 2023
1 parent f21ed1f commit cd69757
Showing 14 changed files with 5,739 additions and 142 deletions.
192 changes: 192 additions & 0 deletions lib/ai.js
@@ -0,0 +1,192 @@
const { Configuration, OpenAIApi } = require('openai');
const debug = require('debug')('codeceptjs:ai');
const config = require('./config');
const output = require('./output');
const { removeNonInteractiveElements, minifyHtml, splitByChunks } = require('./html');

const defaultConfig = {
model: 'gpt-3.5-turbo-16k',
temperature: 0.1,
}

const htmlConfig = {
maxLength: null,
simplify: true,
minify: true,
interactiveElements: ['a', 'input', 'button', 'select', 'textarea', 'option'],
textElements: ['label', 'h1', 'h2'],
allowedAttrs: ['id', 'for', 'class', 'name', 'type', 'value', 'aria-labelledby', 'aria-label', 'label', 'placeholder', 'title', 'alt', 'src', 'role'],
allowedRoles: ['button', 'checkbox', 'search', 'textbox', 'tab'],
}

class AiAssistant {

constructor() {
this.config = config.get('openai', defaultConfig);
this.htmlConfig = this.config.html || htmlConfig;
delete this.config.html;
this.html = null;
this.response = null;

this.isEnabled = !!process.env.OPENAI_API_KEY;

if (!this.isEnabled) return;

const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
});

this.openai = new OpenAIApi(configuration);
}

setHtmlContext(html) {
let processedHTML = html;

if (this.htmlConfig.simplify) processedHTML = removeNonInteractiveElements(processedHTML, {
interactiveElements: this.htmlConfig.interactiveElements,
allowedAttrs: this.htmlConfig.allowedAttrs,
allowedRoles: this.htmlConfig.allowedRoles,
});
if (this.htmlConfig.minify) processedHTML = minifyHtml(processedHTML);
if (this.htmlConfig.maxLength) processedHTML = splitByChunks(processedHTML, this.htmlConfig.maxLength)[0];

debug(processedHTML);

this.html = processedHTML;
}

getResponse() {
return this.response || '';
}

mockResponse(response) {
this.mockedResponse = response;
}

async createCompletion(messages) {
if (!this.openai) return;

debug(messages)

if (this.mockedResponse) return this.mockedResponse;

this.response = null;

try {
const completion = await this.openai.createChatCompletion({
...this.config,
messages,
});

this.response = completion?.data?.choices?.[0]?.message?.content;

debug(this.response);

return this.response;
} catch (err) {
debug(err.response);
output.print('');
output.error(`OpenAI error: ${err.message}`);
output.error(err?.response?.data?.error?.code);
output.error(err?.response?.data?.error?.message);
return '';
}
}

async healFailedStep(step, err, test) {
if (!this.isEnabled) return [];
if (!this.html) throw new Error('No HTML context provided');

const messages = [
{ role: 'user', content: 'As a test automation engineer I am testing web application using CodeceptJS.' },
{ role: 'user', content: `I want to heal a test that fails. Here is the list of executed steps: ${test.steps.join(', ')}` },
{ role: 'user', content: `Propose how to adjust ${step.toCode()} step to fix the test.` },
{ role: 'user', content: 'Use locators in order of preference: semantic locator by text, CSS, XPath. Use codeblocks marked with ```.' },
{ role: 'user', content: `Here is the error message: ${err.message}` },
{ role: 'user', content: `Here is HTML code of a page where the failure has happened: \n\n${this.html}` },
];

const response = await this.createCompletion(messages);
if (!response) return [];

return parseCodeBlocks(response);
}

async writeSteps(input) {
if (!this.isEnabled) return;
if (!this.html) throw new Error('No HTML context provided');

const snippets = [];

const messages = [
{ role: 'user',
content: `I am test engineer writing test in CodeceptJS
I have opened web page and I want to use CodeceptJS to ${input} on this page
Provide me valid CodeceptJS code to accomplish it
Use only locators from this HTML: \n\n${this.html}` },
{ role: 'user', content: `Propose only CodeceptJS steps code. Do not include Scenario or Feature into response` },

// old prompt
// { role: 'user', content: 'I want to click button Submit using CodeceptJS on this HTML page: <html><body><button>Submit</button></body></html>' },
// { role: 'assistant', content: '```js\nI.click("Submit");\n```' },
// { role: 'user', content: 'I want to click button Submit using CodeceptJS on this HTML page: <html><body><button>Login</button></body></html>' },
// { role: 'assistant', content: 'No suggestions' },
// { role: 'user', content: `Now I want to ${input} on this HTML page using CodeceptJS code` },
// { role: 'user', content: `Provide me with CodeceptJS code to achieve this on THIS page.` },
];
const response = await this.createCompletion(messages);
if (!response) return;
snippets.push(...parseCodeBlocks(response));

debug(snippets[0]);

return snippets[0];
}
}

class DummyAi extends AiAssistant {

constructor() {
super();
this.isEnabled = true;
}

setResponse(response) {
this.response = response;
return this;
}

async createCompletion(messages) {
debug(messages);
return this.response || 'Dummy AI response';
}
}

function parseCodeBlocks(response) {
// Regular expression pattern to match code snippets
const codeSnippetPattern = /```(?:javascript|js|typescript|ts)?\n([\s\S]+?)\n```/g;

// Array to store extracted code snippets
const codeSnippets = [];

// Iterate over matches and extract code snippets
let match;
while ((match = codeSnippetPattern.exec(response)) !== null) {
codeSnippets.push(match[1]);
}

// Remove "Scenario", "Feature", and "require()" lines
const modifiedSnippets = codeSnippets.map(snippet => {
const lines = snippet.split('\n').map(line => line.trim());

const filteredLines = lines.filter(line => !line.includes('I.amOnPage') && !line.startsWith('Scenario') && !line.startsWith('Feature') && !line.includes('= require('));

return filteredLines.join('\n');
});

return modifiedSnippets.filter(snippet => !!snippet);
}

module.exports = AiAssistant;
AiAssistant.DummyAi = DummyAi;
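For reviewers, a minimal sketch of exercising the new `AiAssistant` through its `DummyAi` stub, with no OpenAI key required (the HTML, selector, and mocked response are illustrative, not part of this commit):

```js
const AiAssistant = require('./lib/ai');
const { DummyAi } = AiAssistant;

const ai = new DummyAi();
// Page context is simplified/minified by setHtmlContext() before being sent to the model.
ai.setHtmlContext('<html><body><button id="send">Send</button></body></html>');
// Stub the model output that createCompletion() would normally return.
ai.setResponse('```js\nI.click("#send");\n```');

// writeSteps() extracts the code block from the response and returns the first snippet.
ai.writeSteps('click the Send button').then(snippet => console.log(snippet)); // I.click("#send");
```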
2 changes: 1 addition & 1 deletion lib/cli.js
@@ -168,7 +168,7 @@ class Cli extends Base {
}

// display artifacts in debug mode
if (test.artifacts && Object.keys(test.artifacts).length) {
if (test?.artifacts && Object.keys(test.artifacts).length) {
log += `\n${output.styles.bold('Artifacts:')}`;
for (const artifact of Object.keys(test.artifacts)) {
log += `\n- ${artifact}: ${test.artifacts[artifact]}`;
17 changes: 16 additions & 1 deletion lib/command/interactive.js
@@ -1,8 +1,10 @@
const { getConfig, getTestRoot } = require('./utils');
const recorder = require('../recorder');
const Codecept = require('../codecept');
const Container = require('../container');
const event = require('../event');
const output = require('../output');
const webHelpers = require('../plugin/standardActingHelpers');

module.exports = async function (path, options) {
// Backward compatibility for --profile
@@ -29,9 +31,22 @@ module.exports = async function (path, options) {
});
event.emit(event.test.before, {
title: '',
artifacts: {}
});


const enabledHelpers = Container.helpers();
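// If one of the standard web (acting) helpers is enabled, open the home page before pausing.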
for (const helperName of Object.keys(enabledHelpers)) {
if (webHelpers.includes(helperName)) {
const I = enabledHelpers[helperName];
recorder.add(() => I.amOnPage('/'));
recorder.catchWithoutStop(e => output.print(`Error while loading home page: ${e.message}`));
break;
}
}
require('../pause')();
recorder.add(() => event.emit(event.test.after));
// recorder.catchWithoutStop((err) => console.log(err.stack));
recorder.add(() => event.emit(event.test.after, {}));
recorder.add(() => event.emit(event.suite.after, {}));
recorder.add(() => event.emit(event.all.result, {}));
recorder.add(() => codecept.teardown());
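Since the interactive shell now preloads the home page via `I.amOnPage('/')`, the enabled web helper needs a base `url` in its config for this to resolve; a minimal sketch (helper choice and URL are illustrative):

```js
// codecept.conf.js
exports.config = {
  helpers: {
    Playwright: {
      url: 'http://localhost:3000', // the '/' passed to I.amOnPage() resolves against this
      browser: 'chromium',
      show: true,
    },
  },
};
```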
129 changes: 129 additions & 0 deletions lib/helper/OpenAI.js
@@ -0,0 +1,129 @@
const Helper = require('@codeceptjs/helper');
const AiAssistant = require('../ai');
const standardActingHelpers = require('../plugin/standardActingHelpers');
const Container = require('../container');
const { splitByChunks } = require('../html');

/**
* OpenAI Helper for CodeceptJS.
*
 * This helper class provides integration with the OpenAI GPT-3.5 or GPT-4 language models for generating responses to questions or prompts within the context of web pages. It allows you to obtain intelligent responses based on HTML fragments or general prompts.
 * This helper should be enabled together with a web helper such as Playwright, Puppeteer, or WebDriver so that the HTML context is available.
*
* ## Configuration
*
* This helper should be configured in codecept.json or codecept.conf.js
*
 * * `chunkSize`: (optional, default: 80000) - The maximum number of characters of HTML sent to the OpenAI API at once. HTML is split into chunks of this size to avoid exceeding the token limit. Increase this value if you use a model with a larger context window, such as GPT-4.
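 *
 * A minimal configuration sketch (helper set and values are illustrative):
 *
 * ```js
 * // codecept.conf.js
 * helpers: {
 *   Playwright: {},
 *   OpenAI: {
 *     chunkSize: 80000,
 *   }
 * }
 * ```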
*/
class OpenAI extends Helper {

constructor(config) {
super(config);
this.aiAssistant = new AiAssistant();

this.options = {
chunkSize: 80000,
};
this.options = { ...this.options, ...config };

const helpers = Container.helpers();

for (const helperName of standardActingHelpers) {
if (Object.keys(helpers).indexOf(helperName) > -1) {
this.helper = helpers[helperName];
break;
}
}
}


/**
* Asks the OpenAI GPT language model a question based on the provided prompt within the context of the current page's HTML.
*
* ```js
* I.askGptOnPage('what does this page do?');
* ```
*
* @async
* @param {string} prompt - The question or prompt to ask the GPT model.
* @returns {Promise<string>} - A Promise that resolves to the generated responses from the GPT model, joined by newlines.
*/
async askGptOnPage(prompt) {
const html = await this.helper.grabSource();

const htmlChunks = splitByChunks(html, this.options.chunkSize);

if (htmlChunks.length > 1) this.debug(`Splitting HTML into ${htmlChunks.length} chunks`);

const responses = [];

for (const chunk of htmlChunks) {

const messages = [
{ role: 'user', content: prompt },
{ role: 'user', content: `Within this HTML: ${chunk}` },
]

if (htmlChunks.length > 1) messages.push({ role: 'user', content: `If action is not possible on this page, do not propose anything, I will send another HTML fragment` });

const response = await this.aiAssistant.createCompletion(messages);

console.log(response);

responses.push(response);
}

return responses.join('\n\n');
}

/**
* Asks the OpenAI GPT-3.5 language model a question based on the provided prompt within the context of a specific HTML fragment on the current page.
*
* ```js
* I.askGptOnPageFragment('describe features of this screen', '.screen');
* ```
*
* @async
* @param {string} prompt - The question or prompt to ask the GPT-3.5 model.
* @param {string} locator - The locator or selector used to identify the HTML fragment on the page.
* @returns {Promise<string>} - A Promise that resolves to the generated response from the GPT-3.5 model.
*/
async askGptOnPageFragment(prompt, locator) {

const html = await this.helper.grabHTMLFrom(locator);

const messages = [
{ role: 'user', content: prompt },
{ role: 'user', content: `Within this HTML: ${html}` },
]

const response = await this.aiAssistant.createCompletion(messages);

console.log(response);

return response;
}

/**
 * Send a general request to ChatGPT and return the response.
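 *
 * Example (the prompt text is illustrative):
 *
 * ```js
 * I.askGptGeneralPrompt('what is the best locator strategy for dynamic forms?');
 * ```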
* @param {string} prompt
* @returns
*/
async askGptGeneralPrompt(prompt) {

const messages = [
{ role: 'user', content: prompt },
]

const response = await this.aiAssistant.createCompletion(messages);

console.log(response);

return response;
}


}
