From a3dbe55c7a99fd6c5ff005d214cd3835de6d0905 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 14 Jul 2023 14:09:37 +0100
Subject: [PATCH 01/14] feat: Prompt ChatGPT function
---
prompt-chatgpt/.gitignore | 1 +
prompt-chatgpt/.prettierrc.json | 6 ++
prompt-chatgpt/README.md | 56 +++++++++++++
prompt-chatgpt/package-lock.json | 126 ++++++++++++++++++++++++++++++
prompt-chatgpt/package.json | 18 +++++
prompt-chatgpt/src/environment.js | 35 +++++++++
prompt-chatgpt/src/main.js | 36 +++++++++
prompt-chatgpt/static/index.html | 86 ++++++++++++++++++++
8 files changed, 364 insertions(+)
create mode 100644 prompt-chatgpt/.gitignore
create mode 100644 prompt-chatgpt/.prettierrc.json
create mode 100644 prompt-chatgpt/README.md
create mode 100644 prompt-chatgpt/package-lock.json
create mode 100644 prompt-chatgpt/package.json
create mode 100644 prompt-chatgpt/src/environment.js
create mode 100644 prompt-chatgpt/src/main.js
create mode 100644 prompt-chatgpt/static/index.html
diff --git a/prompt-chatgpt/.gitignore b/prompt-chatgpt/.gitignore
new file mode 100644
index 00000000..b512c09d
--- /dev/null
+++ b/prompt-chatgpt/.gitignore
@@ -0,0 +1 @@
+node_modules
\ No newline at end of file
diff --git a/prompt-chatgpt/.prettierrc.json b/prompt-chatgpt/.prettierrc.json
new file mode 100644
index 00000000..fa51da29
--- /dev/null
+++ b/prompt-chatgpt/.prettierrc.json
@@ -0,0 +1,6 @@
+{
+ "trailingComma": "es5",
+ "tabWidth": 2,
+ "semi": false,
+ "singleQuote": true
+}
diff --git a/prompt-chatgpt/README.md b/prompt-chatgpt/README.md
new file mode 100644
index 00000000..37660b79
--- /dev/null
+++ b/prompt-chatgpt/README.md
@@ -0,0 +1,56 @@
+# OpenAI GPT-3 Chat Completion Function
+
+**Warning:** This function uses the OpenAI API, which is a paid service. You will be charged for each request made to the API. For more information, see the [OpenAI pricing page](https://openai.com/pricing/).
+
+This function enables the interaction with OpenAI's GPT-3 model in a chat-like format. Users send a request with a prompt and receive a text response generated by the model. Additionally, there's a simple GET endpoint that serves an HTML page.
+
+## Environment Variables
+
+To ensure the function operates as intended, ensure the following variables are set:
+
+- **OPENAI_API_KEY**: This is your OpenAI API key.
+- **OPENAI_MAX_TOKENS**: This is the maximum number of tokens that the OpenAI response should contain. Be aware that OpenAI models read and write a maximum number of tokens per API call, which varies depending on the model. For GPT-3.5-turbo, the limit is 4096 tokens.
+
+## Usage
+
+This function supports two types of requests:
+
+1. **Serving the HTML page**
+
+ - **Request Type:** GET
+ - **Response:**
+ - On success, the function will respond with the HTML file contained in the static folder.
+
+2. **Interacting with the GPT-3 Model**
+
+ - **Request Type:** POST
+ - **Body:**
+ - The text string that is used as a prompt for the model.
+ - **Response:**
+ - On success, the function will respond with the text generated by the GPT-3 model.
+ - If the request body is missing, a 400 error will be returned with the message 'Missing body with a prompt.'
+
+## Example
+
+Here's a simple example of how to use the POST endpoint:
+
+**Request:**
+
+```
+POST / HTTP/1.1
+Host: function-host
+Content-Type: text/plain
+
+"What is the weather like today?"
+```
+
+**Response:**
+
+```json
+{
+ "role": "assistant",
+ "content": "As an AI model, I don't have real-time data capabilities, so I'm unable to provide the current weather. However, you can check a reliable weather website or app for this information."
+}
+```
+
+Note: The response can vary as it is generated by the AI model based on the input prompt.
\ No newline at end of file
diff --git a/prompt-chatgpt/package-lock.json b/prompt-chatgpt/package-lock.json
new file mode 100644
index 00000000..e8a91d71
--- /dev/null
+++ b/prompt-chatgpt/package-lock.json
@@ -0,0 +1,126 @@
+{
+ "name": "prompt-chatgpt",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "prompt-chatgpt",
+ "version": "1.0.0",
+ "license": "ISC",
+ "dependencies": {
+ "openai": "^3.3.0"
+ },
+ "devDependencies": {
+ "prettier": "^3.0.0"
+ }
+ },
+ "node_modules/asynckit": {
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+ },
+ "node_modules/combined-stream": {
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+ "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+ "dependencies": {
+ "delayed-stream": "~1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/delayed-stream": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+ "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+ "engines": {
+ "node": ">=0.4.0"
+ }
+ },
+ "node_modules/follow-redirects": {
+ "version": "1.15.2",
+ "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
+ "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
+ "funding": [
+ {
+ "type": "individual",
+ "url": "https://github.com/sponsors/RubenVerborgh"
+ }
+ ],
+ "engines": {
+ "node": ">=4.0"
+ },
+ "peerDependenciesMeta": {
+ "debug": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/form-data": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
+ "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+ "dependencies": {
+ "asynckit": "^0.4.0",
+ "combined-stream": "^1.0.8",
+ "mime-types": "^2.1.12"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/mime-db": {
+ "version": "1.52.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/mime-types": {
+ "version": "2.1.35",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+ "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
+ "dependencies": {
+ "mime-db": "1.52.0"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/openai": {
+ "version": "3.3.0",
+ "resolved": "https://registry.npmjs.org/openai/-/openai-3.3.0.tgz",
+ "integrity": "sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==",
+ "dependencies": {
+ "axios": "^0.26.0",
+ "form-data": "^4.0.0"
+ }
+ },
+ "node_modules/openai/node_modules/axios": {
+ "version": "0.26.1",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-0.26.1.tgz",
+ "integrity": "sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==",
+ "dependencies": {
+ "follow-redirects": "^1.14.8"
+ }
+ },
+ "node_modules/prettier": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz",
+ "integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==",
+ "dev": true,
+ "bin": {
+ "prettier": "bin/prettier.cjs"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/prettier/prettier?sponsor=1"
+ }
+ }
+ }
+}
diff --git a/prompt-chatgpt/package.json b/prompt-chatgpt/package.json
new file mode 100644
index 00000000..e140bd53
--- /dev/null
+++ b/prompt-chatgpt/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "prompt-chatgpt",
+ "version": "1.0.0",
+ "description": "",
+ "main": "src/main.js",
+ "scripts": {
+ "format": "prettier --write src/**/*.js"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC",
+ "dependencies": {
+ "openai": "^3.3.0"
+ },
+ "devDependencies": {
+ "prettier": "^3.0.0"
+ }
+}
diff --git a/prompt-chatgpt/src/environment.js b/prompt-chatgpt/src/environment.js
new file mode 100644
index 00000000..aaee1afa
--- /dev/null
+++ b/prompt-chatgpt/src/environment.js
@@ -0,0 +1,35 @@
+module.exports = function getEnvironment() {
+ return {
+ OPENAI_API_KEY: getRequiredEnv('OPENAI_API_KEY'),
+ OPENAI_MAX_TOKENS: getNumberEnv('OPENAI_MAX_TOKENS') ?? 64,
+ }
+}
+
+/**
+ * @param {string} key
+ * @return {string}
+ */
+function getRequiredEnv(key) {
+ const value = process.env[key]
+ if (value === undefined) {
+ throw new Error(`Environment variable ${key} is not set`)
+ }
+ return value
+}
+
+/**
+ * @param {string} key
+ * @return {number | undefined}
+ */
+function getNumberEnv(key) {
+ const value = process.env[key]
+ if (value === undefined) {
+ return undefined
+ }
+
+ try {
+ return parseInt(value)
+ } catch (e) {
+ throw new Error(`Environment variable ${key} is not a number`)
+ }
+}
diff --git a/prompt-chatgpt/src/main.js b/prompt-chatgpt/src/main.js
new file mode 100644
index 00000000..f1733d59
--- /dev/null
+++ b/prompt-chatgpt/src/main.js
@@ -0,0 +1,36 @@
+import fs from 'fs'
+import path from 'path'
+import { fileURLToPath } from 'url'
+import { OpenAIApi, Configuration } from 'openai'
+import getEnvironment from './environment'
+
+const __filename = fileURLToPath(import.meta.url)
+const __dirname = path.dirname(__filename)
+const staticFolder = path.join(__dirname, '../static')
+
+export default async ({ req, res }) => {
+ const { OPENAI_API_KEY, OPENAI_MAX_TOKENS } = getEnvironment()
+
+ const configuration = new Configuration({
+ apiKey: OPENAI_API_KEY,
+ })
+ const openai = new OpenAIApi(configuration)
+
+ if (req.method === 'GET') {
+ let html = fs.readFileSync(path.join(staticFolder, 'index.html')).toString()
+
+ return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' })
+ }
+
+ if (!req.bodyString) {
+ return res.send('Missing body with a prompt.', 400)
+ }
+
+ const chatCompletion = await openai.createChatCompletion({
+ model: 'gpt-3.5-turbo',
+ max_tokens: OPENAI_MAX_TOKENS,
+ messages: [{ role: 'user', content: req.bodyString }],
+ })
+
+ return res.send(chatCompletion.data.choices[0].message, 200)
+}
diff --git a/prompt-chatgpt/static/index.html b/prompt-chatgpt/static/index.html
new file mode 100644
index 00000000..3e1017f9
--- /dev/null
+++ b/prompt-chatgpt/static/index.html
@@ -0,0 +1,86 @@
+
+
+
+
+
+
+ Prompt ChatGPT Demo
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Prompt ChatGPT Demo
+
+
+
+ This is demo application. You can ue this app to ensure
+ implementation with Chat GPT works properly. Use input below to
+ enter prompts and get a response.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Chat GPT:
+
+
+
+
+
+
+
+
+
+
+
+
+
From 9f51c2508bf19f9c358a2b8cf0c8bcae3ac285e9 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 14 Jul 2023 14:09:57 +0100
Subject: [PATCH 02/14] chore: add type module
---
prompt-chatgpt/package.json | 1 +
1 file changed, 1 insertion(+)
diff --git a/prompt-chatgpt/package.json b/prompt-chatgpt/package.json
index e140bd53..4ec02ca1 100644
--- a/prompt-chatgpt/package.json
+++ b/prompt-chatgpt/package.json
@@ -3,6 +3,7 @@
"version": "1.0.0",
"description": "",
"main": "src/main.js",
+ "type": "module",
"scripts": {
"format": "prettier --write src/**/*.js"
},
From 0f5bdb63bd76263c53cc80ef3bcd8cbf334a1e0d Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Mon, 17 Jul 2023 11:26:15 +0100
Subject: [PATCH 03/14] fix: esm migration
---
prompt-chatgpt/src/environment.js | 16 +++++++++-------
prompt-chatgpt/src/main.js | 5 ++---
2 files changed, 11 insertions(+), 10 deletions(-)
diff --git a/prompt-chatgpt/src/environment.js b/prompt-chatgpt/src/environment.js
index aaee1afa..ba31fc75 100644
--- a/prompt-chatgpt/src/environment.js
+++ b/prompt-chatgpt/src/environment.js
@@ -1,10 +1,3 @@
-module.exports = function getEnvironment() {
- return {
- OPENAI_API_KEY: getRequiredEnv('OPENAI_API_KEY'),
- OPENAI_MAX_TOKENS: getNumberEnv('OPENAI_MAX_TOKENS') ?? 64,
- }
-}
-
/**
* @param {string} key
* @return {string}
@@ -33,3 +26,12 @@ function getNumberEnv(key) {
throw new Error(`Environment variable ${key} is not a number`)
}
}
+
+function EnvironmentService() {
+ return {
+ OPENAI_API_KEY: getRequiredEnv('OPENAI_API_KEY'),
+ OPENAI_MAX_TOKENS: getNumberEnv('OPENAI_MAX_TOKENS') ?? 64,
+ }
+}
+
+export default EnvironmentService
diff --git a/prompt-chatgpt/src/main.js b/prompt-chatgpt/src/main.js
index f1733d59..f531f5c5 100644
--- a/prompt-chatgpt/src/main.js
+++ b/prompt-chatgpt/src/main.js
@@ -2,14 +2,14 @@ import fs from 'fs'
import path from 'path'
import { fileURLToPath } from 'url'
import { OpenAIApi, Configuration } from 'openai'
-import getEnvironment from './environment'
+import EnvironmentService from './environment.js'
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
const staticFolder = path.join(__dirname, '../static')
export default async ({ req, res }) => {
- const { OPENAI_API_KEY, OPENAI_MAX_TOKENS } = getEnvironment()
+ const { OPENAI_API_KEY, OPENAI_MAX_TOKENS } = EnvironmentService()
const configuration = new Configuration({
apiKey: OPENAI_API_KEY,
@@ -18,7 +18,6 @@ export default async ({ req, res }) => {
if (req.method === 'GET') {
let html = fs.readFileSync(path.join(staticFolder, 'index.html')).toString()
-
return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' })
}
From 124c53879f72edcf87cadac57dc109d378c139cc Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Wed, 19 Jul 2023 11:03:46 +0100
Subject: [PATCH 04/14] chore: add semis, del pjson extras
---
prompt-chatgpt/.gitignore | 131 +++++++++++++++++++++++++++++-
prompt-chatgpt/.prettierrc.json | 2 +-
prompt-chatgpt/package.json | 2 -
prompt-chatgpt/src/environment.js | 18 ++--
prompt-chatgpt/src/main.js | 36 ++++----
prompt-chatgpt/static/index.html | 8 +-
6 files changed, 163 insertions(+), 34 deletions(-)
diff --git a/prompt-chatgpt/.gitignore b/prompt-chatgpt/.gitignore
index b512c09d..6a7d6d8e 100644
--- a/prompt-chatgpt/.gitignore
+++ b/prompt-chatgpt/.gitignore
@@ -1 +1,130 @@
-node_modules
\ No newline at end of file
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# Snowpack dependency directory (https://snowpack.dev/)
+web_modules/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Optional stylelint cache
+.stylelintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
\ No newline at end of file
diff --git a/prompt-chatgpt/.prettierrc.json b/prompt-chatgpt/.prettierrc.json
index fa51da29..0a725205 100644
--- a/prompt-chatgpt/.prettierrc.json
+++ b/prompt-chatgpt/.prettierrc.json
@@ -1,6 +1,6 @@
{
"trailingComma": "es5",
"tabWidth": 2,
- "semi": false,
+ "semi": true,
"singleQuote": true
}
diff --git a/prompt-chatgpt/package.json b/prompt-chatgpt/package.json
index 4ec02ca1..b2ae445f 100644
--- a/prompt-chatgpt/package.json
+++ b/prompt-chatgpt/package.json
@@ -8,8 +8,6 @@
"format": "prettier --write src/**/*.js"
},
"keywords": [],
- "author": "",
- "license": "ISC",
"dependencies": {
"openai": "^3.3.0"
},
diff --git a/prompt-chatgpt/src/environment.js b/prompt-chatgpt/src/environment.js
index ba31fc75..e9fc0dc6 100644
--- a/prompt-chatgpt/src/environment.js
+++ b/prompt-chatgpt/src/environment.js
@@ -3,11 +3,11 @@
* @return {string}
*/
function getRequiredEnv(key) {
- const value = process.env[key]
+ const value = process.env[key];
if (value === undefined) {
- throw new Error(`Environment variable ${key} is not set`)
+ throw new Error(`Environment variable ${key} is not set`);
}
- return value
+ return value;
}
/**
@@ -15,15 +15,15 @@ function getRequiredEnv(key) {
* @return {number | undefined}
*/
function getNumberEnv(key) {
- const value = process.env[key]
+ const value = process.env[key];
if (value === undefined) {
- return undefined
+ return undefined;
}
try {
- return parseInt(value)
+ return parseInt(value);
} catch (e) {
- throw new Error(`Environment variable ${key} is not a number`)
+ throw new Error(`Environment variable ${key} is not a number`);
}
}
@@ -31,7 +31,7 @@ function EnvironmentService() {
return {
OPENAI_API_KEY: getRequiredEnv('OPENAI_API_KEY'),
OPENAI_MAX_TOKENS: getNumberEnv('OPENAI_MAX_TOKENS') ?? 64,
- }
+ };
}
-export default EnvironmentService
+export default EnvironmentService;
diff --git a/prompt-chatgpt/src/main.js b/prompt-chatgpt/src/main.js
index f531f5c5..de512b39 100644
--- a/prompt-chatgpt/src/main.js
+++ b/prompt-chatgpt/src/main.js
@@ -1,35 +1,37 @@
-import fs from 'fs'
-import path from 'path'
-import { fileURLToPath } from 'url'
-import { OpenAIApi, Configuration } from 'openai'
-import EnvironmentService from './environment.js'
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+import { OpenAIApi, Configuration } from 'openai';
+import EnvironmentService from './environment.js';
-const __filename = fileURLToPath(import.meta.url)
-const __dirname = path.dirname(__filename)
-const staticFolder = path.join(__dirname, '../static')
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const staticFolder = path.join(__dirname, '../static');
export default async ({ req, res }) => {
- const { OPENAI_API_KEY, OPENAI_MAX_TOKENS } = EnvironmentService()
+ const { OPENAI_API_KEY, OPENAI_MAX_TOKENS } = EnvironmentService();
const configuration = new Configuration({
apiKey: OPENAI_API_KEY,
- })
- const openai = new OpenAIApi(configuration)
+ });
+ const openai = new OpenAIApi(configuration);
if (req.method === 'GET') {
- let html = fs.readFileSync(path.join(staticFolder, 'index.html')).toString()
- return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' })
+ let html = fs
+ .readFileSync(path.join(staticFolder, 'index.html'))
+ .toString();
+ return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' });
}
if (!req.bodyString) {
- return res.send('Missing body with a prompt.', 400)
+ return res.send('Missing body with a prompt.', 400);
}
const chatCompletion = await openai.createChatCompletion({
model: 'gpt-3.5-turbo',
max_tokens: OPENAI_MAX_TOKENS,
messages: [{ role: 'user', content: req.bodyString }],
- })
+ });
- return res.send(chatCompletion.data.choices[0].message, 200)
-}
+ return res.send(chatCompletion.data.choices[0].message, 200);
+};
diff --git a/prompt-chatgpt/static/index.html b/prompt-chatgpt/static/index.html
index 3e1017f9..40198a87 100644
--- a/prompt-chatgpt/static/index.html
+++ b/prompt-chatgpt/static/index.html
@@ -1,4 +1,4 @@
-
+
@@ -8,11 +8,11 @@
From c1006cf3a0c6b185501ca199586d0ce1b7607bc8 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 28 Jul 2023 11:50:27 +0100
Subject: [PATCH 10/14] chore: throwIfMissing pattern
---
node/prompt-chatgpt/env.d.ts | 10 ++++++++++
node/prompt-chatgpt/src/main.js | 17 +++++++++--------
node/prompt-chatgpt/src/utils.js | 12 ++++++++++++
3 files changed, 31 insertions(+), 8 deletions(-)
create mode 100644 node/prompt-chatgpt/env.d.ts
create mode 100644 node/prompt-chatgpt/src/utils.js
diff --git a/node/prompt-chatgpt/env.d.ts b/node/prompt-chatgpt/env.d.ts
new file mode 100644
index 00000000..2a46aaa2
--- /dev/null
+++ b/node/prompt-chatgpt/env.d.ts
@@ -0,0 +1,10 @@
+declare global {
+ namespace NodeJS {
+ interface ProcessEnv {
+ OPENAI_API_KEY?: string;
+ OPENAI_MAX_TOKENS?: string;
+ }
+ }
+}
+
+export {};
diff --git a/node/prompt-chatgpt/src/main.js b/node/prompt-chatgpt/src/main.js
index 19eb0ae3..2b989224 100644
--- a/node/prompt-chatgpt/src/main.js
+++ b/node/prompt-chatgpt/src/main.js
@@ -2,15 +2,14 @@ import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { OpenAIApi, Configuration } from 'openai';
+import { throwIfMissing } from './utils';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const staticFolder = path.join(__dirname, '../static');
-export default async ({ req, res }) => {
- if (!process.env.OPENAI_API_KEY || !process.env.OPENAI_MAX_TOKENS) {
- throw new Error('Missing environment variables.');
- }
+export default async ({ req, res, error }) => {
+ throwIfMissing(process.env, ['OPENAI_API_KEY', 'OPENAI_MAX_TOKENS']);
const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY,
@@ -24,17 +23,19 @@ export default async ({ req, res }) => {
return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' });
}
- if (!req.body.prompt) {
- return res.json({ ok: false, error: 'Missing body with a prompt.' }, 400);
+ try {
+ throwIfMissing(req.body, ['prompt']);
+ } catch (err) {
+ return res.json({ ok: false, error: err.message }, 400);
}
const response = await openai.createChatCompletion({
model: 'gpt-3.5-turbo',
- max_tokens: parseInt(process.env.OPENAI_MAX_TOKENS) ?? 512,
+ max_tokens: parseInt(process.env.OPENAI_MAX_TOKENS ?? '512'),
messages: [{ role: 'user', content: req.body.prompt }],
});
- const completion = response.data.choices[0].message;
+ const completion = response.data.choices[0].message;
if (!completion) {
return res.json({ ok: false, error: 'Failed to query model.' }, 500);
}
diff --git a/node/prompt-chatgpt/src/utils.js b/node/prompt-chatgpt/src/utils.js
new file mode 100644
index 00000000..9e22457d
--- /dev/null
+++ b/node/prompt-chatgpt/src/utils.js
@@ -0,0 +1,12 @@
+/**
+ *
+ * @param {*} obj
+ * @param {string[]} keys
+ */
+export function throwIfMissing(obj, keys) {
+ for (let key of keys) {
+ if (!(key in obj) || !obj[key]) {
+ throw new Error(`Missing required value: ${key}`);
+ }
+ }
+}
From d594634f3d65b0a04609b26c20bae95d6422f6a8 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 28 Jul 2023 11:52:15 +0100
Subject: [PATCH 11/14] chore: collect all missing keys into one error message
---
node/prompt-chatgpt/src/utils.js | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/node/prompt-chatgpt/src/utils.js b/node/prompt-chatgpt/src/utils.js
index 9e22457d..80adbd3b 100644
--- a/node/prompt-chatgpt/src/utils.js
+++ b/node/prompt-chatgpt/src/utils.js
@@ -4,9 +4,13 @@
* @param {string[]} keys
*/
export function throwIfMissing(obj, keys) {
+ const missing = [];
for (let key of keys) {
if (!(key in obj) || !obj[key]) {
- throw new Error(`Missing required value: ${key}`);
+ missing.push(key);
}
}
+ if (missing.length > 0) {
+ throw new Error(`Missing required fields: ${missing.join(', ')}`);
+ }
}
From 076627b3f475001a0ca4730c22d25c0c58728396 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 28 Jul 2023 12:00:31 +0100
Subject: [PATCH 12/14] chore: utils
---
node/prompt-chatgpt/src/main.js | 16 ++++------------
node/prompt-chatgpt/src/utils.js | 20 +++++++++++++++++++-
2 files changed, 23 insertions(+), 13 deletions(-)
diff --git a/node/prompt-chatgpt/src/main.js b/node/prompt-chatgpt/src/main.js
index 2b989224..0ac3a1af 100644
--- a/node/prompt-chatgpt/src/main.js
+++ b/node/prompt-chatgpt/src/main.js
@@ -1,12 +1,5 @@
-import fs from 'fs';
-import path from 'path';
-import { fileURLToPath } from 'url';
import { OpenAIApi, Configuration } from 'openai';
-import { throwIfMissing } from './utils';
-
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const staticFolder = path.join(__dirname, '../static');
+import { getStaticFile, throwIfMissing } from './utils';
export default async ({ req, res, error }) => {
throwIfMissing(process.env, ['OPENAI_API_KEY', 'OPENAI_MAX_TOKENS']);
@@ -17,10 +10,9 @@ export default async ({ req, res, error }) => {
const openai = new OpenAIApi(configuration);
if (req.method === 'GET') {
- let html = fs
- .readFileSync(path.join(staticFolder, 'index.html'))
- .toString();
- return res.send(html, 200, { 'Content-Type': 'text/html; charset=utf-8' });
+ return res.send(getStaticFile('index.html'), 200, {
+ 'Content-Type': 'text/html; charset=utf-8',
+ });
}
try {
diff --git a/node/prompt-chatgpt/src/utils.js b/node/prompt-chatgpt/src/utils.js
index 80adbd3b..5b93ba58 100644
--- a/node/prompt-chatgpt/src/utils.js
+++ b/node/prompt-chatgpt/src/utils.js
@@ -1,7 +1,12 @@
+import path from 'path';
+import { fileURLToPath } from 'url';
+import fs from 'fs';
+
/**
- *
+ * Throws an error if any of the keys are missing from the object
* @param {*} obj
* @param {string[]} keys
+ * @throws {Error}
*/
export function throwIfMissing(obj, keys) {
const missing = [];
@@ -14,3 +19,16 @@ export function throwIfMissing(obj, keys) {
throw new Error(`Missing required fields: ${missing.join(', ')}`);
}
}
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const staticFolder = path.join(__dirname, '../static');
+
+/**
+ * Returns the contents of a file in the static folder
+ * @param {string} fileName
+ * @returns {string} Contents of static/{fileName}
+ */
+export function getStaticFile(fileName) {
+ return fs.readFileSync(path.join(staticFolder, fileName)).toString();
+}
From 31987568f8b6242ae560c4b67a6c3abd77828e41 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 28 Jul 2023 14:35:25 +0100
Subject: [PATCH 13/14] fix: esm import
---
node/prompt-chatgpt/src/main.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/node/prompt-chatgpt/src/main.js b/node/prompt-chatgpt/src/main.js
index 0ac3a1af..84ba522e 100644
--- a/node/prompt-chatgpt/src/main.js
+++ b/node/prompt-chatgpt/src/main.js
@@ -1,5 +1,5 @@
import { OpenAIApi, Configuration } from 'openai';
-import { getStaticFile, throwIfMissing } from './utils';
+import { getStaticFile, throwIfMissing } from './utils.js';
export default async ({ req, res, error }) => {
throwIfMissing(process.env, ['OPENAI_API_KEY', 'OPENAI_MAX_TOKENS']);
From b6ba6650b47d85afefd65a8b3e6f398f67f3b5a6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Matej=20Ba=C4=8Do?=
Date: Sat, 29 Jul 2023 15:48:44 +0000
Subject: [PATCH 14/14] PR review changes
---
node/prompt-chatgpt/README.md | 10 ++++------
node/prompt-chatgpt/src/main.js | 11 +++++------
node/prompt-chatgpt/src/utils.js | 26 +++++++++++++-------------
node/prompt-chatgpt/static/index.html | 4 ++--
4 files changed, 24 insertions(+), 27 deletions(-)
diff --git a/node/prompt-chatgpt/README.md b/node/prompt-chatgpt/README.md
index 95aedca1..2a782efc 100644
--- a/node/prompt-chatgpt/README.md
+++ b/node/prompt-chatgpt/README.md
@@ -1,6 +1,6 @@
-# 🤖 Node OpenAI ChatGPT Function
+# 🤖 Node Prompt ChatGPT Function
-Query the OpenAI GPT-3.5-turbo model for chat completions.
+Ask a question, and let OpenAI GPT-3.5-turbo answer.
## 🧰 Usage
@@ -26,9 +26,7 @@ Response from the model.
```json
{
"ok": true,
- "completion": "Monday's heavy weight,
-Dawning with a sigh of grey,
-Hopeful hearts await."
+ "completion": "Monday's heavy weight, Dawning with a sigh of grey, Hopeful hearts await."
}
```
@@ -73,7 +71,7 @@ A unique key used to authenticate with the OpenAI API. Please note that this is
| Question | Answer |
| ------------- | --------------------------------------------------------------------------- |
| Required | Yes |
-| Sample Value | `d1efb...aec35` |
+| Sample Value | `sk-wzG...vcy` |
| Documentation | [OpenAI Docs](https://platform.openai.com/docs/quickstart/add-your-api-key) |
### OPENAI_MAX_TOKENS
diff --git a/node/prompt-chatgpt/src/main.js b/node/prompt-chatgpt/src/main.js
index 84ba522e..6c9ef7e3 100644
--- a/node/prompt-chatgpt/src/main.js
+++ b/node/prompt-chatgpt/src/main.js
@@ -4,11 +4,6 @@ import { getStaticFile, throwIfMissing } from './utils.js';
export default async ({ req, res, error }) => {
throwIfMissing(process.env, ['OPENAI_API_KEY', 'OPENAI_MAX_TOKENS']);
- const configuration = new Configuration({
- apiKey: process.env.OPENAI_API_KEY,
- });
- const openai = new OpenAIApi(configuration);
-
if (req.method === 'GET') {
return res.send(getStaticFile('index.html'), 200, {
'Content-Type': 'text/html; charset=utf-8',
@@ -21,13 +16,17 @@ export default async ({ req, res, error }) => {
return res.json({ ok: false, error: err.message }, 400);
}
+ const openai = new OpenAIApi(new Configuration({
+ apiKey: process.env.OPENAI_API_KEY,
+ }));
+
const response = await openai.createChatCompletion({
model: 'gpt-3.5-turbo',
max_tokens: parseInt(process.env.OPENAI_MAX_TOKENS ?? '512'),
messages: [{ role: 'user', content: req.body.prompt }],
});
- const completion = response.data.choices[0].message;
+ const completion = response.data?.choices[0]?.message ?? '';
if (!completion) {
return res.json({ ok: false, error: 'Failed to query model.' }, 500);
}
diff --git a/node/prompt-chatgpt/src/utils.js b/node/prompt-chatgpt/src/utils.js
index 5b93ba58..02a201ab 100644
--- a/node/prompt-chatgpt/src/utils.js
+++ b/node/prompt-chatgpt/src/utils.js
@@ -2,6 +2,19 @@ import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const staticFolder = path.join(__dirname, '../static');
+
+/**
+ * Returns the contents of a file in the static folder
+ * @param {string} fileName
+ * @returns {string} Contents of static/{fileName}
+ */
+export function getStaticFile(fileName) {
+ return fs.readFileSync(path.join(staticFolder, fileName)).toString();
+}
+
/**
* Throws an error if any of the keys are missing from the object
* @param {*} obj
@@ -19,16 +32,3 @@ export function throwIfMissing(obj, keys) {
throw new Error(`Missing required fields: ${missing.join(', ')}`);
}
}
-
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const staticFolder = path.join(__dirname, '../static');
-
-/**
- * Returns the contents of a file in the static folder
- * @param {string} fileName
- * @returns {string} Contents of static/{fileName}
- */
-export function getStaticFile(fileName) {
- return fs.readFileSync(path.join(staticFolder, fileName)).toString();
-}
diff --git a/node/prompt-chatgpt/static/index.html b/node/prompt-chatgpt/static/index.html
index b8239d1a..ddbe6912 100644
--- a/node/prompt-chatgpt/static/index.html
+++ b/node/prompt-chatgpt/static/index.html
@@ -46,7 +46,7 @@
Prompt ChatGPT Demo
style="max-width: 50rem"
>
This is demo application. You can ue this app to ensure
- implementation with Chat GPT works properly. Use input below to
+ implementation with ChatGPT works properly. Use input below to
enter prompts and get a response.
@@ -76,7 +76,7 @@