From 94ddd012b49a9dd9452a601c5c968d2be1cdc5f9 Mon Sep 17 00:00:00 2001
From: Alan Cruikshanks
Date: Mon, 26 Jun 2023 18:23:50 +0100
Subject: [PATCH 01/14] Add initial seeding support using users

https://eaflood.atlassian.net/browse/WATER-3981

The [water-abstraction-service](https://github.com/DEFRA/water-abstraction-service) has a mechanism for seeding data for testing that relies on reading in YAML fixture files, sort of. Some content comes from these but other stuff is hard coded into the readers. Also, because you often need to add the ID of a record into something else you're seeding, it supports a system of 'named references' so the fixture-loader knows how to link things. Again though, it depends on whether the specific fixture-loader has been written to support this.

What we've been left with is an extremely complex, brittle seeding mechanism that is also very slow. If only something simpler existed to do this!?

There is, and we're already using it. [Knex](https://knexjs.org/guide/migrations.html#seed-files) supports seeding using the same syntax we already use elsewhere in the app. It's something we've used previously on the [charging-module-api](https://github.com/DEFRA/sroc-charging-module-api). We think this is a better way to go, and it means we can start to move away from the hand-cranked version in **water-abstraction-service**.

This change gets the ball rolling by creating a seed for users, and hooking it into the project so it can be triggered as a package.json script or from an endpoint (needed to support [water-abstraction-acceptance-tests](https://github.com/DEFRA/water-abstraction-acceptance-tests)).

From e4cbff58cb85a4cc85a765c4d8d7106712cc9691 Mon Sep 17 00:00:00 2001
From: Alan Cruikshanks
Date: Tue, 27 Jun 2023 12:47:29 +0100
Subject: [PATCH 02/14] Add package.json run script

This allows you to kick off the seed process from the command line.
---
 package.json | 1 +
 1 file changed, 1 insertion(+)

diff --git a/package.json b/package.json
index 21a5a53968..fee1ab422f 100644
--- a/package.json
+++ b/package.json
@@ -12,6 +12,7 @@
     "migrate:db:test": "NODE_ENV=test knex migrate:latest",
     "rollback:db": "knex migrate:rollback --all",
     "rollback:db:test": "NODE_ENV=test knex migrate:rollback --all",
+    "seed:db": "knex seed:run --knexfile knexfile.application.js",
     "lint": "standard",
     "test": "lab --silent-skips --shuffle",
     "postinstall": "npm run build",

From 290ac7ee9c5c9007074d88e89858cd5ee48eba10 Mon Sep 17 00:00:00 2001
From: Alan Cruikshanks
Date: Tue, 27 Jun 2023 12:48:48 +0100
Subject: [PATCH 03/14] Add default dev/test user password

We know we want to seed the dev/test users as our first 'test' of seeding. We want to take the opportunity to move away from hard coding the password we use in the source.

So, in preparation for this we add a new env var, something we've done on all our previous services.
---
 .env.example              | 1 +
 config/database.config.js | 4 +++-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/.env.example b/.env.example
index eaeb3ebf4d..1f53595d93 100644
--- a/.env.example
+++ b/.env.example
@@ -10,6 +10,7 @@ POSTGRES_HOST=db
 POSTGRES_PORT=5432
 POSTGRES_DB=wabs
 POSTGRES_DB_TEST=wabs_system_test
+DEFAULT_USER_PASSWORD=P@55word

 # Server config
 PORT=3000

diff --git a/config/database.config.js b/config/database.config.js
index b591e32d23..7329f3a536 100644
--- a/config/database.config.js
+++ b/config/database.config.js
@@ -15,7 +15,9 @@ const config = {
   password: process.env.POSTGRES_PASSWORD,
   port: process.env.POSTGRES_PORT,
   database: process.env.POSTGRES_DB,
-  testDatabase: process.env.POSTGRES_DB_TEST
+  testDatabase: process.env.POSTGRES_DB_TEST,
+  // Only used when seeding our dev/test user records
+  defaultUserPassword: process.env.DEFAULT_USER_PASSWORD
 }

 module.exports = config

From 2f235555fd23acdba43ddf4426a6989c7c557cfb Mon Sep 17 00:00:00 2001
From: Alan Cruikshanks
Date: Tue, 27 Jun 2023 12:50:50 +0100
Subject: [PATCH 04/14] Add first seed file for users

First thing to get your head around is that, unlike migrations, Knex always runs _all_ seed files. So, it's important to write them in the knowledge that the data they are seeding might already exist.

For users, our initial seed example, we intend to only ever seed them once. So, we have implemented a method that first checks if a user already exists. If it doesn't, we create the new user. The same applies to the user groups, which determine the permissions a user has in the service.

To be able to log in with these users the password we generate must be recognised by the legacy code. So, we're forced to use the same mechanism currently used in [water-abstraction-tactical-idm](https://github.com/DEFRA/water-abstraction-tactical-idm), even though the package now appears to be no longer maintained.
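For a quick local check, the seeds can be run on their own rather than waiting for a deployment. A usage sketch only; it assumes the same knexfile the `seed:db` script points at, and that the installed Knex CLI supports the `--specific` flag:

```bash
# Run every seed file via the package.json script added earlier in this series
npm run seed:db

# Or, assuming Knex's --specific flag is available, run just the users seed
npx knex seed:run --knexfile knexfile.application.js --specific=01-users.js
```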
--- db/seeds/01-users.js | 136 +++++++++++++++++++++++++++++++++++++++++++ package-lock.json | 11 ++++ package.json | 1 + 3 files changed, 148 insertions(+) create mode 100644 db/seeds/01-users.js diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js new file mode 100644 index 0000000000..f73713c883 --- /dev/null +++ b/db/seeds/01-users.js @@ -0,0 +1,136 @@ +'use strict' + +const bcrypt = require('bcryptjs') +const { randomUUID } = require('crypto') + +const DatabaseConfig = require('../../config/database.config.js') + +const seedUsers = [ + { + userName: 'admin-internal@wrls.gov.uk', + application: 'water_admin', + group: 'super' + }, + { + userName: 'super.user@wrls.gov.uk', + application: 'water_admin', + group: 'super' + }, + { + userName: 'environment.officer@wrls.gov.uk', + application: 'water_admin', + group: 'environment_officer' + }, + { + userName: 'waster.industry.regulatory.services@wrls.gov.uk', + application: 'water_admin', + group: 'wirs' + }, + { + userName: 'billing.data@wrls.gov.uk', + application: 'water_admin', + group: 'billing_and_data' + }, + { + userName: 'permitting.support.centre@wrls.gov.uk', + application: 'water_admin', + group: 'psc' + }, + { + userName: 'external@example.co.uk', + application: 'water_vml' + }, + { + userName: 'jon.lee@example.co.uk', + application: 'water_vml' + }, + { + userName: 'rachel.stevens@example.co.uk', + application: 'water_vml' + } +] + +async function seed (knex) { + await _insertUsersWhereNotExists(knex) + + const users = await _users(knex) + const groups = await _groups(knex) + + seedUsers.forEach((seedUser) => { + const user = users.find(({ userName }) => { + return userName === seedUser.userName + }) + seedUser.userId = user.userId + + if (seedUser.group) { + const userGroup = groups.find(({ group }) => group === seedUser.group) + seedUser.groupId = userGroup.groupId + } + }) + + await _insertUserGroupsWhereNotExists(knex) +} + +async function _insertUsersWhereNotExists (knex) { + const password = _generateHashedPassword() + + for (const seedUser of seedUsers) { + await knex('idm.users') + .select('userId') + .where('userName', seedUser.userName) + .andWhere('application', seedUser.application) + .then(async (results) => { + if (results.length === 0) { + await knex('idm.users') + .insert({ + userName: seedUser.userName, + application: seedUser.application, + password, + userData: '{ "source": "Seeded" }', + resetRequired: 0, + badLogins: 0 + }) + } + }) + } +} + +async function _insertUserGroupsWhereNotExists (knex) { + const seedUsersWithGroups = seedUsers.filter((seedData) => seedData.group) + + for (const seedUser of seedUsersWithGroups) { + await knex('idm.userGroups') + .select('userGroupId') + .where('userId', seedUser.userId) + .andWhere('groupId', seedUser.groupId) + .then(async (results) => { + if (results.length === 0) { + await knex('idm.userGroups') + .insert({ + userGroupId: randomUUID({ disableEntropyCache: true }), + userId: seedUser.userId, + groupId: seedUser.groupId + }) + } + }) + } +} + +function _generateHashedPassword () { + const salt = bcrypt.genSaltSync(10) + + return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, salt) +} + +async function _groups (knex) { + return knex('idm.groups') + .select('groupId', 'group') +} + +async function _users (knex) { + return knex('idm.users') + .select('userId', 'userName') + .whereJsonPath('userData', '$.source', '=', 'Seeded') +} + +module.exports.seed = seed diff --git a/package-lock.json b/package-lock.json index 04d7e6bf80..711950da32 100644 --- 
a/package-lock.json +++ b/package-lock.json @@ -16,6 +16,7 @@ "@hapi/hapi": "^21.1.0", "@hapi/inert": "^7.0.0", "@hapi/vision": "^7.0.0", + "bcryptjs": "^2.4.3", "blipp": "^4.0.2", "dotenv": "^16.0.3", "got": "^12.5.3", @@ -3019,6 +3020,11 @@ } ] }, + "node_modules/bcryptjs": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", + "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==" + }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -10291,6 +10297,11 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, + "bcryptjs": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", + "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==" + }, "binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", diff --git a/package.json b/package.json index fee1ab422f..d9a2d787d0 100644 --- a/package.json +++ b/package.json @@ -31,6 +31,7 @@ "@hapi/hapi": "^21.1.0", "@hapi/inert": "^7.0.0", "@hapi/vision": "^7.0.0", + "bcryptjs": "^2.4.3", "blipp": "^4.0.2", "dotenv": "^16.0.3", "got": "^12.5.3", From 79ba6c6fdc6c58bc065acbbc2ea1b05c61eb55d0 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 12:55:38 +0100 Subject: [PATCH 05/14] Add endpoint to trigger seed process Normally you would run the Knex seed process from the command line as part of your deployment. But we intend to use this mechanism within our acceptance tests. This means we need a method to programmatically trigger the process. This adds a new endpoint, which with some help from Stackoverflow allows us to do that! 
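For reference, a rough example of how the acceptance tests (or a developer) might hit the new endpoint locally. It assumes the app is listening on the `PORT=3000` value from `.env.example`; adjust the host and port to match your environment:

```bash
# Trigger the seed process via the new endpoint; a 204 is returned on success
curl -i -X POST http://localhost:3000/data/seed
```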
--- app/controllers/data/data.controller.js | 32 +++++++++++++++++-------- app/routes/data.routes.js | 17 +++++++++---- app/services/data/seed/seed.service.js | 28 ++++++++++++++++++++++ 3 files changed, 63 insertions(+), 14 deletions(-) create mode 100644 app/services/data/seed/seed.service.js diff --git a/app/controllers/data/data.controller.js b/app/controllers/data/data.controller.js index 1a96a29414..e48319fb7b 100644 --- a/app/controllers/data/data.controller.js +++ b/app/controllers/data/data.controller.js @@ -8,11 +8,21 @@ const Boom = require('@hapi/boom') const DbExportService = require('../../services/db-export/db-export.service.js') +const SeedService = require('../../services/data/seed/seed.service.js') const TearDownService = require('../../services/data/tear-down/tear-down.service.js') -async function tearDown (_request, h) { +/** + * Triggers export of all relevant tables to CSV and then uploads them to S3 + */ +async function dbExport (_request, h) { + DbExportService.go() + + return h.response().code(204) +} + +async function seed (_request, h) { try { - await TearDownService.go() + await SeedService.go() return h.response().code(204) } catch (error) { @@ -20,16 +30,18 @@ async function tearDown (_request, h) { } } -/** - * Triggers export of all relevant tables to CSV and then uploads them to S3 - */ -async function dbExport (_request, h) { - DbExportService.go() +async function tearDown (_request, h) { + try { + await TearDownService.go() - return h.response().code(204) + return h.response().code(204) + } catch (error) { + return Boom.badImplementation(error.message) + } } module.exports = { - tearDown, - dbExport + dbExport, + seed, + tearDown } diff --git a/app/routes/data.routes.js b/app/routes/data.routes.js index 5ee76a4a22..ff81f8cf63 100644 --- a/app/routes/data.routes.js +++ b/app/routes/data.routes.js @@ -3,6 +3,15 @@ const DataController = require('../controllers/data/data.controller.js') const routes = [ + { + method: 'GET', + path: '/data/db-export', + handler: DataController.dbExport, + options: { + description: 'Used to export the database and upload the file to our AWS S3 bucket', + app: { excludeFromProd: true } + } + }, { method: 'POST', path: '/data/tear-down', @@ -13,11 +22,11 @@ const routes = [ } }, { - method: 'GET', - path: '/data/db-export', - handler: DataController.dbExport, + method: 'POST', + path: '/data/seed', + handler: DataController.seed, options: { - description: 'Used to export the database and upload the file to our AWS S3 bucket', + description: 'Used to seed test data in the database', app: { excludeFromProd: true } } } diff --git a/app/services/data/seed/seed.service.js b/app/services/data/seed/seed.service.js new file mode 100644 index 0000000000..90263e07f8 --- /dev/null +++ b/app/services/data/seed/seed.service.js @@ -0,0 +1,28 @@ +'use strict' + +/** + * Runs the Knex seed process programmatically + * @module SeedService + */ + +const { db } = require('../../../../db/db.js') + +/** + * Triggers the Knex seed process programmatically + * + * This is the same as calling `knex seed:run` on the command line. Only we pull in `db.js` because that is our file + * which setups up Knex with the right config and all our 'tweaks'. + * + * In this context you can read `db.seed.run()` as `knex.seed.run()`. + * + * See {@link https://knexjs.org/guide/migrations.html#seed-files | Seed files} for more details. 
+ * + * Credit to {@link https://stackoverflow.com/a/53169879 | Programmatically run knex seed:run} + */ +async function go () { + await db.seed.run() +} + +module.exports = { + go +} From adf87e4366532618a41b0cda019e00dc0bea0044 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 12:57:53 +0100 Subject: [PATCH 06/14] Add some tests We admit they are pretty 'noddy'. But all the work is done by Knex and you don't typically write unit tests for the seed files themselves. --- test/controllers/data/data.controller.test.js | 56 +++++++++++++++---- test/services/data/seed/seed.service.test.js | 37 ++++++++++++ 2 files changed, 82 insertions(+), 11 deletions(-) create mode 100644 test/services/data/seed/seed.service.test.js diff --git a/test/controllers/data/data.controller.test.js b/test/controllers/data/data.controller.test.js index 7058e5d00e..e0dbf06dd3 100644 --- a/test/controllers/data/data.controller.test.js +++ b/test/controllers/data/data.controller.test.js @@ -10,6 +10,7 @@ const { expect } = Code // Things we need to stub const DbExportService = require('../../../app/services/db-export/db-export.service') +const SeedService = require('../../../app/services/data/seed/seed.service.js') const TearDownService = require('../../../app/services/data/tear-down/tear-down.service.js') // For running our service @@ -34,18 +35,37 @@ describe('Data controller', () => { Sinon.restore() }) - describe('POST /data/tear-down', () => { + describe('GET /data/db-export', () => { + const options = { + method: 'GET', + url: '/data/db-export' + } + + describe('when the request succeeds', () => { + beforeEach(async () => { + Sinon.stub(DbExportService, 'go').resolves() + }) + + it('displays the correct message', async () => { + const response = await server.inject(options) + + expect(response.statusCode).to.equal(204) + }) + }) + }) + + describe('POST /data/seed', () => { const options = { method: 'POST', - url: '/data/tear-down' + url: '/data/seed' } describe('when the request succeeds', () => { beforeEach(async () => { - Sinon.stub(TearDownService, 'go').resolves() + Sinon.stub(SeedService, 'go').resolves() }) - it('returns a 204 status', async () => { + it('displays the correct message', async () => { const response = await server.inject(options) expect(response.statusCode).to.equal(204) @@ -53,9 +73,9 @@ describe('Data controller', () => { }) describe('when the request fails', () => { - describe('because the TearDownService errors', () => { + describe('because the SeedService errors', () => { beforeEach(async () => { - Sinon.stub(TearDownService, 'go').rejects() + Sinon.stub(SeedService, 'go').rejects() }) it('returns a 500 status', async () => { @@ -67,22 +87,36 @@ describe('Data controller', () => { }) }) - describe('GET /data/db-export', () => { + describe('POST /data/tear-down', () => { const options = { - method: 'GET', - url: '/data/db-export' + method: 'POST', + url: '/data/tear-down' } describe('when the request succeeds', () => { beforeEach(async () => { - Sinon.stub(DbExportService, 'go').resolves() + Sinon.stub(TearDownService, 'go').resolves() }) - it('displays the correct message', async () => { + it('returns a 204 status', async () => { const response = await server.inject(options) expect(response.statusCode).to.equal(204) }) }) + + describe('when the request fails', () => { + describe('because the TearDownService errors', () => { + beforeEach(async () => { + Sinon.stub(TearDownService, 'go').rejects() + }) + + it('returns a 500 status', async () => { + const response = 
await server.inject(options) + + expect(response.statusCode).to.equal(500) + }) + }) + }) }) }) diff --git a/test/services/data/seed/seed.service.test.js b/test/services/data/seed/seed.service.test.js new file mode 100644 index 0000000000..08ebba2022 --- /dev/null +++ b/test/services/data/seed/seed.service.test.js @@ -0,0 +1,37 @@ +'use strict' + +// Test framework dependencies +const Lab = require('@hapi/lab') +const Code = require('@hapi/code') +const Sinon = require('sinon') + +const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() +const { expect } = Code + +// Things we need to stub +const { db } = require('../../../../db/db.js') + +// Thing under test +const SeedService = require('../../../../app/services/data/seed/seed.service.js') + +describe('Seed service', () => { + let knexRunStub + + beforeEach(async () => { + knexRunStub = Sinon.stub().resolves() + + Sinon.replaceGetter(db, 'seed', () => { + return { run: knexRunStub } + }) + }) + + afterEach(() => { + Sinon.restore() + }) + + it('uses the knex instance we configure to run the seed process', async () => { + await SeedService.go() + + expect(knexRunStub.called).to.be.true() + }) +}) From ce3fa1cfcac1293b5cc52f451e401c9adc084dad Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 13:07:15 +0100 Subject: [PATCH 07/14] Get the alphabet correct! --- app/routes/data.routes.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/app/routes/data.routes.js b/app/routes/data.routes.js index ff81f8cf63..f09d747466 100644 --- a/app/routes/data.routes.js +++ b/app/routes/data.routes.js @@ -14,19 +14,19 @@ const routes = [ }, { method: 'POST', - path: '/data/tear-down', - handler: DataController.tearDown, + path: '/data/seed', + handler: DataController.seed, options: { - description: 'Used to remove the acceptance test data from the database', + description: 'Used to seed test data in the database', app: { excludeFromProd: true } } }, { method: 'POST', - path: '/data/seed', - handler: DataController.seed, + path: '/data/tear-down', + handler: DataController.tearDown, options: { - description: 'Used to seed test data in the database', + description: 'Used to remove the acceptance test data from the database', app: { excludeFromProd: true } } } From 987e539d86f484dc1e10cf0ffb5576c510d1fe6f Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 16:28:45 +0100 Subject: [PATCH 08/14] Refactor insert where not exists pattern @StuAA78 had a great suggestion in the comments to make the pattern of checking if something exists before inserting easier to follow. 
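Boiled down, the refactored shape is roughly this (an illustrative sketch only; `some.table` and `record` are placeholders, not the actual seed code):

```javascript
// Sketch of the insert-where-not-exists pattern using Knex's first()
const existingRecord = await knex('some.table')
  .first('id')
  .where('name', record.name)

if (!existingRecord) {
  await knex('some.table').insert(record)
}
```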
--- db/seeds/01-users.js | 50 +++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index f73713c883..941a1d9a44 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -75,23 +75,22 @@ async function _insertUsersWhereNotExists (knex) { const password = _generateHashedPassword() for (const seedUser of seedUsers) { - await knex('idm.users') - .select('userId') + const existingUser = await knex('idm.users') + .first('userId') .where('userName', seedUser.userName) .andWhere('application', seedUser.application) - .then(async (results) => { - if (results.length === 0) { - await knex('idm.users') - .insert({ - userName: seedUser.userName, - application: seedUser.application, - password, - userData: '{ "source": "Seeded" }', - resetRequired: 0, - badLogins: 0 - }) - } - }) + + if (!existingUser) { + await knex('idm.users') + .insert({ + userName: seedUser.userName, + application: seedUser.application, + password, + userData: '{ "source": "Seeded" }', + resetRequired: 0, + badLogins: 0 + }) + } } } @@ -99,20 +98,19 @@ async function _insertUserGroupsWhereNotExists (knex) { const seedUsersWithGroups = seedUsers.filter((seedData) => seedData.group) for (const seedUser of seedUsersWithGroups) { - await knex('idm.userGroups') + const existingUserGroup = await knex('idm.userGroups') .select('userGroupId') .where('userId', seedUser.userId) .andWhere('groupId', seedUser.groupId) - .then(async (results) => { - if (results.length === 0) { - await knex('idm.userGroups') - .insert({ - userGroupId: randomUUID({ disableEntropyCache: true }), - userId: seedUser.userId, - groupId: seedUser.groupId - }) - } - }) + + if (!existingUserGroup) { + await knex('idm.userGroups') + .insert({ + userGroupId: randomUUID({ disableEntropyCache: true }), + userId: seedUser.userId, + groupId: seedUser.groupId + }) + } } } From 39c43140201e8d7cc1b8029a854b5bd08a9437da Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 16:32:16 +0100 Subject: [PATCH 09/14] Refactor updating our seed data with ID's Another great suggestion by @StuAA78 --- db/seeds/01-users.js | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index 941a1d9a44..8bb72e931b 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -53,20 +53,7 @@ const seedUsers = [ async function seed (knex) { await _insertUsersWhereNotExists(knex) - const users = await _users(knex) - const groups = await _groups(knex) - - seedUsers.forEach((seedUser) => { - const user = users.find(({ userName }) => { - return userName === seedUser.userName - }) - seedUser.userId = user.userId - - if (seedUser.group) { - const userGroup = groups.find(({ group }) => group === seedUser.group) - seedUser.groupId = userGroup.groupId - } - }) + await _updateSeedUsersWithUserIdAndGroupId(knex) await _insertUserGroupsWhereNotExists(knex) } @@ -125,6 +112,23 @@ async function _groups (knex) { .select('groupId', 'group') } +async function _updateSeedUsersWithUserIdAndGroupId (knex) { + const users = await _users(knex) + const groups = await _groups(knex) + + seedUsers.forEach((seedUser) => { + const user = users.find(({ userName }) => { + return userName === seedUser.userName + }) + seedUser.userId = user.userId + + if (seedUser.group) { + const userGroup = groups.find(({ group }) => group === seedUser.group) + seedUser.groupId = userGroup.groupId + } + }) +} + async function _users 
(knex) { return knex('idm.users') .select('userId', 'userName') From 19ce6b04bbee3e3de2d818ab37a83863b51e5be1 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 16:34:14 +0100 Subject: [PATCH 10/14] Sort out the order of functions --- db/seeds/01-users.js | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index 8bb72e931b..0595baf656 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -58,6 +58,17 @@ async function seed (knex) { await _insertUserGroupsWhereNotExists(knex) } +function _generateHashedPassword () { + const salt = bcrypt.genSaltSync(10) + + return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, salt) +} + +async function _groups (knex) { + return knex('idm.groups') + .select('groupId', 'group') +} + async function _insertUsersWhereNotExists (knex) { const password = _generateHashedPassword() @@ -101,17 +112,6 @@ async function _insertUserGroupsWhereNotExists (knex) { } } -function _generateHashedPassword () { - const salt = bcrypt.genSaltSync(10) - - return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, salt) -} - -async function _groups (knex) { - return knex('idm.groups') - .select('groupId', 'group') -} - async function _updateSeedUsersWithUserIdAndGroupId (knex) { const users = await _users(knex) const groups = await _groups(knex) From fc001f944b8d050ec71f11cad84298c31ccb8eba Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 16:15:56 +0100 Subject: [PATCH 11/14] Use our standard convention for module.exports I swear this didn't work when I tried it! Co-authored-by: Stuart Adair <43574728+StuAA78@users.noreply.github.com> --- db/seeds/01-users.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index 0595baf656..46834ba958 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -135,4 +135,6 @@ async function _users (knex) { .whereJsonPath('userData', '$.source', '=', 'Seeded') } -module.exports.seed = seed +module.exports = { + seed +} From 5183c45acb23c46f6021e05d260fe1950434dd39 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Tue, 27 Jun 2023 16:16:22 +0100 Subject: [PATCH 12/14] Typo in comment Co-authored-by: Stuart Adair <43574728+StuAA78@users.noreply.github.com> --- app/services/data/seed/seed.service.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/services/data/seed/seed.service.js b/app/services/data/seed/seed.service.js index 90263e07f8..fcb377d525 100644 --- a/app/services/data/seed/seed.service.js +++ b/app/services/data/seed/seed.service.js @@ -11,7 +11,7 @@ const { db } = require('../../../../db/db.js') * Triggers the Knex seed process programmatically * * This is the same as calling `knex seed:run` on the command line. Only we pull in `db.js` because that is our file - * which setups up Knex with the right config and all our 'tweaks'. + * which sets up Knex with the right config and all our 'tweaks'. * * In this context you can read `db.seed.run()` as `knex.seed.run()`. 
* From a19577a41c38e9d2d0bb8419678b2a88bc863817 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Wed, 28 Jun 2023 17:26:51 +0100 Subject: [PATCH 13/14] Implement suggestion Co-authored-by: Stuart Adair <43574728+StuAA78@users.noreply.github.com> --- db/seeds/01-users.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index 46834ba958..8e55f3923d 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -117,9 +117,7 @@ async function _updateSeedUsersWithUserIdAndGroupId (knex) { const groups = await _groups(knex) seedUsers.forEach((seedUser) => { - const user = users.find(({ userName }) => { - return userName === seedUser.userName - }) + const user = users.find(({ userName }) => userName === seedUser.userName) seedUser.userId = user.userId if (seedUser.group) { From a5bd591a2a35c7136ce90cc0b280fc2eca2d3b28 Mon Sep 17 00:00:00 2001 From: Alan Cruikshanks Date: Thu, 6 Jul 2023 11:37:22 +0100 Subject: [PATCH 14/14] Implement @Jozzey salt suggestion --- db/seeds/01-users.js | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js index 8e55f3923d..ed6462ac03 100644 --- a/db/seeds/01-users.js +++ b/db/seeds/01-users.js @@ -59,9 +59,12 @@ async function seed (knex) { } function _generateHashedPassword () { - const salt = bcrypt.genSaltSync(10) - - return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, salt) + // 10 is the number of salt rounds to perform to generate the salt. The legacy code uses + // const salt = bcrypt.genSaltSync(10) to pre-generate the salt before passing it to hashSync(). But this is + // intended for operations where you need to hash a large number of values. If you just pass in a number bcrypt will + // autogenerate the salt for you. + // https://github.com/kelektiv/node.bcrypt.js#usage + return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, 10) } async function _groups (knex) {