diff --git a/.env.example b/.env.example index eaeb3ebf4d..1f53595d93 100644 --- a/.env.example +++ b/.env.example @@ -10,6 +10,7 @@ POSTGRES_HOST=db POSTGRES_PORT=5432 POSTGRES_DB=wabs POSTGRES_DB_TEST=wabs_system_test +DEFAULT_USER_PASSWORD=P@55word # Server config PORT=3000 diff --git a/app/controllers/data/data.controller.js b/app/controllers/data/data.controller.js index 1a96a29414..e48319fb7b 100644 --- a/app/controllers/data/data.controller.js +++ b/app/controllers/data/data.controller.js @@ -8,11 +8,21 @@ const Boom = require('@hapi/boom') const DbExportService = require('../../services/db-export/db-export.service.js') +const SeedService = require('../../services/data/seed/seed.service.js') const TearDownService = require('../../services/data/tear-down/tear-down.service.js') -async function tearDown (_request, h) { +/** + * Triggers export of all relevant tables to CSV and then uploads them to S3 + */ +async function dbExport (_request, h) { + DbExportService.go() + + return h.response().code(204) +} + +async function seed (_request, h) { try { - await TearDownService.go() + await SeedService.go() return h.response().code(204) } catch (error) { @@ -20,16 +30,18 @@ async function tearDown (_request, h) { } } -/** - * Triggers export of all relevant tables to CSV and then uploads them to S3 - */ -async function dbExport (_request, h) { - DbExportService.go() +async function tearDown (_request, h) { + try { + await TearDownService.go() - return h.response().code(204) + return h.response().code(204) + } catch (error) { + return Boom.badImplementation(error.message) + } } module.exports = { - tearDown, - dbExport + dbExport, + seed, + tearDown } diff --git a/app/routes/data.routes.js b/app/routes/data.routes.js index 5ee76a4a22..f09d747466 100644 --- a/app/routes/data.routes.js +++ b/app/routes/data.routes.js @@ -3,21 +3,30 @@ const DataController = require('../controllers/data/data.controller.js') const routes = [ + { + method: 'GET', + path: '/data/db-export', + handler: DataController.dbExport, + options: { + description: 'Used to export the database and upload the file to our AWS S3 bucket', + app: { excludeFromProd: true } + } + }, { method: 'POST', - path: '/data/tear-down', - handler: DataController.tearDown, + path: '/data/seed', + handler: DataController.seed, options: { - description: 'Used to remove the acceptance test data from the database', + description: 'Used to seed test data in the database', app: { excludeFromProd: true } } }, { - method: 'GET', - path: '/data/db-export', - handler: DataController.dbExport, + method: 'POST', + path: '/data/tear-down', + handler: DataController.tearDown, options: { - description: 'Used to export the database and upload the file to our AWS S3 bucket', + description: 'Used to remove the acceptance test data from the database', app: { excludeFromProd: true } } } diff --git a/app/services/data/seed/seed.service.js b/app/services/data/seed/seed.service.js new file mode 100644 index 0000000000..fcb377d525 --- /dev/null +++ b/app/services/data/seed/seed.service.js @@ -0,0 +1,28 @@ +'use strict' + +/** + * Runs the Knex seed process programmatically + * @module SeedService + */ + +const { db } = require('../../../../db/db.js') + +/** + * Triggers the Knex seed process programmatically + * + * This is the same as calling `knex seed:run` on the command line. Only we pull in `db.js` because that is our file + * which sets up Knex with the right config and all our 'tweaks'. + * + * In this context you can read `db.seed.run()` as `knex.seed.run()`. 
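+ *
+ * `db.seed.run()` also accepts an options object, for example to run just a single seed file. A sketch
+ * only, assuming our user seed keeps its current file name:
+ *
+ *   await db.seed.run({ specific: '01-users.js' })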
+ * + * See {@link https://knexjs.org/guide/migrations.html#seed-files | Seed files} for more details. + * + * Credit to {@link https://stackoverflow.com/a/53169879 | Programmatically run knex seed:run} + */ +async function go () { + await db.seed.run() +} + +module.exports = { + go +} diff --git a/config/database.config.js b/config/database.config.js index b591e32d23..7329f3a536 100644 --- a/config/database.config.js +++ b/config/database.config.js @@ -15,7 +15,9 @@ const config = { password: process.env.POSTGRES_PASSWORD, port: process.env.POSTGRES_PORT, database: process.env.POSTGRES_DB, - testDatabase: process.env.POSTGRES_DB_TEST + testDatabase: process.env.POSTGRES_DB_TEST, + // Only used when seeding our dev/test user records + defaultUserPassword: process.env.DEFAULT_USER_PASSWORD } module.exports = config diff --git a/db/seeds/01-users.js b/db/seeds/01-users.js new file mode 100644 index 0000000000..ed6462ac03 --- /dev/null +++ b/db/seeds/01-users.js @@ -0,0 +1,141 @@ +'use strict' + +const bcrypt = require('bcryptjs') +const { randomUUID } = require('crypto') + +const DatabaseConfig = require('../../config/database.config.js') + +const seedUsers = [ + { + userName: 'admin-internal@wrls.gov.uk', + application: 'water_admin', + group: 'super' + }, + { + userName: 'super.user@wrls.gov.uk', + application: 'water_admin', + group: 'super' + }, + { + userName: 'environment.officer@wrls.gov.uk', + application: 'water_admin', + group: 'environment_officer' + }, + { + userName: 'waster.industry.regulatory.services@wrls.gov.uk', + application: 'water_admin', + group: 'wirs' + }, + { + userName: 'billing.data@wrls.gov.uk', + application: 'water_admin', + group: 'billing_and_data' + }, + { + userName: 'permitting.support.centre@wrls.gov.uk', + application: 'water_admin', + group: 'psc' + }, + { + userName: 'external@example.co.uk', + application: 'water_vml' + }, + { + userName: 'jon.lee@example.co.uk', + application: 'water_vml' + }, + { + userName: 'rachel.stevens@example.co.uk', + application: 'water_vml' + } +] + +async function seed (knex) { + await _insertUsersWhereNotExists(knex) + + await _updateSeedUsersWithUserIdAndGroupId(knex) + + await _insertUserGroupsWhereNotExists(knex) +} + +function _generateHashedPassword () { + // 10 is the number of salt rounds to perform to generate the salt. The legacy code uses + // const salt = bcrypt.genSaltSync(10) to pre-generate the salt before passing it to hashSync(). But this is + // intended for operations where you need to hash a large number of values. If you just pass in a number bcrypt will + // autogenerate the salt for you. 
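+  //
+  // In other words, both of the following produce a hash with the same cost factor; the second form is
+  // simply what this function uses (shown here for illustration only):
+  //
+  //   bcrypt.hashSync(DatabaseConfig.defaultUserPassword, bcrypt.genSaltSync(10))
+  //   bcrypt.hashSync(DatabaseConfig.defaultUserPassword, 10)
+  //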
+ // https://github.com/kelektiv/node.bcrypt.js#usage + return bcrypt.hashSync(DatabaseConfig.defaultUserPassword, 10) +} + +async function _groups (knex) { + return knex('idm.groups') + .select('groupId', 'group') +} + +async function _insertUsersWhereNotExists (knex) { + const password = _generateHashedPassword() + + for (const seedUser of seedUsers) { + const existingUser = await knex('idm.users') + .first('userId') + .where('userName', seedUser.userName) + .andWhere('application', seedUser.application) + + if (!existingUser) { + await knex('idm.users') + .insert({ + userName: seedUser.userName, + application: seedUser.application, + password, + userData: '{ "source": "Seeded" }', + resetRequired: 0, + badLogins: 0 + }) + } + } +} + +async function _insertUserGroupsWhereNotExists (knex) { + const seedUsersWithGroups = seedUsers.filter((seedData) => seedData.group) + + for (const seedUser of seedUsersWithGroups) { + const existingUserGroup = await knex('idm.userGroups') + .select('userGroupId') + .where('userId', seedUser.userId) + .andWhere('groupId', seedUser.groupId) + + if (!existingUserGroup) { + await knex('idm.userGroups') + .insert({ + userGroupId: randomUUID({ disableEntropyCache: true }), + userId: seedUser.userId, + groupId: seedUser.groupId + }) + } + } +} + +async function _updateSeedUsersWithUserIdAndGroupId (knex) { + const users = await _users(knex) + const groups = await _groups(knex) + + seedUsers.forEach((seedUser) => { + const user = users.find(({ userName }) => userName === seedUser.userName) + seedUser.userId = user.userId + + if (seedUser.group) { + const userGroup = groups.find(({ group }) => group === seedUser.group) + seedUser.groupId = userGroup.groupId + } + }) +} + +async function _users (knex) { + return knex('idm.users') + .select('userId', 'userName') + .whereJsonPath('userData', '$.source', '=', 'Seeded') +} + +module.exports = { + seed +} diff --git a/package-lock.json b/package-lock.json index 824f9592e9..2cb57e15d4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -16,6 +16,7 @@ "@hapi/hapi": "^21.1.0", "@hapi/inert": "^7.0.0", "@hapi/vision": "^7.0.0", + "bcryptjs": "^2.4.3", "blipp": "^4.0.2", "dotenv": "^16.0.3", "got": "^12.5.3", @@ -3063,6 +3064,11 @@ } ] }, + "node_modules/bcryptjs": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", + "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==" + }, "node_modules/binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", @@ -10373,6 +10379,11 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, + "bcryptjs": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-2.4.3.tgz", + "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==" + }, "binary-extensions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", diff --git a/package.json b/package.json index 21a5a53968..d9a2d787d0 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,7 @@ "migrate:db:test": "NODE_ENV=test knex migrate:latest", "rollback:db": "knex migrate:rollback --all", "rollback:db:test": "NODE_ENV=test knex migrate:rollback --all", + "seed:db": "knex seed:run --knexfile 
knexfile.application.js", "lint": "standard", "test": "lab --silent-skips --shuffle", "postinstall": "npm run build", @@ -30,6 +31,7 @@ "@hapi/hapi": "^21.1.0", "@hapi/inert": "^7.0.0", "@hapi/vision": "^7.0.0", + "bcryptjs": "^2.4.3", "blipp": "^4.0.2", "dotenv": "^16.0.3", "got": "^12.5.3", diff --git a/test/controllers/data/data.controller.test.js b/test/controllers/data/data.controller.test.js index 7058e5d00e..e0dbf06dd3 100644 --- a/test/controllers/data/data.controller.test.js +++ b/test/controllers/data/data.controller.test.js @@ -10,6 +10,7 @@ const { expect } = Code // Things we need to stub const DbExportService = require('../../../app/services/db-export/db-export.service') +const SeedService = require('../../../app/services/data/seed/seed.service.js') const TearDownService = require('../../../app/services/data/tear-down/tear-down.service.js') // For running our service @@ -34,18 +35,37 @@ describe('Data controller', () => { Sinon.restore() }) - describe('POST /data/tear-down', () => { + describe('GET /data/db-export', () => { + const options = { + method: 'GET', + url: '/data/db-export' + } + + describe('when the request succeeds', () => { + beforeEach(async () => { + Sinon.stub(DbExportService, 'go').resolves() + }) + + it('displays the correct message', async () => { + const response = await server.inject(options) + + expect(response.statusCode).to.equal(204) + }) + }) + }) + + describe('POST /data/seed', () => { const options = { method: 'POST', - url: '/data/tear-down' + url: '/data/seed' } describe('when the request succeeds', () => { beforeEach(async () => { - Sinon.stub(TearDownService, 'go').resolves() + Sinon.stub(SeedService, 'go').resolves() }) - it('returns a 204 status', async () => { + it('displays the correct message', async () => { const response = await server.inject(options) expect(response.statusCode).to.equal(204) @@ -53,9 +73,9 @@ describe('Data controller', () => { }) describe('when the request fails', () => { - describe('because the TearDownService errors', () => { + describe('because the SeedService errors', () => { beforeEach(async () => { - Sinon.stub(TearDownService, 'go').rejects() + Sinon.stub(SeedService, 'go').rejects() }) it('returns a 500 status', async () => { @@ -67,22 +87,36 @@ describe('Data controller', () => { }) }) - describe('GET /data/db-export', () => { + describe('POST /data/tear-down', () => { const options = { - method: 'GET', - url: '/data/db-export' + method: 'POST', + url: '/data/tear-down' } describe('when the request succeeds', () => { beforeEach(async () => { - Sinon.stub(DbExportService, 'go').resolves() + Sinon.stub(TearDownService, 'go').resolves() }) - it('displays the correct message', async () => { + it('returns a 204 status', async () => { const response = await server.inject(options) expect(response.statusCode).to.equal(204) }) }) + + describe('when the request fails', () => { + describe('because the TearDownService errors', () => { + beforeEach(async () => { + Sinon.stub(TearDownService, 'go').rejects() + }) + + it('returns a 500 status', async () => { + const response = await server.inject(options) + + expect(response.statusCode).to.equal(500) + }) + }) + }) }) }) diff --git a/test/services/data/seed/seed.service.test.js b/test/services/data/seed/seed.service.test.js new file mode 100644 index 0000000000..08ebba2022 --- /dev/null +++ b/test/services/data/seed/seed.service.test.js @@ -0,0 +1,37 @@ +'use strict' + +// Test framework dependencies +const Lab = require('@hapi/lab') +const Code = require('@hapi/code') 
+const Sinon = require('sinon') + +const { describe, it, beforeEach, afterEach } = exports.lab = Lab.script() +const { expect } = Code + +// Things we need to stub +const { db } = require('../../../../db/db.js') + +// Thing under test +const SeedService = require('../../../../app/services/data/seed/seed.service.js') + +describe('Seed service', () => { + let knexRunStub + + beforeEach(async () => { + knexRunStub = Sinon.stub().resolves() + + Sinon.replaceGetter(db, 'seed', () => { + return { run: knexRunStub } + }) + }) + + afterEach(() => { + Sinon.restore() + }) + + it('uses the knex instance we configure to run the seed process', async () => { + await SeedService.go() + + expect(knexRunStub.called).to.be.true() + }) +})
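
A minimal sketch of exercising the new seeding locally, assuming the defaults in `.env.example` (the app listening on port 3000):

    npm run seed:db

    curl -X POST http://localhost:3000/data/seed

The first runs the seeds directly through the new npm script; the second goes through the new `POST /data/seed` endpoint, which mirrors the existing `POST /data/tear-down` route and is presumably what the acceptance tests will call.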