diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 0000000..237b2cd
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,27 @@
+{
+  "parser": "babel-eslint",
+  "plugins": ["prettier"],
+  "parserOptions": {
+    "ecmaVersion": 2017,
+    "sourceType": "module"
+  },
+  "env": {
+    "es6": true,
+    "node": true,
+    "mocha": true,
+    "jest": true
+  },
+  "extends": ["eslint:recommended", "airbnb-base", "prettier"],
+  "rules": {
+    "prettier/prettier": "error",
+    "import/no-unresolved": "off",
+    "import/no-dynamic-require": ["off"],
+    "import/prefer-default-export": "off",
+    "import/no-extraneous-dependencies": ["error", { "devDependencies": true }],
+    "consistent-return": ["off"],
+    "no-restricted-syntax": "off",
+    "array-callback-return": "off",
+    "no-underscore-dangle": "off",
+    "no-await-in-loop": "off"
+  }
+}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..17e8675
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+node_modules
+.hfc-key-store
+.DS_Store
+.env
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..751faee
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,2 @@
+FROM trase/mu-javascript-template
+LABEL maintainer="Wouter Van Hecke "
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1092ce7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+## Setup
+
+```
+# Install deps & rebuild gRPC for Docker
+npm install --target=8.12.0 --target_platform=linux --target_arch=x64 --target_libc=musl .
+npm rebuild --target=8.12.0 --target_platform=linux --target_arch=x64 --target_libc=musl .
+```
diff --git a/app.js b/app.js
new file mode 100644
index 0000000..f13b843
--- /dev/null
+++ b/app.js
@@ -0,0 +1,83 @@
+import { app, errorHandler } from "mu";
+import mongoose from "mongoose";
+import util from "util";
+import https from "https";
+import fs from "fs";
+
+import routes from "./app.routes";
+import logger from "./config/Log";
+import network from "./services/network.service";
+import config from "./config/config";
+
+const init = async () => {
+  logger.info("=========== STARTING UP DECISION SERVER ===========");
+  app.use((req, res, next) => {
+    res.header("Access-Control-Allow-Origin", "*");
+    res.header(
+      "Access-Control-Allow-Headers",
+      "Origin, X-Requested-With, Content-Type, Accept"
+    );
+    next();
+  });
+
+  app.use(routes);
+  app.use(errorHandler);
+
+  try {
+    await network.initFabric();
+  } catch (e) {
+    logger.error(`Please restart the resource server. ${e}`);
+    process.exit(1);
+  }
+
+  const mongoUri =
+    config.env === "production"
+      ? "mongodb://mongodb:27017/abb-lblod"
+      : "mongodb://mongodb:27017/abb-lblod-dev";
+
+  // connect to mongo db
+  await mongoose
+    .connect(
+      mongoUri,
+      {
+        useCreateIndex: true,
+        useNewUrlParser: true,
+        poolSize: 2
+      }
+    )
+    .catch(e => {
+      throw new Error(`unable to connect to database ${mongoUri}: ${e}`);
+    });
+
+  if (config.env === "production") {
+    const httpsOptions = {
+      key: fs.readFileSync("certs/localhost-key.pem"),
+      cert: fs.readFileSync("certs/localhost.pem")
+    };
+    https.createServer(httpsOptions, app).listen(443, () => {
+      logger.info(
+        `Started decision server on port 443 in ${app.get("env")} mode`
+      );
+    });
+  } else {
+    // start server
+    app.listen(80, () =>
+      logger.info(
+        `Started decision server on port 80 in ${app.get("env")} mode`
+      )
+    );
+  }
+
+  // print mongoose logs in dev env
+  if (app.get("env") === "development") {
+    mongoose.set("debug", (collectionName, method, query, doc) => {
+      logger.info(
+        `${collectionName}.${method}`,
+        util.inspect(query, false, 20),
+        doc
+      );
+    });
+  }
+};
+
+init();
diff --git a/app.routes.js b/app.routes.js
new file mode 100644
index 0000000..4784525
--- /dev/null
+++ b/app.routes.js
@@ -0,0 +1,10 @@
+import { Router } from "express";
+
+import decisionRoutes from "./endpoints/decision/decision.route";
+
+export default Router()
+  /** GET /health-check - Check service health */
+  .get("/health-check", (req, res) =>
+    res.send("LBLOD Blockchain decision service up and running!")
+  )
+  .use("/decision", decisionRoutes);
diff --git a/config/Log.js b/config/Log.js
new file mode 100644
index 0000000..14247ec
--- /dev/null
+++ b/config/Log.js
@@ -0,0 +1,38 @@
+import expressWinston from "express-winston";
+import { transports, format, createLogger } from "winston";
+
+const { combine, prettyPrint, colorize, printf } = format;
+
+const myFormat = printf(
+  ({ level, timestamp, label = "FABRIC", message }) =>
+    `\n${level}: [${label} - ${timestamp}] - ${message} \n`
+);
+
+const defaultTransports = [
+  new transports.Console({
+    format: format.combine(format.prettyPrint(), format.colorize())
+  })
+];
+
+const Logger = createLogger({
+  format: combine(format.timestamp(), prettyPrint(), colorize(), myFormat),
+  transports: [
+    new transports.Console(),
+    new transports.File({ filename: "combined.log" })
+  ],
+  handleExceptions: true,
+  exitOnError: false
+});
+
+export const expressLogger = expressWinston.logger({
+  meta: false,
+  transports: defaultTransports
+});
+
+export const expressErrorLogger = expressWinston.errorLogger({
+  meta: false,
+  blacklistedMetaFields: ["process", "trace", "os", "req"],
+  transports: defaultTransports
+});
+
+export default Logger;
diff --git a/config/config.js b/config/config.js
new file mode 100644
index 0000000..98b2de0
--- /dev/null
+++ b/config/config.js
@@ -0,0 +1,45 @@
+import Joi from "joi";
+
+// require and configure dotenv, will load vars in .env in PROCESS.ENV
+import dotenv from "dotenv";
+
+dotenv.config();
+
+// define validation for all the env vars
+const envVarsSchema = Joi.object({
+  PORT: Joi.number().default(3000),
+  MONGO_PORT: Joi.number().default(27017),
+  USE_DB: Joi.boolean().default(false),
+  NODE_ENV: Joi.string()
+    .allow(["development", "production", "test", "provision"])
+    .default("development"),
+  MONGOOSE_DEBUG: Joi.boolean().when("NODE_ENV", {
+    is: Joi.string().equal("development"),
+    then: Joi.boolean().default(true),
+    otherwise: Joi.boolean().default(false)
+  }),
+  ADMIN_PW: Joi.string()
+    .required()
+    .description("Admin password required")
+})
+  .unknown()
+  .required();
+
+const { error, value: envVars } = Joi.validate(process.env, envVarsSchema);
+
+if (error) {
+  throw new Error(
+    `Check your '.env' file (located at the root of this project),
+    validation error: ${error.message}`
+  );
+}
+
+const config = {
+  env: envVars.NODE_ENV,
+  port: envVars.PORT,
+  mongooseDebug: envVars.MONGOOSE_DEBUG,
+  useDb: envVars.USE_DB,
+  ADMIN_PW: envVars.ADMIN_PW
+};
+
+export default config;
diff --git a/deploy.sh b/deploy.sh
new file mode 100755
index 0000000..87b3257
--- /dev/null
+++ b/deploy.sh
@@ -0,0 +1,7 @@
+#! /bin/sh
+
+DOCKER_ID_USER="trase"
+
+rm -rf .hfc-key-store
+docker build -t $DOCKER_ID_USER/decision-service:0.1.1 .
+docker push $DOCKER_ID_USER/decision-service:0.1.1
\ No newline at end of file
diff --git a/endpoints/decision/decision.controller.js b/endpoints/decision/decision.controller.js
new file mode 100644
index 0000000..bd9ee44
--- /dev/null
+++ b/endpoints/decision/decision.controller.js
@@ -0,0 +1,97 @@
+import httpStatus from "http-status";
+
+import logger from "../../config/Log";
+import decisionService from "../../services/decision.service";
+
+let counter = 0;
+
+const publish = async (req, res, next) => {
+  try {
+    counter++;
+    logger.info(`Decision counter: ${counter}`);
+    logger.info("Publishing resources..");
+    const resource = req.body;
+    const result = await decisionService.Publish(resource);
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const sign = async (req, res, next) => {
+  try {
+    logger.info("Signing resources..");
+    const resource = req.body;
+    const result = await decisionService.Sign(resource);
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const getAll = async (req, res, next) => {
+  try {
+    const result = await decisionService.GetAll();
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const validate = async (req, res, next) => {
+  try {
+    const { id, hash } = req.body;
+    const { result, blockchainHash } = await decisionService.Validate(id, hash);
+    res.status(httpStatus.OK).json({
+      id,
+      hash,
+      result,
+      blockchainHash
+    });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const queryById = async (req, res, next) => {
+  try {
+    const { id } = req.body;
+    const result = await decisionService.GetResourceById(id);
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const queryHistory = async (req, res, next) => {
+  try {
+    const { id } = req.body;
+    const result = await decisionService.GetResourceHistory(id);
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+const queryHistoryByVersion = async (req, res, next) => {
+  try {
+    const { id, version } = req.body;
+    const result = await decisionService.GetResourceHistoryByVersion(
+      id,
+      version
+    );
+    res.status(httpStatus.OK).json({ result });
+  } catch (e) {
+    next(e);
+  }
+};
+
+export default {
+  publish,
+  getAll,
+  sign,
+  validate,
+  queryById,
+  queryHistory,
+  queryHistoryByVersion
+};
diff --git a/endpoints/decision/decision.param.validation.js b/endpoints/decision/decision.param.validation.js
new file mode 100644
index 0000000..bec27da
--- /dev/null
+++ b/endpoints/decision/decision.param.validation.js
@@ -0,0 +1,37 @@
+import Joi from "joi";
+
+export const resourceScheme = {
+  body: {
+    id: Joi.string().required(),
+    content: Joi.string().required(),
+    oit: {
+      identifier: Joi.string().required(),
+      roles: Joi.array().required(),
+      secret: Joi.string().required(),
+      fullIdentifier:
Joi.string().required() + }, + resourceId: Joi.string().required(), + subject: Joi.string().required(), + timestamp: Joi.string().required() + } +}; + +export const validationScheme = { + body: { + id: Joi.string().required(), + hash: Joi.string().required() + } +}; + +export const queryHistoryScheme = { + body: { + id: Joi.string().required() + } +}; + +export const queryHistoryByVersionScheme = { + body: { + id: Joi.string().required(), + version: Joi.string().required() + } +}; diff --git a/endpoints/decision/decision.route.js b/endpoints/decision/decision.route.js new file mode 100644 index 0000000..b4be574 --- /dev/null +++ b/endpoints/decision/decision.route.js @@ -0,0 +1,37 @@ +import { Router } from "express"; +import validate from "express-validation"; + +import decisionController from "./decision.controller"; +import { + resourceScheme, + validationScheme, + queryHistoryScheme, + queryHistoryByVersionScheme +} from "./decision.param.validation"; + +const router = Router(); +router.route("/sign").post(validate(resourceScheme), decisionController.sign); + +router + .route("/publish") + .post(validate(resourceScheme), decisionController.publish); + +router.route("/getAll").post(decisionController.getAll); +router.route("/queryById").post(decisionController.queryById); + +router + .route("/queryHistory") + .post(validate(queryHistoryScheme), decisionController.queryHistory); + +router + .route("/queryHistoryByVersion") + .post( + validate(queryHistoryByVersionScheme), + decisionController.queryHistoryByVersion + ); + +router + .route("/validate") + .post(validate(validationScheme), decisionController.validate); + +export default router; diff --git a/models/auth.model.js b/models/auth.model.js new file mode 100644 index 0000000..cb24e91 --- /dev/null +++ b/models/auth.model.js @@ -0,0 +1,93 @@ +import Promise from "bluebird"; +import mongoose from "mongoose"; + +/** + * User Schema + */ +const UserSchema = new mongoose.Schema({ + username: { + type: String, + required: true, + unique: true + }, + encryptedCert: { + type: String, + required: true + }, + encryptedKey: { + type: String, + required: true + }, + createdAt: { + type: Date, + default: Date.now + } +}); + +/** + * Add your + * - pre-save hooks + * - validations + * - virtuals + */ + +/** + * Methods + */ +UserSchema.method({}); + +/** + * Statics + */ +UserSchema.statics = { + /** + * Get user + * @param {ObjectId} id - The objectId of user. + * @returns {Promise} + */ + get(id) { + return this.findById(id) + .exec() + .then(user => { + if (user) { + return user; + } + return Promise.reject(new Error("No such user exists!")); + }); + }, + + /** + * Get user + * @param {ObjectId} username - The username of user. + * @returns {Promise} + */ + getByName(username) { + return this.findOne({ username }) + .exec() + .then(user => { + if (user) { + return user; + } + return null; + }); + }, + + /** + * List users in descending order of 'createdAt' timestamp. + * @param {number} skip - Number of users to be skipped. + * @param {number} limit - Limit number of users to be returned. 
+ * @returns {Promise} + */ + list({ skip = 0, limit = 50 } = {}) { + return this.find() + .sort({ createdAt: -1 }) + .skip(+skip) + .limit(+limit) + .exec(); + } +}; + +/** + * @typedef User + */ +export default mongoose.model("User", UserSchema); diff --git a/package.json b/package.json new file mode 100644 index 0000000..dc6640d --- /dev/null +++ b/package.json @@ -0,0 +1,44 @@ +{ + "name": "lblod-decision-service", + "version": "1.0.0", + "description": "lblod decision service", + "main": "app.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "lint": "eslint --ignore-pattern '/node_modules/' .", + "start": "node app.js" + }, + "author": "Wouter Van Hecke", + "license": "ISC", + "dependencies": { + "bluebird": "^3.5.3", + "crypto-js": "^3.1.9-1", + "dotenv": "^6.2.0", + "elliptic": "^6.4.1", + "express": "^4.16.4", + "express-validation": "^1.0.2", + "express-winston": "^3.0.1", + "fabric-ca-client": "^1.3.0", + "fabric-client": "^1.3.0", + "http-status": "^1.3.1", + "joi": "^14.3.1", + "jsrsasign": "^8.0.12", + "lodash": "^4.17.11", + "mongoose": "^5.4.2", + "request": "^2.88.0", + "request-promise": "^4.2.2", + "sha.js": "^2.4.11", + "winston": "^3.1.0" + }, + "devDependencies": { + "babel-eslint": "9.0.0", + "eslint": "5.4.0", + "eslint-config-airbnb-base": "13.1.0", + "eslint-config-prettier": "3.0.1", + "eslint-plugin-import": "2.14.0", + "eslint-plugin-node": "7.0.1", + "eslint-plugin-prettier": "2.6.2", + "eslint-watch": "4.0.2", + "prettier": "1.14.2" + } +} diff --git a/services/decision.service.js b/services/decision.service.js new file mode 100644 index 0000000..a9a5fd4 --- /dev/null +++ b/services/decision.service.js @@ -0,0 +1,91 @@ +import signingService from "./signing.service"; +import queryService from "./query.service"; +import decisionHelper from "../utils/helpers/decision.helper"; +import logger from "../config/Log"; + +const Publish = async resource => { + try { + // GET USERID OUT OF RESOURCE + const { content, resourceId, oit, timestamp, subject, version } = resource; + const mockLimitedSigners = 2; + + // TODO express-validation + if (!content || !resourceId || !oit || !timestamp || !subject) { + throw new Error("Predefined resource model expectation was not met!"); + } + + const user = await decisionHelper.getUser(oit); + + // SIGN TRANSACTION + const args = { + id: resourceId, + hash: content, + subject, + timestamp, + version, + limitedSigners: mockLimitedSigners + }; + + const result = await signingService.SignTransaction( + args, + user, + "publishResource" // TODO don't hardcode + ); + logger.info("Resource was succesfully published to the blockchain!"); + return result; + } catch (e) { + logger.info(`Something went wrong in decision.service.js: ${e}`); + throw new Error(e); + } +}; + +const Sign = async resource => { + try { + // GET USERID OUT OF RESOURCE + const { content, resourceId, oit, timestamp, subject, version } = resource; + const mockLimitedSigners = 2; + + const user = await decisionHelper.getUser(oit); + + // SIGN TRANSACTION + const args = { + id: resourceId, + hash: content, + timestamp, + limitedSigners: mockLimitedSigners, + subject, + version + }; + + const result = await signingService.SignTransaction( + args, + user, + "signResource" // TODO don't hardcode + ); + logger.info("Resource was succesfully published to the blockchain!"); + return result; + } catch (e) { + logger.info(`Something went wrong: ${e}`); + throw new Error(e); + } +}; + +const GetResourceHistory = id => 
queryService.GetResourceHistory(id); +const GetResourceHistoryByVersion = (id, version) => + queryService.GetResourceHistoryByVersion(id, version); + +const Validate = (id, hash) => queryService.Validate(id, hash); + +const GetAll = () => queryService.GetAll(); + +const GetResourceById = id => queryService.GetResourceById(id); + +export default { + Publish, + Sign, + Validate, + GetAll, + GetResourceById, + GetResourceHistory, + GetResourceHistoryByVersion +}; diff --git a/services/network.service.js b/services/network.service.js new file mode 100644 index 0000000..94be1d0 --- /dev/null +++ b/services/network.service.js @@ -0,0 +1,180 @@ +import FabricClient from "fabric-client"; +import path from "path"; + +import config from "../config/config"; +import logger from "../config/Log"; + +const fabricClient = new FabricClient(); +let channel = null; +const eventHub = null; +let adminUser = null; +let fabricCaClient = null; +let ORG_MSP = null; +let channelName = null; +let chaincodeName = null; + +const register = async (user, secret = null) => { + if (!secret) { + logger.info("Creating secret"); + // eslint-disable-next-line no-param-reassign + secret = await fabricCaClient + .register( + { + role: user.role, + enrollmentID: user.username, + affiliation: user.affiliation, + attrs: [] // TODO add municipality + }, + adminUser + ) + .catch(err => Promise.reject(new Error(`Failed to register: ${err}`))); + } + + logger.info("Enrolling user on the fabric network"); + // Enroll the user + const enrollment = await fabricCaClient + .enroll({ + enrollmentID: user.username, + enrollmentSecret: secret + }) + .catch(err => Promise.reject(new Error(`Failed to enroll: ${err}`))); + + // Create the user + const finalUser = await fabricClient.createUser({ + username: user.username, + mspid: ORG_MSP, + cryptoContent: { + privateKeyPEM: enrollment.key.toBytes(), + signedCertPEM: enrollment.certificate + }, + skipPersistence: false + }); + + logger.info("User was succesfully enrolled on the fabric network!"); + + if (user.username === "admin") { + return finalUser; + } + + return Object.assign({}, finalUser, { + signedCertPEM: enrollment.certificate, + privateKeyPEM: enrollment.key.toBytes() + }); +}; + +const getClientForOrg = async (orgName, username) => { + const configName = "-connection-profile-path"; + const client = FabricClient.loadFromConfig( + FabricClient.getConfigSetting(`network${configName}`) + ); + + client.loadFromConfig(FabricClient.getConfigSetting(`client${configName}`)); + + await client.initCredentialStores(); + + if (username === "admin") { + const user = fabricClient.getUserContext("admin", true); + await client.setUserContext(user, true); + } + + return client; +}; + +const initFabric = async () => { + const file = "network-config-server.yaml"; + + FabricClient.setConfigSetting( + "network-connection-profile-path", + path.join("/clients/", file) + ); + + FabricClient.setConfigSetting( + `client-connection-profile-path`, + path.join("/clients/", `client.yaml`) + ); + + // Define storepath + const storePath = path.join(__dirname, "../.hfc-key-store"); + + // Set new crypto suite + const cryptoSuite = FabricClient.newCryptoSuite(); + fabricClient.setCryptoSuite(cryptoSuite); + + // Set default key-value store to storePath + const stateStore = await FabricClient.newDefaultKeyValueStore({ + path: storePath + }); + fabricClient.setStateStore(stateStore); + + // Set crypto keystore to storePath + const cryptoStore = FabricClient.newCryptoKeyStore({ path: storePath }); + 
+  cryptoSuite.setCryptoKeyStore(cryptoStore);
+
+  const client = await getClientForOrg("client");
+  client.setCryptoSuite(cryptoSuite);
+  client.setStateStore(stateStore);
+  fabricCaClient = client.getCertificateAuthority();
+  ORG_MSP = client.getMspid();
+
+  // Check if admin is enrolled
+  const userFromStore = await fabricClient.getUserContext("admin", true);
+
+  if (userFromStore && userFromStore.isEnrolled()) {
+    adminUser = userFromStore;
+  } else {
+    adminUser = await register(
+      {
+        username: "admin",
+        role: "admin",
+        affiliation: `org1.department1`
+      },
+      config.ADMIN_PW
+    );
+  }
+
+  await client.setUserContext(adminUser, true);
+
+  // TODO multiple channels
+  [channelName] = Object.keys(client._network_config._network_config.channels);
+
+  channel = client.getChannel(channelName);
+  if (!channel) {
+    throw new Error(
+      `Channel ${channelName} was not defined in the connection profile`
+    );
+  }
+
+  // TODO multiple chaincodes
+  const chaincodeInfo = await channel.queryInstantiatedChaincodes();
+  chaincodeName = chaincodeInfo.chaincodes[0].name;
+
+  logger.info("Config has been set!");
+};
+
+const getFabricClient = () => fabricClient;
+const getChannel = () => channel;
+const getEventHub = () => eventHub;
+const getChannelName = () => channelName;
+const getOrgMsp = () => ORG_MSP;
+const getChaincodeName = () => chaincodeName;
+
+export default {
+  getFabricClient,
+  getChannel,
+  getEventHub,
+  initFabric,
+  getClientForOrg,
+  getChannelName,
+  getOrgMsp,
+  getChaincodeName
+};
diff --git a/services/query.service.js b/services/query.service.js
new file mode 100644
index 0000000..4ab5e1b
--- /dev/null
+++ b/services/query.service.js
@@ -0,0 +1,87 @@
+import httpStatus from "http-status";
+import { isBuffer } from "lodash";
+
+import logger from "../config/Log";
+import network from "./network.service";
+
+let channel = null;
+
+const query = async (fcn, args) => {
+  try {
+    channel = await network.getChannel();
+
+    const channelPeers = channel.getChannelPeers();
+    const peers = channelPeers
+      .filter(channelpeer => channelpeer._roles.endorsingPeer)
+      .map(channelpeer => channelpeer.getName());
+
+    // send query
+    const request = {
+      targets: peers, // queryByChaincode allows for multiple targets
+      chaincodeId: network.getChaincodeName(),
+      fcn,
+      args
+    };
+    const responsePayloads = await channel.queryByChaincode(request);
+
+    if (responsePayloads === undefined || responsePayloads.length === 0) {
+      const err = new Error("Error from query: no result");
+      err.status = httpStatus.BAD_REQUEST;
+      throw err;
+    }
+
+    for (let i = 0; i < responsePayloads.length; i += 1) {
+      logger.info(
+        `result received from peer ${i}: ${responsePayloads[i].toString(
+          "utf8"
+        )}`
+      );
+    }
+
+    // TODO multiple peers, multiple responses
+
+    if (isBuffer(responsePayloads[0])) {
+      return JSON.parse(responsePayloads[0].toString("utf8"));
+    }
+
+    return {
+      result: responsePayloads[0].toString("utf8")
+    };
+  } catch (e) {
+    logger.error(e);
+    return e;
+  }
+};
+
+const GetAll = async () => query("queryAll", []);
+
+const Validate = async (id, hash) => {
+  const result = await query("validateResource", [
+    JSON.stringify({ id, hash })
+  ]);
+  return {
+    result: result.result,
+    blockchainHash: result.hash
+  };
+};
+
+const GetResourceById = async id =>
+  query("queryById", [JSON.stringify({ id })]);
+
+const GetResourceHistory = async id =>
+  query("queryHistory",
[JSON.stringify({ id })]); + +const GetResourceHistoryByVersion = async (id, version) => + query("queryHistoryByVersion", [JSON.stringify({ id, version })]); + +export default { + GetAll, + Validate, + GetResourceById, + GetResourceHistory, + GetResourceHistoryByVersion +}; diff --git a/services/signing.service.js b/services/signing.service.js new file mode 100644 index 0000000..e0c678c --- /dev/null +++ b/services/signing.service.js @@ -0,0 +1,19 @@ +import signingHelper from "../utils/helpers/signing.helper"; +import logger from "../config/Log"; + +const SignTransaction = async (args, user, func) => { + logger.info("Preparing request for resource"); + const proposal = signingHelper.prepareRequest( + func, + [JSON.stringify(args)], + true, + user.certificatePEM + ); + return signingHelper.invokeOffline( + proposal, + user.privateKeyPEM, + user.certificatePEM + ); +}; + +export default { SignTransaction }; diff --git a/services/userManagement.service.js b/services/userManagement.service.js new file mode 100644 index 0000000..90f7a5d --- /dev/null +++ b/services/userManagement.service.js @@ -0,0 +1,70 @@ +import requestPromise from "request-promise"; +import CryptoJS from "crypto-js"; + +import User from "../models/auth.model"; + +const RegisterUser = async oit => { + // const responseRetrieve = await requestPromise.post( + // "http://authentication/retrieve-encryption-key", + // { + // method: "POST", + // body: { identifier: oit, seed: oit.secret }, + // json: true + // } + // ); + + // console.log("Call 1 happened"); + // const encryptionKey = JSON.parse(responseRetrieve.encryptionKey); + + const { + encryptedCert, + encryptedKey, + encryptionKey + } = await requestPromise.post("http://authentication/create-certificate", { + method: "POST", + body: { + enrollmentID: oit.identifier, + role: oit.roles.toString(), + id: oit.fullIdentifier, + seed: oit.secret + }, + json: true + }); + + let bytes = CryptoJS.AES.decrypt( + encryptedCert, + JSON.stringify(encryptionKey) + ); + const certificatePEM = bytes.toString(CryptoJS.enc.Utf8); + + // Decrypt private key + bytes = CryptoJS.AES.decrypt(encryptedKey, JSON.stringify(encryptionKey)); + const privateKeyPEM = bytes.toString(CryptoJS.enc.Utf8); + + const newUser = new User({ + username: oit.identifier, + encryptedCert, + encryptedKey + }); + + await newUser.save(); + + return { username: oit.identifier, certificatePEM, privateKeyPEM }; +}; + +const GetUser = async identifier => User.getByName(identifier); + +const GetEncryptionKey = async oit => { + const responseRetrieve = await requestPromise.post( + "http://authentication/retrieve-encryption-key", + { + method: "POST", + body: { identifier: oit, seed: oit.secret }, + json: true + } + ); + + return JSON.parse(responseRetrieve.encryptionKey); +}; + +export default { RegisterUser, GetUser, GetEncryptionKey }; diff --git a/utils/constants.js b/utils/constants.js new file mode 100644 index 0000000..1a6e3b7 --- /dev/null +++ b/utils/constants.js @@ -0,0 +1,6 @@ +export const STATUSES = { + UNPUBLISHED: "unpublished", + PUBLISHING: "publishing", + PUBLISHED: "published", + FAILED: "publication_failed" +}; diff --git a/utils/helpers/decision.helper.js b/utils/helpers/decision.helper.js new file mode 100644 index 0000000..66f556a --- /dev/null +++ b/utils/helpers/decision.helper.js @@ -0,0 +1,82 @@ +import CryptoJS from "crypto-js"; + +import userManagementervice from "../../services/userManagement.service"; + +const decryptCerts = (foundUser, encryptionKey) => { + let bytes = CryptoJS.AES.decrypt( + 
+    foundUser.encryptedCert,
+    JSON.stringify(encryptionKey)
+  );
+  const certificatePEM = bytes.toString(CryptoJS.enc.Utf8);
+
+  // Decrypt private key
+  bytes = CryptoJS.AES.decrypt(
+    foundUser.encryptedKey,
+    JSON.stringify(encryptionKey)
+  );
+  const privateKeyPEM = bytes.toString(CryptoJS.enc.Utf8);
+
+  return { certificatePEM, privateKeyPEM };
+};
+
+const getCorrectTimestamp = () => {
+  const now = new Date();
+  const year = now.getFullYear();
+  const month = now.getMonth() + 1;
+  const day = now.getDate();
+
+  return `${year}-${month < 10 ? `0${month}` : month}-${day < 10 ? `0${day}` : day}`;
+};
+
+const getIdentifier = content => {
+  const signatoryResource = content.split('resource="')[2];
+  const signatoryURI = signatoryResource.split('"')[0];
+  const signatoryId = signatoryURI.split("/")[
+    signatoryURI.split("/").length - 1
+  ];
+
+  // TODO dynamic role when we retrieve openID Ticket
+  const role = "publisher";
+  return {
+    identifier: signatoryId,
+    oit: { identifier: signatoryId, role },
+    role
+  };
+};
+
+const getNewResourceId = uri => uri.split("/")[uri.split("/").length - 1];
+
+const getUser = async oit => {
+  // GET USER
+  const foundUser = await userManagementervice.GetUser(oit.identifier);
+
+  // REGISTER USER IF NULL
+  if (!foundUser) {
+    return userManagementervice.RegisterUser(oit);
+  }
+  const encryptionKey = await userManagementervice.GetEncryptionKey(oit);
+  const { certificatePEM, privateKeyPEM } = decryptCerts(
+    foundUser,
+    encryptionKey
+  );
+  return {
+    username: oit.identifier,
+    certificatePEM,
+    privateKeyPEM
+  };
+};
+
+export default {
+  getUser,
+  decryptCerts,
+  getCorrectTimestamp,
+  getIdentifier,
+  getNewResourceId
+};
diff --git a/utils/helpers/signing.helper.js b/utils/helpers/signing.helper.js
new file mode 100644
index 0000000..5bbf795
--- /dev/null
+++ b/utils/helpers/signing.helper.js
@@ -0,0 +1,226 @@
+import shajs from "sha.js";
+import { isEmpty } from "lodash";
+import elliptic from "elliptic";
+import { KEYUTIL } from "jsrsasign";
+
+import network from "../../services/network.service";
+import logger from "../../config/Log";
+
+let channel = null;
+let targets = null;
+
+const _preventMalleability = sig => {
+  try {
+    const ordersForCurve = {
+      secp256r1: {
+        halfOrder: elliptic.curves.p256.n.shrn(1),
+        order: elliptic.curves.p256.n
+      },
+      secp384r1: {
+        halfOrder: elliptic.curves.p384.n.shrn(1),
+        order: elliptic.curves.p384.n
+      }
+    };
+    const { halfOrder } = ordersForCurve.secp256r1;
+    if (!halfOrder) {
+      throw new Error(
+        'Can not find the half order needed to calculate "s" value for immalleable signatures. Unsupported curve name: secp256r1'
+      );
+    }
+
+    // in order to guarantee 's' falls in the lower range of the order, as explained in the above link,
+    // first see if 's' is larger than half of the order, if so, it needs to be specially treated
+    if (sig.s.cmp(halfOrder) === 1) {
+      // module 'bn.js', file lib/bn.js, method cmp()
+      // convert from BigInteger used by jsrsasign Key objects and bn.js used by elliptic Signature objects
+      const bigNum = ordersForCurve.secp256r1.order;
+      sig.s = bigNum.sub(sig.s); //eslint-disable-line
+    }
+
+    return sig;
+  } catch (e) {
+    throw new Error(`_preventMalleability: ${e}`);
+  }
+};
+
+const signProposal = async (proposal, privateKeyPEM) => {
+  try {
+    const digest = shajs("sha256")
+      .update(proposal)
+      .digest("hex");
+
+    const { prvKeyHex } = KEYUTIL.getKey(privateKeyPEM);
+
+    const { ec: EC } = elliptic;
+    const ecdsaCurve = elliptic.curves.p256;
+
+    const ecdsa = new EC(ecdsaCurve);
+    const signKey = ecdsa.keyFromPrivate(prvKeyHex, "hex");
+    let sig = ecdsa.sign(Buffer.from(digest, "hex"), signKey);
+    sig = _preventMalleability(sig);
+
+    // now we have the signature, next we should send the signed transaction proposal to the peer
+    const signature = Buffer.from(sig.toDER());
+    return {
+      signature,
+      proposal_bytes: proposal
+    };
+  } catch (e) {
+    throw new Error(`signProposal: ${e}`);
+  }
+};
+
+const prepareRequest = (fcn, args, invocation = true, certPem) => {
+  try {
+    const channelName = network.getChannelName();
+    channel = network.getChannel();
+
+    const request = {
+      channelId: channelName,
+      chaincodeId: network.getChaincodeName(),
+      fcn,
+      args
+    };
+
+    if (invocation) {
+      return channel.generateUnsignedProposal(
+        request,
+        network.getOrgMsp(),
+        certPem
+      );
+    }
+    return request;
+  } catch (e) {
+    throw new Error(`prepareRequest: ${e}`);
+  }
+};
+
+// eslint-disable-next-line
+const processTxEvent = (txId, { signedEvent = {} }) =>
+  new Promise((resolve, reject) => {
+    const eventHub = channel.newChannelEventHub(targets[0]);
+
+    if (!isEmpty(signedEvent)) {
+      eventHub.connect({
+        signedEvent
+      });
+    } else {
+      eventHub.connect();
+    }
+
+    eventHub.registerTxEvent(
+      txId,
+      (tx, statusCode) => {
+        eventHub.unregisterTxEvent(txId);
+        eventHub.disconnect();
+
+        if (statusCode !== "VALID") {
+          return reject(
+            new Error(
+              `Problem with the transaction, event status ::${statusCode}`
+            )
+          );
+        }
+
+        logger.info(
+          `The transaction has been committed on peer ${
+            eventHub._peer._endpoint.addr
+          }`
+        );
+
+        resolve({
+          statusCode,
+          tx
+        });
+      },
+      err => {
+        eventHub.disconnect();
+        return reject(
+          new Error(`There was a problem with the eventhub ::${err}`)
+        );
+      }
+    );
+  });
+
+// eslint-disable-next-line
+const invokeOffline = async ({ txId, proposal }, privateKeyPEM, certPEM) => {
+  try {
+    const signedProposal = await signProposal(
+      proposal.toBuffer(),
+      privateKeyPEM
+    );
+    targets = channel.getPeers().map(peer => peer.getPeer());
+    const sendSignedProposalReq = {
+      signedProposal,
+      targets
+    };
+
+    const proposalResponses = await channel.sendSignedProposal(
+      sendSignedProposalReq
+    );
+
+    // Sign and send commit
+    const commitReq = {
+      proposalResponses,
+      proposal
+    };
+
+    for (const response of proposalResponses) {
+      if (response.response) {
+        if (response.response.status !== 200) {
+          throw new Error(response.message);
+        }
+      } else {
+        throw new Error(response.message);
+      }
+    }
+
+    const commitProposal = await channel.generateUnsignedTransaction(commitReq);
+
+    const signedCommitProposal = await signProposal(
+      commitProposal.toBuffer(),
+      privateKeyPEM
+    );
+
+    const response = await channel.sendSignedTransaction({
+      signedProposal: signedCommitProposal,
+      request: commitReq
+    });
+
+    if (response.status !== "SUCCESS") {
+      // TODO - find out what went wrong
+      throw new Error("Something went wrong");
+    }
+
+    // return response.status;
+
+    // let peer = channel.getChannelEventHubsForOrg(network.getOrgMsp()).getPeer();
+    const eventHub = channel.newChannelEventHub(targets[0]);
+    const unsignedEvent = await eventHub.generateUnsignedRegistration({
+      certificate: certPEM,
+      mspId: network.getOrgMsp()
+    });
+
+    const signedEventProposal = await signProposal(
+      unsignedEvent,
+      privateKeyPEM
+    );
+    const signedEvent = {
+      signature: signedEventProposal.signature,
+      payload: signedEventProposal.proposal_bytes
+    };
+
+    return processTxEvent(txId.getTransactionID(), {
+      signedEvent
+    });
+  } catch (e) {
+    throw new Error(`invokeOffline: ${e}`);
+  }
+};
+
+export default {
+  prepareRequest,
+  invokeOffline
+};
diff --git a/utils/queries.js b/utils/queries.js
new file mode 100644
index 0000000..842a088
--- /dev/null
+++ b/utils/queries.js
@@ -0,0 +1,87 @@
+export const queryStatus = status => `PREFIX sign:
+PREFIX dct:
+SELECT ?s ?content ?signatory ?void ?signedResource ?acmIdmSecret
+  (GROUP_CONCAT(DISTINCT ?role; SEPARATOR = ',') as ?roles)
+WHERE {
+  ?s a sign:BlockchainSignature ;
+    sign:text ?content ;
+    sign:signatory ?signatory ;
+    sign:roles ?role;
+    sign:externalUserId ?void;
+    dct:subject ?signedResource ;
+    sign:secret ?acmIdmSecret ;
+    sign:status
+  .
+}`;
+
+export const queryNotules = status => `PREFIX ext:
+PREFIX besluit:
+PREFIX prov:
+PREFIX sign:
+PREFIX dct:
+
+SELECT ?signedResource ?zitting ?content
+WHERE {
+  GRAPH {
+    ?signedResource a sign:SignedResource;
+      dct:subject ?zitting;
+      sign:status ;
+      sign:text ?content.
+  }
+} `;
+
+export const insertQuery = `
+PREFIX sign:
+PREFIX dct:
+
+INSERT DATA {
+  GRAPH {
+
+    a sign:BlockchainSignature ;
+      sign:text "... Ik ben een besluit ..." ;
+      sign:signatory ;
+      sign:roles "GelinktNotuleren_ondertekenaar", "GelinktNotuleren_schrijver" ;
+      sign:externalUserId "733bc851-5793-4239-acd3-a63c8fc64de5";
+      dct:subject ;
+      sign:secret "b994192c-7d2a-4f93-a8e8-2852b81e5e89" ;
+      sign:status
+    .
+  }
+}`;
+
+export const insertById = id => `
+PREFIX sign:
+PREFIX dct:
+
+INSERT DATA {
+  GRAPH {
+
+    a sign:BlockchainSignature ;
+      sign:text "... Ik ben een besluit ..." ;
+      sign:signatory ;
+      sign:roles "GelinktNotuleren_ondertekenaar", "GelinktNotuleren_schrijver" ;
+      sign:externalUserId "733bc851-5793-4239-acd3-a63c8fc64de5";
+      dct:subject ;
+      sign:secret "b994192c-7d2a-4f93-a8e8-2852b81e5e89" ;
+      sign:status
+    .
+  }
+}`;
+
+export const updateQuery = (
+  id,
+  status
+) => `PREFIX sign:
+DELETE {
+    GRAPH ?g {
+      <${id}> sign:status ?status .
+    }
+  } INSERT {
+    GRAPH ?g {
+      <${id}> sign:status .
+    }
+  } WHERE {
+    GRAPH ?g {
+      <${id}> sign:status ?status .
+    }
+  }`;