feat: add winston as a sole logging package
Signed-off-by: Anthony Benites <[email protected]>
ec2sw committed May 3, 2022
1 parent 304b1b4 commit e35ddde
Showing 24 changed files with 785 additions and 1,368 deletions.
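In short, this commit removes the debug dependency, adds winston and winston-daily-rotate-file, introduces a shared logger module at src/config/logger.cjs, and replaces the scattered console.log / Debug calls with that logger. A before/after sketch of the call-site pattern (adapted from the hunks below; the relative import path depends on where the consuming module lives):

// Before: per-module debug namespaces and raw console output
import Debug from 'debug';
const log = Debug('climate-warehouse:mirror-database');
log('Mirror DB not connected');

// After: the shared winston logger
import { logger } from '../config/logger.cjs';
logger.info('Mirror DB not connected');
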
1,817 changes: 564 additions & 1,253 deletions package-lock.json

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions package.json
@@ -31,7 +31,6 @@
"cli-spinners": "^2.6.1",
"cors": "^2.8.5",
"csvtojson": "^2.0.10",
"debug": "~2.6.9",
"dotenv": "^10.0.0",
"express": "~4.16.1",
"express-fileupload": "^1.2.1",
@@ -55,7 +54,8 @@
"toad-scheduler": "^1.6.0",
"updeep": "^1.2.1",
"uuidv4": "^6.2.12",
"winston": "^3.6.0"
"winston": "^3.7.2",
"winston-daily-rotate-file": "^4.6.1"
},
"devDependencies": {
"@babel/cli": "^7.16.0",
11 changes: 8 additions & 3 deletions src/config/config.cjs
@@ -5,16 +5,21 @@ const yaml = require('js-yaml');
const fs = require('fs');
const os = require('os');
const path = require('path');
const logger = require('./logger.cjs').logger;

const homeDir = os.homedir();
const defaultConfig = require('../utils/defaultConfig.json');

const persistanceFolder = `${homeDir}/.chia/climate-warehouse`;

// Adding this duplicate function here because I'm having trouble
// importing it from the utils folder
const configPath = `${homeDir}/.chia/climate-warehouse/config.yaml`;
const getConfig = _.memoize(() => {
logger.info(`Reading config file at ${configPath}`);

const configFile = path.resolve(
`${homeDir}/.chia/climate-warehouse/config.yaml`,
configPath,
);

// First write it to chia home
@@ -28,7 +33,7 @@ const getConfig = _.memoize(() => {
defaultConfig.APP.USE_SIMULATOR = true;
}

console.log('Cant write config file, falling back to defaults');
logger.error('Cant write config file, falling back to defaults');
return yaml.load(yaml.dump(defaultConfig));
}
}
@@ -42,7 +47,7 @@ const getConfig = _.memoize(() => {

return yml;
} catch (e) {
console.log(e, `Config file not found at ${configFile}`);
logger.error(`Config file not found at ${configFile}`, e);
}
});

94 changes: 94 additions & 0 deletions src/config/logger.cjs
@@ -0,0 +1,94 @@
const winston = require('winston');
const { format, transports, createLogger } = winston;

const DailyRotateFile = require('winston-daily-rotate-file');

const fs = require('fs');
const os = require('os');
const homeDir = os.homedir();
const logDir = `${homeDir}/.chia/climate-warehouse/logs`;

if (!fs.existsSync(logDir)) {
  fs.mkdirSync(logDir, { recursive: true });
}

const logFormat = format.printf(
  (info) =>
    `${info.timestamp} [${info.level}]: ${info.message} ${
      Object.keys(info.metadata || {}).length > 0
        ? JSON.stringify(info.metadata)
        : ''
    }`,
);

const logger = createLogger({
  level: 'info',
  format: format.combine(
    logFormat,
    // format.label({ label: path.basename(process.main.filename) }),
    format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
    format.metadata({ fillExcept: ['message', 'level', 'timestamp'] }),
  ),
  transports: [
    // Write all logs with importance level of `error` or less to `error.log`
    new transports.File({
      filename: `${logDir}/error.log`,
      level: 'error',
      format: format.combine(format.json()),
    }),
    // Write all logs with importance level of `info` or less to `combined.log`
    new transports.File({
      filename: `${logDir}/combined.log`,
      format: format.combine(format.json()),
    }),
    // Rotate logs to `application-%DATE%.log`
    new DailyRotateFile({
      filename: `${logDir}/application-%DATE%.log`,
      datePattern: 'YYYY-MM-DD',
      zippedArchive: true,
      maxSize: '20m',
      utc: true,
      format: format.combine(format.json()),
    }),
  ],
  exceptionHandlers: [
    new transports.File({
      filename: `${logDir}/exceptions.log`,
    }),
    new DailyRotateFile({
      filename: `${logDir}/exceptions-%DATE%.log`,
      datePattern: 'YYYY-MM-DD',
      zippedArchive: true,
      maxSize: '20m',
      utc: true,
    }),
  ],
  rejectionHandlers: [
    new transports.File({
      filename: `${logDir}/rejections.log`,
    }),
    new DailyRotateFile({
      filename: `${logDir}/rejections-%DATE%.log`,
      datePattern: 'YYYY-MM-DD',
      zippedArchive: true,
      maxSize: '20m',
      utc: true,
    }),
  ],
  exitOnError: false,
});

//
// If not in production then log to the `console`
//
if (process.env.NODE_ENV !== 'production') {
  logger.add(
    new transports.Console({
      format: format.combine(format.colorize(), format.prettyPrint(), logFormat),
    }),
  );
}

module.exports = {
  logger,
};
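
For orientation, a sketch of how the exported logger is consumed and where entries end up, given the transports above (the import path and the sample output line are illustrative, not taken from a real run):

// Hypothetical consumer somewhere under src/ (relative path assumed to resolve):
import { logger } from '../config/logger.cjs';

logger.info('Mirror DB not connected'); // -> combined.log and application-%DATE%.log
logger.error('Config file not found'); // -> additionally written to error.log

// With the printf format above, the console transport (non-production only) prints roughly:
// 2022-05-03 10:15:00 [info]: Mirror DB not connected
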
4 changes: 3 additions & 1 deletion src/controllers/project.controller.js
@@ -6,6 +6,8 @@ import { uuid as uuidv4 } from 'uuidv4';

import { Staging, Project, Organization, ModelKeys } from '../models';

import { logger } from '../config/logger.cjs';

import {
columnsToInclude,
optionallyPaginatedResponse,
@@ -318,7 +320,7 @@ export const update = async (req, res) => {
message: 'Error adding update to stage',
error: err.message,
});
console.log(err);
logger.error('Error adding update to stage', err);
}
};

19 changes: 10 additions & 9 deletions src/database/index.js
@@ -1,8 +1,6 @@
import { Sequelize } from 'sequelize';
import config from '../config/config.cjs';
import Debug from 'debug';
Debug.enable('climate-warehouse:mirror-database');
const log = Debug('climate-warehouse:mirror-database');
import { logger } from '../config/logger.cjs';
import mysql from 'mysql2/promise';
import { getConfig } from '../utils/config-loader';

@@ -19,6 +17,8 @@ const mirrorConfig =
(process.env.NODE_ENV || 'local') === 'local' ? 'mirror' : 'mirrorTest';
export const sequelizeMirror = new Sequelize(config[mirrorConfig]);

logger.info('climate-warehouse:mirror-database');

export const safeMirrorDbHandler = (callback) => {
try {
sequelizeMirror
@@ -31,12 +31,13 @@ export const safeMirrorDbHandler = (callback) => {
getConfig().MIRROR_DB.DB_HOST &&
getConfig().MIRROR_DB.DB_HOST !== ''
) {
log('Mirror DB not connected');
logger.info('Mirror DB not connected');
}
});
} catch (error) {
console.log(
logger.error(
'MirrorDB tried to update before it was initialized, will try again later',
error,
);
}
};
@@ -54,11 +55,11 @@ export const seedDb = async (db) => {

for (let i = 0; i < seeders.length; i++) {
const seeder = seeders[i];
console.log('SEEDING: ', seeder.name);
logger.info(`SEEDING: ${seeder.name}`, seeder);
await seeder.seed.up(queryInterface, Sequelize);
}
} catch (error) {
log(error);
logger.error('Error seeding data', error);
}
};

@@ -85,15 +86,15 @@ export const checkForMigrations = async (db) => {

for (let i = 0; i < notCompletedMigrations.length; i++) {
const notCompleted = notCompletedMigrations[i];
console.log('MIGRATING: ', notCompleted.name);
logger.info('MIGRATING: ', notCompleted.name);
await notCompleted.migration.up(db.queryInterface, Sequelize);
await db.query('INSERT INTO `SequelizeMeta` VALUES(:name)', {
type: Sequelize.QueryTypes.INSERT,
replacements: { name: notCompleted.name },
});
}
} catch (error) {
log(error);
logger.error('Error checking for migrations', error);
}
};

32 changes: 15 additions & 17 deletions src/datalayer/persistance.js
@@ -6,10 +6,9 @@ import request from 'request-promise';
import os from 'os';
import { getConfig } from '../utils/config-loader';

import Debug from 'debug';
Debug.enable('climate-warehouse:datalayer:persistance');
import { logger } from '../config/logger.cjs';

const log = Debug('climate-warehouse:datalayer:persistance');
logger.info('climate-warehouse:datalayer:persistance');

process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0;

@@ -43,13 +42,11 @@ export const createDataLayerStore = async () => {

const data = JSON.parse(response);

console.log(data);

if (data.success) {
return data.id;
}

log(data);
logger.info(data);

throw new Error(data.error);
};
@@ -70,23 +67,24 @@ export const pushChangeListToDataLayer = async (storeId, changelist) => {

const data = JSON.parse(response);

log(options, data);
logger.info(options);
logger.info(data);

if (data.success) {
log('Success!');
logger.info('Success!');
return true;
}

if (data.error.includes('Key already present')) {
log('Success, I guess...');
logger.info('Success, I guess...');
return true;
}

log(data);
logger.info(data);

return false;
} catch (error) {
log('There was an error pushing your changes to the datalayer');
logger.info('There was an error pushing your changes to the datalayer');
}
};

@@ -166,12 +164,12 @@ export const getStoreData = async (storeId, rootHash) => {

if (data.success) {
if (!_.isEmpty(data.keys_values)) {
log('Downloaded Data', data);
logger.info(`Downloaded Data: ${data}`);
}
return data;
}

log(data);
logger.info(data);
}

return false;
@@ -195,7 +193,7 @@ export const dataLayerAvailable = async () => {
return true;
}

log(data);
logger.info(data);
return false;
} catch (error) {
return false;
@@ -212,7 +210,7 @@ export const subscribeToStoreOnDataLayer = async (storeId, ip, port) => {
}),
};

log('RPC Call: ', `${rpcUrl}/subscribe`, storeId, ip, port);
logger.info(`RPC Call: ${rpcUrl}/subscribe ${storeId} ${ip} ${port}`);

try {
const response = await request(
@@ -222,13 +220,13 @@
const data = JSON.parse(response);

if (Object.keys(data).includes('success') && data.success) {
log('Successfully Subscribed: ', storeId, ip, port);
logger.info(`Successfully Subscribed: ${storeId} ${ip} ${port}`);
return data;
}

return false;
} catch (error) {
log('Error Subscribing: ', error);
logger.info(`Error Subscribing: ${error}`);
return false;
}
};
(The remaining changed files are not rendered here.)
