diff --git a/.node-version b/.node-version
index c966188e11ada..19b860c1872d5 100644
--- a/.node-version
+++ b/.node-version
@@ -1 +1 @@
-4.4.7
+6.4.0
diff --git a/package.json b/package.json
index a5434afd7faa1..b398864d1ffef 100644
--- a/package.json
+++ b/package.json
@@ -63,7 +63,7 @@
     "url": "https://github.com/elastic/kibana.git"
   },
   "dependencies": {
-    "@bigfunger/decompress-zip": "0.2.0-stripfix2",
+    "@bigfunger/decompress-zip": "0.2.0-stripfix3",
     "@spalger/angular-bootstrap": "0.12.1",
     "@spalger/filesaver": "1.1.2",
     "@spalger/leaflet-draw": "0.2.3",
@@ -77,7 +77,7 @@
     "angular-route": "1.4.7",
     "angular-sanitize": "1.5.7",
     "ansicolors": "0.3.2",
-    "autoprefixer": "5.1.1",
+    "autoprefixer": "6.3.7",
     "autoprefixer-loader": "2.0.0",
     "babel": "5.8.23",
     "babel-core": "5.8.23",
@@ -103,8 +103,11 @@
     "good": "6.3.0",
     "good-squeeze": "2.1.0",
     "gridster": "0.5.6",
-    "hapi": "8.8.1",
+    "grunt-run": "0.6.0",
+    "h2o2": "5.1.1",
+    "hapi": "14.2.0",
     "imports-loader": "0.6.4",
+    "inert": "4.0.2",
     "jade": "1.11.0",
     "jade-loader": "0.7.1",
     "joi": "6.6.1",
@@ -114,8 +117,8 @@
     "json-stringify-safe": "5.0.1",
     "jstimezonedetect": "1.0.5",
     "leaflet": "0.7.5",
-    "less": "2.5.1",
-    "less-loader": "2.2.0",
+    "less": "2.7.1",
+    "less-loader": "2.2.3",
     "loader-utils": "0.2.11",
     "lodash": "3.10.1",
     "marked": "0.3.6",
@@ -133,6 +136,7 @@
     "style-loader": "0.12.3",
     "tar": "2.2.0",
     "url-loader": "0.5.6",
+    "vision": "4.1.0",
     "webpack": "1.12.15",
     "webpack-directory-name-as-main": "1.0.0",
     "whatwg-fetch": "0.9.0",
@@ -143,28 +147,27 @@
     "angular-mocks": "1.4.7",
     "auto-release-sinon": "1.0.3",
     "babel-eslint": "4.1.7",
-    "chokidar": "1.0.5",
+    "chokidar": "1.6.0",
     "chromedriver": "2.22.1",
     "eslint": "1.5.1",
     "eslint-plugin-mocha": "1.0.0",
     "expect.js": "0.3.1",
     "faker": "1.1.0",
     "glob": "4.5.3",
-    "grunt": "0.4.5",
+    "grunt": "1.0.1",
+    "grunt-aws-s3": "0.14.5",
     "grunt-babel": "5.0.1",
     "grunt-cli": "0.1.13",
-    "grunt-contrib-clean": "0.6.0",
+    "grunt-contrib-clean": "1.0.0",
     "grunt-contrib-copy": "0.8.1",
     "grunt-esvm": "3.2.6",
-    "grunt-karma": "0.12.0",
-    "grunt-run": "0.5.0",
-    "grunt-s3": "0.2.0-alpha.3",
+    "grunt-karma": "0.12.2",
     "grunt-simple-mocha": "0.4.0",
     "gruntify-eslint": "1.0.1",
     "husky": "0.8.1",
     "intern": "3.2.3",
     "istanbul-instrumenter-loader": "0.1.3",
-    "karma": "0.13.22",
+    "karma": "1.2.0",
     "karma-chrome-launcher": "0.2.0",
     "karma-coverage": "0.5.1",
     "karma-firefox-launcher": "0.1.6",
@@ -177,14 +180,14 @@
     "makelogs": "2.0.0",
     "mocha": "2.3.4",
     "nock": "8.0.0",
-    "npm": "2.15.8",
+    "npm": "3.10.3",
     "portscanner": "1.0.0",
     "simple-git": "1.8.0",
     "sinon": "1.17.2",
     "source-map": "0.4.4"
   },
   "engines": {
-    "node": "4.4.7",
-    "npm": "2.15.8"
+    "node": "6.4.0",
+    "npm": "3.10.3"
   }
 }
diff --git a/src/optimize/lazy/LazyServer.js b/src/optimize/lazy/LazyServer.js
index 7527cf1505d51..10847e93cde94 100644
--- a/src/optimize/lazy/LazyServer.js
+++ b/src/optimize/lazy/LazyServer.js
@@ -2,12 +2,15 @@
 let { Server } = require('hapi');
 let { fromNode } = require('bluebird');
 let Boom = require('boom');
-
+let registerHapiPlugins = require('../../server/http/register_hapi_plugins');
 
 module.exports = class LazyServer {
   constructor(host, port, optimizer) {
     this.optimizer = optimizer;
     this.server = new Server();
+
+    registerHapiPlugins(null, this.server);
+
     this.server.connection({
       host: host,
       port: port
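Note on the dependency changes above: hapi 9 split the proxy, static-file, and template handlers out of core into the h2o2, inert, and vision packages, so the jump from hapi 8.8.1 to 14.2.0 is what pulls those three new dependencies in, and it is why LazyServer now calls registerHapiPlugins on its server. A minimal sketch of what that registration amounts to on a bare hapi 14 server -- illustration only, not code from this change; the host and port are placeholders:

    // Sketch: hapi >= 9 no longer bundles the proxy/file/view handlers.
    const Hapi = require('hapi');

    const server = new Hapi.Server();
    server.connection({ host: 'localhost', port: 5602 }); // placeholder values

    // h2o2 restores the `proxy` route handler, inert restores `file`/`directory`,
    // and vision restores template rendering -- all built into hapi 8.x.
    server.register([require('vision'), require('inert'), require('h2o2')], (err) => {
      if (err) throw err;
    });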
diff --git a/src/plugins/elasticsearch/lib/__tests__/routes.js b/src/plugins/elasticsearch/lib/__tests__/routes.js
index ff681438623e5..0bf1886790037 100644
--- a/src/plugins/elasticsearch/lib/__tests__/routes.js
+++ b/src/plugins/elasticsearch/lib/__tests__/routes.js
@@ -28,12 +28,11 @@ describe('plugins/elasticsearch', function () {
   });
 
-  function testRoute(options) {
+  function testRoute(options, statusCode = 200) {
     if (typeof options.payload === 'object') {
       options.payload = JSON.stringify(options.payload);
     }
 
-    const statusCode = options.statusCode || 200;
 
     describe(format('%s %s', options.method, options.url), function () {
       it('should should return ' + statusCode, function (done) {
         kbnTestServer.makeRequest(kbnServer, options, function (res) {
@@ -61,21 +60,18 @@ describe('plugins/elasticsearch', function () {
 
     testRoute({
       method: 'POST',
-      url: '/elasticsearch/.kibana',
-      statusCode: 405
-    });
+      url: '/elasticsearch/.kibana'
+    }, 405);
 
     testRoute({
       method: 'PUT',
-      url: '/elasticsearch/.kibana',
-      statusCode: 405
-    });
+      url: '/elasticsearch/.kibana'
+    }, 405);
 
     testRoute({
       method: 'DELETE',
-      url: '/elasticsearch/.kibana',
-      statusCode: 405
-    });
+      url: '/elasticsearch/.kibana'
+    }, 405);
 
     testRoute({
       method: 'GET',
@@ -85,9 +81,8 @@
     testRoute({
       method: 'POST',
       url: '/elasticsearch/.kibana/_bulk',
-      payload: '{}',
-      statusCode: 400
-    });
+      payload: '{}'
+    }, 400);
 
     testRoute({
       method: 'POST',
diff --git a/src/server/http/index.js b/src/server/http/index.js
index 06d8f28588b54..cf3830e764bff 100644
--- a/src/server/http/index.js
+++ b/src/server/http/index.js
@@ -13,6 +13,7 @@ module.exports = function (kbnServer, server, config) {
   server = kbnServer.server = new Hapi.Server();
 
   const shortUrlLookup = require('./short_url_lookup')(server);
+  kbnServer.mixin(require('./register_hapi_plugins'));
 
   // Create a new connection
   let connectionOptions = {
diff --git a/src/server/http/register_hapi_plugins.js b/src/server/http/register_hapi_plugins.js
new file mode 100644
index 0000000000000..395beb0d37a6c
--- /dev/null
+++ b/src/server/http/register_hapi_plugins.js
@@ -0,0 +1,16 @@
+import HapiTemplates from 'vision';
+import HapiStaticFiles from 'inert';
+import HapiProxy from 'h2o2';
+import { fromNode } from 'bluebird';
+
+const plugins = [HapiTemplates, HapiStaticFiles, HapiProxy];
+
+async function registerPlugins(server) {
+  await fromNode(cb => {
+    server.register(plugins, cb);
+  });
+}
+
+export default function (kbnServer, server, config) {
+  registerPlugins(server);
+}
diff --git a/tasks/config/aws_s3.js b/tasks/config/aws_s3.js
new file mode 100644
index 0000000000000..975e4a1889099
--- /dev/null
+++ b/tasks/config/aws_s3.js
@@ -0,0 +1,21 @@
+export default function (grunt) {
+  const { config } = grunt;
+  const { sha, version } = grunt.config.get('build');
+
+  return {
+    options: {
+      bucket: 'download.elasticsearch.org',
+      access: 'private',
+      uploadConcurrency: 10
+    },
+
+    staging: {
+      files: [{
+        expand: true,
+        cwd: 'target',
+        src: ['**'],
+        dest: `kibana/staging/${version}-${sha.substr(0, 7)}/kibana/`
+      }]
+    }
+  };
+};
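For context on the new aws_s3 config above: the staging target pushes everything under target/ to a destination path derived from the build's version and short sha. A hedged illustration of how that dest template expands, using made-up values rather than anything from this change:

    // Illustration only -- hypothetical version and sha.
    const version = '5.0.0';
    const sha = '9b8a2de7c4f31e0';

    const dest = `kibana/staging/${version}-${sha.substr(0, 7)}/kibana/`;
    // -> 'kibana/staging/5.0.0-9b8a2de/kibana/'

The release task further below invokes this target as aws_s3:staging once the build step has finished.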
diff --git a/tasks/licenses.js b/tasks/licenses.js
index b7977b418630b..b105a90839995 100644
--- a/tasks/licenses.js
+++ b/tasks/licenses.js
@@ -1,75 +1,77 @@
-var _ = require('lodash');
-var npm = require('npm');
-var npmLicense = require('license-checker');
+import _ from 'lodash';
+import { fromNode } from 'bluebird';
+import npm from 'npm';
+import npmLicense from 'license-checker';
 
-module.exports = function (grunt) {
-  grunt.registerTask('licenses', 'Checks dependency licenses', function () {
+export default function licenses(grunt) {
+  grunt.registerTask('licenses', 'Checks dependency licenses', async function () {
+    const config = this.options();
+    const done = this.async();
 
-    var config = this.options();
+    const result = [];
+    const options = {
+      start: process.cwd(),
+      production: true,
+      json: true
+    };
 
-    var done = this.async();
+    const packages = await fromNode(cb => {
+      npmLicense.init(options, (result, error) => {
+        cb(undefined, result);
+      });
+    });
 
-    var result = {};
-    var options = { start: process.cwd(), json: true };
-    var checkQueueLength = 2;
+    /**
+     * Licenses for a package by name with overrides
+     *
+     * @param {String} name
+     * @return {Array}
+     */
 
-    function processPackage(info, dependency) {
-      var pkgInfo = {};
-      pkgInfo.name = dependency;
-      pkgInfo.licenses = config.overrides[dependency] || (info && info.licenses);
-      pkgInfo.licenses = _.isArray(pkgInfo.licenses) ? pkgInfo.licenses : [pkgInfo.licenses];
-      pkgInfo.valid = (function () {
-        if (_.intersection(pkgInfo.licenses, config.licenses).length > 0) {
-          return true;
-        }
-        return false;
-      }());
-      return pkgInfo;
-    }
+    function licensesForPackage(name) {
+      let licenses = packages[name].licenses;
 
-    npmLicense.init(options, function (allDependencies) {
-      // Only check production NPM dependencies, not dev
-      npm.load({production: true}, function () {
-        npm.commands.list([], true, function (a, b, npmList) {
+      if (config.overrides.hasOwnProperty(name)) {
+        licenses = config.overrides[name];
+      }
 
-          // Recurse npm --production --json ls output, create array of package@version
-          var getDependencies = function (dependencies, list) {
-            list = list || [];
-            _.each(dependencies, function (info, dependency) {
-              list.push(dependency + '@' + info.version);
-              if (info.dependencies) {
-                getDependencies(info.dependencies, list);
-              }
-            });
-            return list;
-          };
+      return typeof licenses === 'string' ? [licenses] : licenses;
+    }
 
-          var productionDependencies = {};
-          _.each(getDependencies(npmList.dependencies), function (packageAndVersion) {
-            productionDependencies[packageAndVersion] = allDependencies[packageAndVersion];
-          });
+    /**
+     * Determine if a package has a valid license
+     *
+     * @param {String} name
+     * @return {Boolean}
+     */
 
-          var licenseStats = _.map(productionDependencies, processPackage);
-          var invalidLicenses = _.filter(licenseStats, function (pkg) { return !pkg.valid; });
+    function isInvalidLicense(name) {
+      let licenses = licensesForPackage(name);
 
-          if (!grunt.option('only-invalid')) {
-            grunt.log.debug(JSON.stringify(licenseStats, null, 2));
-          }
+      // verify all licenses for the package are in the config
+      return _.intersection(licenses, config.licenses).length < licenses.length;
+    }
 
+    // Build object containing only invalid packages
+    const invalidPackages = _.pick(packages, (pkg, name) => {
+      return isInvalidLicense(name);
+    });
 
-          if (invalidLicenses.length) {
-            grunt.log.debug(JSON.stringify(invalidLicenses, null, 2));
-            grunt.fail.warn(
-              'Non-confirming licenses: ' + _.pluck(invalidLicenses, 'name').join(', '),
-              invalidLicenses.length
-            );
-          }
+    if (Object.keys(invalidPackages).length) {
+      const util = require('util');
+      const execSync = require('child_process').execSync;
+      const names = Object.keys(invalidPackages);
 
-          done();
-        });
-      });
-    });
+      // Uses npm ls to create tree for package locations
+      const tree = execSync(`npm ls ${names.join(' ')}`);
 
+      grunt.log.debug(JSON.stringify(invalidPackages, null, 2));
+      grunt.fail.warn(
+        `Non-confirming licenses:\n ${names.join('\n ')}\n\n${tree}`,
+        invalidPackages.length
+      );
+    }
 
+    done();
   });
 };
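The rewritten licenses task above works directly with the object that license-checker's init() callback produces. A hedged sketch of the data shapes involved (the package names, versions, and licenses here are made up for illustration):

    // Sketch only: license-checker yields an object keyed by "name@version".
    const packages = {
      'some-lib@1.0.0': { licenses: 'MIT' },              // a single license string
      'other-lib@2.3.1': { licenses: ['BSD', 'Apache'] }  // or an array of licenses
    };

    // The task's config.overrides uses the same "name@version" keys, which is why
    // licensesForPackage() consults it before packages[name].licenses, and
    // isInvalidLicense() then requires every entry to appear in config.licenses.
    const config = {
      licenses: ['MIT', 'BSD', 'Apache'],
      overrides: { 'other-lib@2.3.1': ['BSD'] }
    };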
diff --git a/tasks/release.js b/tasks/release.js
index ca20966549563..90601b8993e03 100644
--- a/tasks/release.js
+++ b/tasks/release.js
@@ -1,88 +1,63 @@
 module.exports = function (grunt) {
-  var readline = require('readline');
-  var url = require('url');
-  var fs = require('fs');
-  var _ = require('lodash');
+  const readline = require('readline');
+  const url = require('url');
+  const fs = require('fs');
+  const path = require('path');
+  const _ = require('lodash');
 
   // build, then zip and upload to s3
   grunt.registerTask('release', [
     '_release:confirmUpload',
-    '_release:loadS3Config',
     'build',
-    '_release:setS3Uploads',
-    's3:release',
+    '_release:loadS3Config',
+    'aws_s3:staging',
     '_release:complete'
   ]);
 
   grunt.registerTask('_release:confirmUpload', function () {
-    var rl = readline.createInterface({
+    const rl = readline.createInterface({
       input: process.stdin,
       output: process.stdout
     });
 
     rl.on('close', this.async());
     rl.question('Do you want to actually upload the files to s3 after building?, [N/y] ', function (resp) {
-      var debug = resp.toLowerCase().trim()[0] !== 'y';
-      grunt.config.set('s3.release.debug', debug);
+      const debug = resp.toLowerCase().trim()[0] !== 'y';
+
+      grunt.config.set('aws_s3.staging.options.debug', debug);
+
       rl.close();
     });
   });
 
   // collect the key and secret from the .aws-config.json file, finish configuring the s3 task
   grunt.registerTask('_release:loadS3Config', function () {
-    var config = grunt.file.readJSON('.aws-config.json');
-    grunt.config('s3.options', {
-      key: config.key,
-      secret: config.secret
+    const config = grunt.file.readJSON('.aws-config.json');
+
+    grunt.config('aws_s3.options', {
+      accessKeyId: config.key,
+      secretAccessKey: config.secret,
+      bucket: config.bucket || grunt.config.get('aws_s3.config.bucket'),
+      region: config.region
     });
   });
 
-  grunt.registerTask('_release:setS3Uploads', function () {
+  grunt.registerTask('_release:complete', function () {
     const { sha, version } = grunt.config.get('build');
+    const config = grunt.config.get('aws_s3.staging.files');
 
-    var uploads = grunt.config.get('platforms')
-    .reduce(function (files, platform) {
-      return files.concat(
-        platform.tarName,
-        platform.tarName + '.sha1.txt',
-        platform.zipName,
-        platform.zipName + '.sha1.txt',
-        platform.rpmName,
-        platform.rpmName && platform.rpmName + '.sha1.txt',
-        platform.debName,
-        platform.debName && platform.debName + '.sha1.txt'
-      );
-    }, [])
-    .filter(function (filename) {
-      if (_.isUndefined(filename)) return false;
-      try {
-        fs.accessSync('target/' + filename, fs.F_OK);
-        return true;
-      } catch (e) {
-        return false;
-      }
-    })
-    .map(function (filename) {
-      const src = `target/${filename}`;
+    grunt.log.ok('Builds uploaded');
 
-      const shortSha = sha.substr(0, 7);
-      const dest = `kibana/staging/${version}-${shortSha}/kibana/${filename}`;
+    fs.readdirSync('./target').forEach((file) => {
+      if (path.extname(file) !== '.txt') {
+        let link = url.format({
+          protocol: 'https',
+          hostname: 'download.elastic.co',
+          pathname: config[0].dest + file
+        });
 
-      return { src, dest };
+        grunt.log.writeln(link);
+      }
     });
-    grunt.config.set('s3.release.upload', uploads);
-  });
-
-  grunt.registerTask('_release:complete', function () {
-    grunt.log.ok('Builds released');
-    var links = grunt.config.get('s3.release.upload').reduce((t, {dest}) => {
-      var link = url.format({
-        protocol: 'https',
-        hostname: 'download.elastic.co',
-        pathname: dest
-      });
-      return `${t}${link}\n`;
-    }, '');
-    grunt.log.write(links);
   });
 };
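Once the upload finishes, the reworked _release:complete task above reads target/, skips the .txt files (the sha1 checksums), and prints one download link per artifact by joining the aws_s3 staging dest with each file name. A hedged example of the resulting URL shape -- the version, sha, and file name here are hypothetical:

    // Illustration only -- hypothetical values.
    const url = require('url');

    const link = url.format({
      protocol: 'https',
      hostname: 'download.elastic.co',
      pathname: '/kibana/staging/5.0.0-9b8a2de/kibana/kibana-5.0.0-linux-x86_64.tar.gz'
    });
    // -> https://download.elastic.co/kibana/staging/5.0.0-9b8a2de/kibana/kibana-5.0.0-linux-x86_64.tar.gz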