const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')

/* global WIKI */

/**
 * SearchEngine model
 */
module.exports = class SearchEngine extends Model {
  static get tableName() { return 'searchEngines' }

  static get jsonSchema () {
    return {
      type: 'object',
      required: ['key', 'isEnabled'],

      properties: {
        id: {type: 'integer'},
        key: {type: 'string'},
        isEnabled: {type: 'boolean'},
        level: {type: 'string'},
        config: {type: 'object'}
      }
    }
  }

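  /**
   * Fetch all search engine entries from the database
   */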
  static async getSearchEngines() {
    return WIKI.models.searchEngines.query()
  }

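  /**
   * Sync search engine module definitions found on disk with the database
   */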
  static async refreshSearchEnginesFromDisk() {
    let trx
    try {
      const dbSearchEngines = await WIKI.models.searchEngines.query()

      // -> Fetch definitions from disk
      const searchEnginesDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/search'))
      let diskSearchEngines = []
      for (let dir of searchEnginesDirs) {
        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/search', dir, 'definition.yml'), 'utf8')
        diskSearchEngines.push(yaml.safeLoad(def))
      }
      WIKI.data.searchEngines = diskSearchEngines.map(searchEngine => ({
        ...searchEngine,
        props: commonHelper.parseModuleProps(searchEngine.props)
      }))

      // -> Insert new searchEngines
      let newSearchEngines = []
      for (let searchEngine of WIKI.data.searchEngines) {
        if (!_.some(dbSearchEngines, ['key', searchEngine.key])) {
          newSearchEngines.push({
            key: searchEngine.key,
            isEnabled: false,
            config: _.transform(searchEngine.props, (result, value, key) => {
              _.set(result, key, value.default)
              return result
            }, {})
          })
        } else {
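          // -> Merge newly added props into the existing config, keeping current values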
          const searchEngineConfig = _.get(_.find(dbSearchEngines, ['key', searchEngine.key]), 'config', {})
          await WIKI.models.searchEngines.query().patch({
            config: _.transform(searchEngine.props, (result, value, key) => {
              if (!_.has(result, key)) {
                _.set(result, key, value.default)
              }
              return result
            }, searchEngineConfig)
          }).where('key', searchEngine.key)
        }
      }
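      // -> Persist newly discovered search engines in a single transaction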
      if (newSearchEngines.length > 0) {
        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
        for (let searchEngine of newSearchEngines) {
          await WIKI.models.searchEngines.query(trx).insert(searchEngine)
        }
        await trx.commit()
        WIKI.logger.info(`Loaded ${newSearchEngines.length} new search engines: [ OK ]`)
      } else {
        WIKI.logger.info(`No new search engines found: [ SKIPPED ]`)
      }
    } catch (err) {
      WIKI.logger.error(`Failed to scan or load new search engines: [ FAILED ]`)
      WIKI.logger.error(err)
      if (trx) {
        trx.rollback()
      }
    }
  }

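  /**
   * Queue a background job for each enabled search engine when a page event occurs
   */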
  static async pageEvent({ event, page }) {
    const searchEngines = await WIKI.models.searchEngines.query().where('isEnabled', true)
    if (searchEngines && searchEngines.length > 0) {
      _.forEach(searchEngines, searchEngine => {
        WIKI.queue.job.syncStorage.add({
          event,
          searchEngine,
          page
        }, {
          removeOnComplete: true
        })
      })
    }
  }
}