feat: loggers + search engines models

Nicolas Giard
2018-09-01 15:15:44 -04:00
parent 9c6a4f6c20
commit 5919d14670
31 changed files with 433 additions and 40 deletions

server/models/loggers.js Normal file

@@ -0,0 +1,110 @@
const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')
/* global WIKI */
/**
* Logger model
*/
module.exports = class Logger extends Model {
static get tableName() { return 'loggers' }
static get jsonSchema () {
return {
type: 'object',
required: ['key', 'isEnabled'],
properties: {
id: {type: 'integer'},
key: {type: 'string'},
isEnabled: {type: 'boolean'},
level: {type: 'string'},
config: {type: 'object'}
}
}
}
static async getLoggers() {
return WIKI.models.loggers.query()
}
static async refreshLoggersFromDisk() {
let trx
try {
const dbLoggers = await WIKI.models.loggers.query()
// -> Fetch definitions from disk
const loggersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/logging'))
let diskLoggers = []
for (let dir of loggersDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/logging', dir, 'definition.yml'), 'utf8')
diskLoggers.push(yaml.safeLoad(def))
}
WIKI.data.loggers = diskLoggers.map(logger => ({
...logger,
props: commonHelper.parseModuleProps(logger.props)
}))
// -> Insert new loggers
let newLoggers = []
for (let logger of WIKI.data.loggers) {
if (!_.some(dbLoggers, ['key', logger.key])) {
newLoggers.push({
key: logger.key,
isEnabled: (logger.key === 'console'),
level: logger.defaultLevel,
config: _.transform(logger.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
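// -> Update existing logger: keep stored config values, add defaults for any newly added props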
const loggerConfig = _.get(_.find(dbLoggers, ['key', logger.key]), 'config', {})
await WIKI.models.loggers.query().patch({
config: _.transform(logger.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, loggerConfig)
}).where('key', logger.key)
}
}
if (newLoggers.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let logger of newLoggers) {
await WIKI.models.loggers.query(trx).insert(logger)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newLoggers.length} new loggers: [ OK ]`)
} else {
WIKI.logger.info(`No new loggers found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new loggers: [ FAILED ]`)
WIKI.logger.error(err)
if (trx) {
await trx.rollback()
}
}
}
static async pageEvent({ event, page }) {
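// Queue a job for each enabled logger whenever a page event occurs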
const loggers = await WIKI.models.loggers.query().where('isEnabled', true)
if (loggers && loggers.length > 0) {
_.forEach(loggers, logger => {
WIKI.queue.job.syncStorage.add({
event,
logger,
page
}, {
removeOnComplete: true
})
})
}
}
}
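
The heart of refreshLoggersFromDisk is the _.transform merge that builds each logger's config from its declared props: a brand-new logger receives every default, while an existing logger only gains keys it does not already have, so values an admin has saved are never overwritten. A minimal standalone sketch of that merge, using hypothetical prop names and stored values rather than real module definitions:

const _ = require('lodash')

// Hypothetical defaults, as if parsed from a module's definition.yml
const props = {
  outputPath: { default: './logs' },
  colorize: { default: true }
}

// New logger: every prop receives its default
const freshConfig = _.transform(props, (result, value, key) => {
  _.set(result, key, value.default)
  return result
}, {})
// -> { outputPath: './logs', colorize: true }

// Existing logger: stored values are kept, only missing keys get defaults
const storedConfig = { outputPath: '/var/log/wiki' }
const mergedConfig = _.transform(props, (result, value, key) => {
  if (!_.has(result, key)) {
    _.set(result, key, value.default)
  }
  return result
}, storedConfig)
// -> { outputPath: '/var/log/wiki', colorize: true }

Because the stored config is passed in as the accumulator, the patch issued for existing loggers only fills in props that were added to the module definition since the last refresh.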

server/models/searchEngines.js Normal file

@@ -0,0 +1,109 @@
const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')
/* global WIKI */
/**
* SearchEngine model
*/
module.exports = class SearchEngine extends Model {
static get tableName() { return 'searchEngines' }
static get jsonSchema () {
return {
type: 'object',
required: ['key', 'isEnabled'],
properties: {
id: {type: 'integer'},
key: {type: 'string'},
isEnabled: {type: 'boolean'},
level: {type: 'string'},
config: {type: 'object'}
}
}
}
static async getSearchEngines() {
return WIKI.models.searchEngines.query()
}
static async refreshSearchEnginesFromDisk() {
let trx
try {
const dbSearchEngines = await WIKI.models.searchEngines.query()
// -> Fetch definitions from disk
const searchEnginesDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/search'))
let diskSearchEngines = []
for (let dir of searchEnginesDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/search', dir, 'definition.yml'), 'utf8')
diskSearchEngines.push(yaml.safeLoad(def))
}
WIKI.data.searchEngines = diskSearchEngines.map(searchEngine => ({
...searchEngine,
props: commonHelper.parseModuleProps(searchEngine.props)
}))
// -> Insert new searchEngines
let newSearchEngines = []
for (let searchEngine of WIKI.data.searchEngines) {
if (!_.some(dbSearchEngines, ['key', searchEngine.key])) {
newSearchEngines.push({
key: searchEngine.key,
isEnabled: false,
config: _.transform(searchEngine.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
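// -> Update existing search engine: keep stored config values, add defaults for any newly added props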
const searchEngineConfig = _.get(_.find(dbSearchEngines, ['key', searchEngine.key]), 'config', {})
await WIKI.models.searchEngines.query().patch({
config: _.transform(searchEngine.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, searchEngineConfig)
}).where('key', searchEngine.key)
}
}
if (newSearchEngines.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let searchEngine of newSearchEngines) {
await WIKI.models.searchEngines.query(trx).insert(searchEngine)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newSearchEngines.length} new search engines: [ OK ]`)
} else {
WIKI.logger.info(`No new search engines found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new search engines: [ FAILED ]`)
WIKI.logger.error(err)
if (trx) {
await trx.rollback()
}
}
}
static async pageEvent({ event, page }) {
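// Queue a job for each enabled search engine whenever a page event occurs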
const searchEngines = await WIKI.models.searchEngines.query().where('isEnabled', true)
if (searchEngines && searchEngines.length > 0) {
_.forEach(searchEngines, searchEngine => {
WIKI.queue.job.syncStorage.add({
event,
searchEngine,
page
}, {
removeOnComplete: true
})
})
}
}
}
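
A minimal usage sketch of how the two models might be driven, assuming they are registered on WIKI.models as loggers and searchEngines by the application bootstrap; the bootModules and onPageCreated functions and the page argument are hypothetical, only the model methods come from the code above:

// Hypothetical bootstrap step: sync module definitions found on disk
// into the loggers and searchEngines tables, then report what is enabled
async function bootModules() {
  await WIKI.models.loggers.refreshLoggersFromDisk()
  await WIKI.models.searchEngines.refreshSearchEnginesFromDisk()

  const loggers = await WIKI.models.loggers.getLoggers()
  const enabled = loggers.filter(l => l.isEnabled).map(l => l.key)
  WIKI.logger.info(`Enabled loggers: ${enabled.join(', ')}`)
}

// Hypothetical page hook: fan a page event out to the enabled search engines
async function onPageCreated(page) {
  await WIKI.models.searchEngines.pageEvent({ event: 'created', page })
}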