refactor: project cleanup + onboarding page
server/core/auth.js (new file, 109 lines)
@@ -0,0 +1,109 @@
/* global wiki */

const _ = require('lodash')
const passport = require('passport')
const fs = require('fs-extra')
const path = require('path')

module.exports = {
  strategies: {},
  init() {
    this.passport = passport

    // Serialization user methods

    passport.serializeUser(function (user, done) {
      done(null, user.id)
    })

    passport.deserializeUser(function (id, done) {
      wiki.db.User.findById(id).then((user) => {
        if (user) {
          done(null, user)
        } else {
          done(new Error(wiki.lang.t('auth:errors:usernotfound')), null)
        }
        return true
      }).catch((err) => {
        done(err, null)
      })
    })

    // Load authentication strategies

    _.forOwn(_.omitBy(wiki.config.auth.strategies, s => s.enabled === false), (strategyConfig, strategyKey) => {
      strategyConfig.callbackURL = `${wiki.config.site.host}${wiki.config.site.path}login/${strategyKey}/callback`
      let strategy = require(`../modules/authentication/${strategyKey}`)
      try {
        strategy.init(passport, strategyConfig)
      } catch (err) {
        wiki.logger.error(`Authentication Provider ${strategyKey}: [ FAILED ]`)
        wiki.logger.error(err)
      }
      fs.readFile(path.join(wiki.ROOTPATH, `assets/svg/auth-icon-${strategyKey}.svg`), 'utf8').then(iconData => {
        strategy.icon = iconData
      }).catch(err => {
        if (err.code === 'ENOENT') {
          strategy.icon = '[missing icon]'
        } else {
          wiki.logger.error(err)
        }
      })
      this.strategies[strategy.key] = strategy
      wiki.logger.info(`Authentication Provider ${strategyKey}: [ OK ]`)
    })

    // Create Guest account for first-time

    wiki.db.User.findOne({
      where: {
        provider: 'local',
        email: 'guest@example.com'
      }
    }).then((c) => {
      if (c < 1) {
        return wiki.db.User.create({
          provider: 'local',
          email: 'guest@example.com',
          name: 'Guest',
          password: '',
          role: 'guest'
        }).then(() => {
          wiki.logger.info('[AUTH] Guest account created successfully!')
          return true
        }).catch((err) => {
          wiki.logger.error('[AUTH] An error occurred while creating guest account:')
          wiki.logger.error(err)
          return err
        })
      }
    })

    // .then(() => {
    //   if (process.env.WIKI_JS_HEROKU) {
    //     return wiki.db.User.findOne({ provider: 'local', email: process.env.WIKI_ADMIN_EMAIL }).then((c) => {
    //       if (c < 1) {
    //         // Create root admin account (HEROKU ONLY)
    //
    //         return wiki.db.User.create({
    //           provider: 'local',
    //           email: process.env.WIKI_ADMIN_EMAIL,
    //           name: 'Administrator',
    //           password: '$2a$04$MAHRw785Xe/Jd5kcKzr3D.VRZDeomFZu2lius4gGpZZ9cJw7B7Mna', // admin123 (default)
    //           role: 'admin'
    //         }).then(() => {
    //           wiki.logger.info('[AUTH] Root admin account created successfully!')
    //           return true
    //         }).catch((err) => {
    //           wiki.logger.error('[AUTH] An error occurred while creating root admin account:')
    //           wiki.logger.error(err)
    //           return err
    //         })
    //       } else { return true }
    //     })
    //   } else { return true }
    // })

    return this
  }
}
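Note: the strategy modules loaded above from server/modules/authentication/ are expected to expose a key and an init(passport, config) method. A minimal sketch of what such a module might look like for the local strategy, using passport-local (illustrative only, not part of this commit):

// Hypothetical shape of a module consumed by auth.js above.
const LocalStrategy = require('passport-local').Strategy

module.exports = {
  key: 'local',
  init (passport, conf) {
    passport.use('local', new LocalStrategy({
      usernameField: 'email',
      passwordField: 'password'
    }, (uEmail, uPassword, done) => {
      // Look the user up and verify the password here; shown as a stub.
      done(null, { id: 1, email: uEmail })
    }))
  }
}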
server/core/config.js (new file, 105 lines)
@@ -0,0 +1,105 @@
const _ = require('lodash')
const cfgHelper = require('../helpers/config')
const fs = require('fs')
const path = require('path')
const yaml = require('js-yaml')

/* global wiki */

module.exports = {
  /**
   * Load root config from disk
   */
  init() {
    let confPaths = {
      config: path.join(wiki.ROOTPATH, 'config.yml'),
      data: path.join(wiki.SERVERPATH, 'app/data.yml'),
      dataRegex: path.join(wiki.SERVERPATH, 'app/regex.js')
    }

    let appconfig = {}
    let appdata = {}

    try {
      appconfig = yaml.safeLoad(
        cfgHelper.parseConfigValue(
          fs.readFileSync(confPaths.config, 'utf8')
        )
      )
      appdata = yaml.safeLoad(fs.readFileSync(confPaths.data, 'utf8'))
      appdata.regex = require(confPaths.dataRegex)
    } catch (ex) {
      console.error(ex)
      process.exit(1)
    }

    // Merge with defaults

    appconfig = _.defaultsDeep(appconfig, appdata.defaults.config)

    if (appconfig.port < 1) {
      appconfig.port = process.env.PORT || 80
    }

    appconfig.public = (appconfig.public === true || _.toLower(appconfig.public) === 'true')

    wiki.config = appconfig
    wiki.data = appdata
    wiki.version = require(path.join(wiki.ROOTPATH, 'package.json')).version
  },

  /**
   * Load config from DB
   *
   * @param {Array} subsets Array of subsets to load
   * @returns Promise
   */
  async loadFromDb(subsets) {
    if (!_.isArray(subsets) || subsets.length === 0) {
      subsets = wiki.data.configNamespaces
    }

    let results = await wiki.db.Setting.findAll({
      attributes: ['key', 'config'],
      where: {
        key: {
          $in: subsets
        }
      }
    })
    if (_.isArray(results) && results.length === subsets.length) {
      results.forEach(result => {
        wiki.config[result.key] = result.config
      })
      return true
    } else {
      wiki.logger.warn('DB Configuration is empty or incomplete.')
      return false
    }
  },
  /**
   * Save config to DB
   *
   * @param {Array} subsets Array of subsets to save
   * @returns Promise
   */
  async saveToDb(subsets) {
    if (!_.isArray(subsets) || subsets.length === 0) {
      subsets = wiki.data.configNamespaces
    }

    try {
      for (let set of subsets) {
        await wiki.db.Setting.upsert({
          key: set,
          config: _.get(wiki.config, set, {})
        })
      }
    } catch (err) {
      wiki.logger.error(`Failed to save configuration to DB: ${err.message}`)
      return false
    }

    return true
  }
}
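Note: init() layers the on-disk config.yml over the defaults shipped in app/data.yml using lodash's defaultsDeep. A small standalone sketch of that merge behavior, with made-up values:

const _ = require('lodash')

// What the user wrote in config.yml (partial):
const fileConfig = { port: 3000, db: { host: 'db.internal' } }
// Defaults from app/data.yml:
const defaults = { port: 80, public: false, db: { host: 'localhost', port: 5432 } }

// defaultsDeep fills in only the missing keys, recursively:
const merged = _.defaultsDeep(fileConfig, defaults)
// -> { port: 3000, public: false, db: { host: 'db.internal', port: 5432 } }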
server/core/db.js (new file, 132 lines)
@@ -0,0 +1,132 @@
const _ = require('lodash')
const fs = require('fs')
const path = require('path')
const Promise = require('bluebird')
const Sequelize = require('sequelize')

/* global wiki */

const operatorsAliases = {
  $eq: Sequelize.Op.eq,
  $ne: Sequelize.Op.ne,
  $gte: Sequelize.Op.gte,
  $gt: Sequelize.Op.gt,
  $lte: Sequelize.Op.lte,
  $lt: Sequelize.Op.lt,
  $not: Sequelize.Op.not,
  $in: Sequelize.Op.in,
  $notIn: Sequelize.Op.notIn,
  $is: Sequelize.Op.is,
  $like: Sequelize.Op.like,
  $notLike: Sequelize.Op.notLike,
  $iLike: Sequelize.Op.iLike,
  $notILike: Sequelize.Op.notILike,
  $regexp: Sequelize.Op.regexp,
  $notRegexp: Sequelize.Op.notRegexp,
  $iRegexp: Sequelize.Op.iRegexp,
  $notIRegexp: Sequelize.Op.notIRegexp,
  $between: Sequelize.Op.between,
  $notBetween: Sequelize.Op.notBetween,
  $overlap: Sequelize.Op.overlap,
  $contains: Sequelize.Op.contains,
  $contained: Sequelize.Op.contained,
  $adjacent: Sequelize.Op.adjacent,
  $strictLeft: Sequelize.Op.strictLeft,
  $strictRight: Sequelize.Op.strictRight,
  $noExtendRight: Sequelize.Op.noExtendRight,
  $noExtendLeft: Sequelize.Op.noExtendLeft,
  $and: Sequelize.Op.and,
  $or: Sequelize.Op.or,
  $any: Sequelize.Op.any,
  $all: Sequelize.Op.all,
  $values: Sequelize.Op.values,
  $col: Sequelize.Op.col
}

/**
 * PostgreSQL DB module
 */
module.exports = {
  Sequelize,
  Op: Sequelize.Op,

  /**
   * Initialize DB
   *
   * @return {Object} DB instance
   */
  init() {
    let self = this
    let dbModelsPath = path.join(wiki.SERVERPATH, 'models')

    // Define Sequelize instance

    this.inst = new this.Sequelize(wiki.config.db.db, wiki.config.db.user, wiki.config.db.pass, {
      host: wiki.config.db.host,
      port: wiki.config.db.port,
      dialect: 'postgres',
      pool: {
        max: 10,
        min: 0,
        idle: 10000
      },
      logging: log => { wiki.logger.log('debug', log) },
      operatorsAliases
    })

    // Attempt to connect and authenticate to DB

    this.inst.authenticate().then(() => {
      wiki.logger.info('Database (PostgreSQL) connection: [ OK ]')
    }).catch(err => {
      wiki.logger.error('Failed to connect to PostgreSQL instance.')
      wiki.logger.error(err)
      process.exit(1)
    })

    // Load DB Models

    fs
      .readdirSync(dbModelsPath)
      .filter(file => {
        return (file.indexOf('.') !== 0 && file.indexOf('_') !== 0)
      })
      .forEach(file => {
        let modelName = _.upperFirst(_.camelCase(_.split(file, '.')[0]))
        self[modelName] = self.inst.import(path.join(dbModelsPath, file))
      })

    // Associate DB Models

    require(path.join(dbModelsPath, '_relations.js'))(self)

    // Set init tasks

    let initTasks = {
      // -> Sync DB Schemas
      syncSchemas() {
        return self.inst.sync({
          force: false,
          logging: log => { wiki.logger.log('debug', log) }
        })
      },
      // -> Set Connection App Name
      setAppName() {
        return self.inst.query(`set application_name = 'Wiki.js'`, { raw: true })
      }
    }

    let initTasksQueue = (wiki.IS_MASTER) ? [
      initTasks.syncSchemas,
      initTasks.setAppName
    ] : [
      initTasks.setAppName
    ]

    // Perform init tasks

    this.onReady = Promise.each(initTasksQueue, t => t()).return(true)

    return this
  }
}
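Note: the operatorsAliases map passed to the Sequelize constructor re-registers the legacy $-prefixed operators, pointing each alias at its Sequelize.Op symbol, so $-style where clauses (like the $in lookup in config.js above) keep working. A small illustrative sketch of the equivalence:

const Sequelize = require('sequelize')

// A legacy-style where clause, as written in config.js...
const legacyWhere = { key: { $in: ['auth', 'site'] } }
// ...is interpreted by an instance configured with operatorsAliases
// exactly like the symbol-based form:
const symbolWhere = { key: { [Sequelize.Op.in]: ['auth', 'site'] } }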
server/core/graphql.js (new file, 41 lines)
@@ -0,0 +1,41 @@
const _ = require('lodash')
const fs = require('fs')
const gqlTools = require('graphql-tools')
const path = require('path')

/* global wiki */

const typeDefs = fs.readFileSync(path.join(wiki.SERVERPATH, 'schemas/types.graphql'), 'utf8')

const DateScalar = require('../schemas/scalar-date')
const AuthenticationResolvers = require('../schemas/resolvers-authentication')
const CommentResolvers = require('../schemas/resolvers-comment')
const DocumentResolvers = require('../schemas/resolvers-document')
const FileResolvers = require('../schemas/resolvers-file')
const FolderResolvers = require('../schemas/resolvers-folder')
const GroupResolvers = require('../schemas/resolvers-group')
const SettingResolvers = require('../schemas/resolvers-setting')
const TagResolvers = require('../schemas/resolvers-tag')
const TranslationResolvers = require('../schemas/resolvers-translation')
const UserResolvers = require('../schemas/resolvers-user')

const resolvers = _.merge(
  AuthenticationResolvers,
  CommentResolvers,
  DocumentResolvers,
  FileResolvers,
  FolderResolvers,
  GroupResolvers,
  SettingResolvers,
  TagResolvers,
  TranslationResolvers,
  UserResolvers,
  DateScalar
)

const Schema = gqlTools.makeExecutableSchema({
  typeDefs,
  resolvers
})

module.exports = Schema
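Note: _.merge folds the per-domain resolver modules into a single resolver map before handing it to makeExecutableSchema, so each resolvers-* file only needs to export its slice of Query/Mutation. A hypothetical shape of one such module (field names are invented for illustration, not taken from this commit):

module.exports = {
  Query: {
    settings (obj, args, context, info) {
      return [] // fetch settings here
    }
  },
  Mutation: {
    setConfigEntry (obj, args, context) {
      return true // persist the entry here
    }
  }
}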
server/core/kernel.js (new file, 95 lines)
@@ -0,0 +1,95 @@
const _ = require('lodash')
const cluster = require('cluster')
const Promise = require('bluebird')

/* global wiki */

module.exports = {
  numWorkers: 1,
  workers: [],
  init() {
    if (cluster.isMaster) {
      wiki.logger.info('=======================================')
      wiki.logger.info('= Wiki.js =============================')
      wiki.logger.info('=======================================')

      wiki.redis = require('./redis').init()
      wiki.queue = require('./queue').init()

      this.setWorkerLimit()
      this.bootMaster()
    } else {
      this.bootWorker()
    }
  },
  /**
   * Pre-Master Boot Sequence
   */
  preBootMaster() {
    return Promise.mapSeries([
      () => { return wiki.db.onReady },
      () => { return wiki.configSvc.loadFromDb() },
      () => { return wiki.queue.clean() }
    ], fn => { return fn() })
  },
  /**
   * Boot Master Process
   */
  bootMaster() {
    this.preBootMaster().then(sequenceResults => {
      if (_.every(sequenceResults, rs => rs === true) && wiki.config.configMode !== 'setup') {
        this.postBootMaster()
      } else {
        wiki.logger.info('Starting configuration manager...')
        require('../setup')()
      }
      return true
    }).catch(err => {
      wiki.logger.error(err)
      process.exit(1)
    })
  },
  /**
   * Post-Master Boot Sequence
   */
  async postBootMaster() {
    await require('../master')()

    _.times(this.numWorkers, () => {
      this.spawnWorker()
    })

    wiki.queue.uplClearTemp.add({}, {
      repeat: { cron: '*/15 * * * *' }
    })

    cluster.on('exit', (worker, code, signal) => {
      if (!global.DEV) {
        wiki.logger.info(`Background Worker #${worker.id} was terminated.`)
      }
    })
  },
  /**
   * Boot Worker Process
   */
  bootWorker() {
    wiki.logger.info(`Background Worker #${cluster.worker.id} is initializing...`)
    require('../worker')
  },
  /**
   * Spawn new Worker process
   */
  spawnWorker() {
    this.workers.push(cluster.fork())
  },
  /**
   * Set Worker count based on config + system capabilities
   */
  setWorkerLimit() {
    const numCPUs = require('os').cpus().length
    this.numWorkers = (wiki.config.workers > 0) ? wiki.config.workers : numCPUs
    if (this.numWorkers > numCPUs) {
      this.numWorkers = numCPUs
    }
  }
}
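Note: preBootMaster() relies on bluebird's Promise.mapSeries to run the boot steps strictly in order and collect their results, which bootMaster() then checks with _.every before deciding between normal boot and the setup wizard. A tiny standalone sketch of that pattern, with stand-in steps:

const Promise = require('bluebird')

Promise.mapSeries([
  () => Promise.resolve(true), // stand-in for wiki.db.onReady
  () => Promise.resolve(true), // stand-in for wiki.configSvc.loadFromDb()
  () => Promise.resolve(true)  // stand-in for wiki.queue.clean()
], fn => fn()).then(results => {
  console.log(results) // [ true, true, true ] -> proceed to postBootMaster()
})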
server/core/localization.js (new file, 57 lines)
@@ -0,0 +1,57 @@
const _ = require('lodash')
const dotize = require('dotize')
const i18nBackend = require('i18next-node-fs-backend')
const i18nMW = require('i18next-express-middleware')
const i18next = require('i18next')
const path = require('path')
const Promise = require('bluebird')

/* global wiki */

module.exports = {
  engine: null,
  namespaces: [],
  init() {
    this.namespaces = wiki.data.localeNamespaces
    this.engine = i18next
    this.engine.use(i18nBackend).init({
      load: 'languageOnly',
      ns: this.namespaces,
      defaultNS: 'common',
      saveMissing: false,
      preload: [wiki.config.site.lang],
      lng: wiki.config.site.lang,
      fallbackLng: 'en',
      backend: {
        loadPath: path.join(wiki.SERVERPATH, 'locales/{{lng}}/{{ns}}.yml')
      }
    })
    return this
  },
  attachMiddleware (app) {
    app.use(i18nMW.handle(this.engine))
  },
  async getByNamespace(locale, namespace) {
    if (this.engine.hasResourceBundle(locale, namespace)) {
      let data = this.engine.getResourceBundle(locale, namespace)
      return _.map(dotize.convert(data), (value, key) => {
        return {
          key,
          value
        }
      })
    } else {
      throw new Error('Invalid locale or namespace')
    }
  },
  async loadLocale(locale) {
    return Promise.fromCallback(cb => {
      return this.engine.loadLanguages(locale, cb)
    })
  },
  async setCurrentLocale(locale) {
    return Promise.fromCallback(cb => {
      return this.engine.changeLanguage(locale, cb)
    })
  }
}
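Note: getByNamespace() flattens a loaded locale bundle into dotted key/value pairs via dotize before returning it. A small standalone illustration with a made-up bundle:

const _ = require('lodash')
const dotize = require('dotize')

const bundle = { nav: { home: 'Home', search: 'Search' } }
const entries = _.map(dotize.convert(bundle), (value, key) => ({ key, value }))
console.log(entries)
// [ { key: 'nav.home', value: 'Home' }, { key: 'nav.search', value: 'Search' } ]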
server/core/logger.js (new file, 40 lines)
@@ -0,0 +1,40 @@
const _ = require('lodash')
const cluster = require('cluster')
const fs = require('fs-extra')
const path = require('path')

/* global wiki */

module.exports = {
  loggers: {},
  init() {
    let winston = require('winston')

    let logger = new (winston.Logger)({
      level: wiki.config.logLevel,
      transports: []
    })

    logger.filters.push((level, msg) => {
      let processName = (cluster.isMaster) ? 'MASTER' : `WORKER-${cluster.worker.id}`
      return '[' + processName + '] ' + msg
    })

    _.forOwn(_.omitBy(wiki.config.logging.loggers, s => s.enabled === false), (loggerConfig, loggerKey) => {
      let loggerModule = require(`../modules/logging/${loggerKey}`)
      loggerModule.init(logger, loggerConfig)
      fs.readFile(path.join(wiki.ROOTPATH, `assets/svg/auth-icon-${loggerKey}.svg`), 'utf8').then(iconData => {
        logger.icon = iconData
      }).catch(err => {
        if (err.code === 'ENOENT') {
          logger.icon = '[missing icon]'
        } else {
          logger.error(err)
        }
      })
      this.loggers[logger.key] = loggerModule
    })

    return logger
  }
}
server/core/queue.js (new file, 35 lines)
@@ -0,0 +1,35 @@
const Bull = require('bull')
const Promise = require('bluebird')

/* global wiki */

module.exports = {
  init() {
    wiki.data.queues.forEach(queueName => {
      this[queueName] = new Bull(queueName, {
        prefix: `q-${wiki.config.ha.nodeuid}`,
        redis: wiki.config.redis
      })
    })
    return this
  },
  clean() {
    return Promise.each(wiki.data.queues, queueName => {
      return new Promise((resolve, reject) => {
        let keyStream = wiki.redis.scanStream({
          match: `q-${wiki.config.ha.nodeuid}:${queueName}:*`
        })
        keyStream.on('data', resultKeys => {
          if (resultKeys.length > 0) {
            wiki.redis.del(resultKeys)
          }
        })
        keyStream.on('end', resolve)
      })
    }).then(() => {
      wiki.logger.info('Purging old queue jobs: [ OK ]')
    }).return(true).catch(err => {
      wiki.logger.error(err)
    })
  }
}
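Note: every queue name listed in data.yml gets its own Bull queue, namespaced in Redis by the HA node UID prefix, and clean() purges leftover q-&lt;nodeuid&gt;:&lt;queue&gt;:* keys on startup. A minimal sketch of creating one such queue and scheduling the repeating job kernel.js adds (made-up prefix, local Redis, illustrative only):

const Bull = require('bull')

const uplClearTemp = new Bull('uplClearTemp', {
  prefix: 'q-example-node', // assumption: any node UID string works here
  redis: { host: '127.0.0.1', port: 6379 }
})

// Same repeat options used for the temp-upload cleanup job in kernel.js:
uplClearTemp.add({}, { repeat: { cron: '*/15 * * * *' } })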
server/core/redis.js (new file, 23 lines)
@@ -0,0 +1,23 @@
const Redis = require('ioredis')
const { isPlainObject } = require('lodash')

/* global wiki */

module.exports = {
  init() {
    if (isPlainObject(wiki.config.redis)) {
      let red = new Redis(wiki.config.redis)
      red.on('ready', () => {
        wiki.logger.info('Redis connection: [ OK ]')
      })
      red.on('error', () => {
        wiki.logger.error('Failed to connect to Redis instance!')
        process.exit(1)
      })
      return red
    } else {
      wiki.logger.error('Invalid Redis configuration!')
      process.exit(1)
    }
  }
}
server/core/system.js (new file, 68 lines)
@@ -0,0 +1,68 @@
const _ = require('lodash')
const cfgHelper = require('../helpers/config')
const Promise = require('bluebird')

/* global wiki */

module.exports = {
  /**
   * Upgrade from Wiki.js 1.x - MongoDB database
   *
   * @param {Object} opts Options object
   */
  async upgradeFromMongo (opts) {
    wiki.telemetry.sendEvent('setup', 'upgradeFromMongo')

    wiki.logger.info('Upgrading from MongoDB...')

    let mongo = require('mongodb').MongoClient
    let parsedMongoConStr = cfgHelper.parseConfigValue(opts.mongoCnStr)

    return new Promise((resolve, reject) => {
      // Connect to MongoDB

      mongo.connect(parsedMongoConStr, {
        autoReconnect: false,
        reconnectTries: 2,
        reconnectInterval: 1000,
        connectTimeoutMS: 5000,
        socketTimeoutMS: 5000
      }, async (err, db) => {
        try {
          if (err !== null) { throw err }

          let users = db.collection('users')

          // Check if users table is populated
          let userCount = await users.count()
          if (userCount < 2) {
            throw new Error('MongoDB Upgrade: Users table is empty!')
          }

          // Import all users
          let userData = await users.find({
            email: {
              $not: 'guest'
            }
          }).toArray()
          await wiki.db.User.bulkCreate(_.map(userData, usr => {
            return {
              email: usr.email,
              name: usr.name || 'Imported User',
              password: usr.password || '',
              provider: usr.provider || 'local',
              providerId: usr.providerId || '',
              role: 'user',
              createdAt: usr.createdAt
            }
          }))

          resolve(true)
        } catch (err) {
          reject(err)
        }
        db.close()
      })
    })
  }
}
server/core/telemetry.js (new file, 63 lines)
@@ -0,0 +1,63 @@
const _ = require('lodash')
const axios = require('axios')
const bugsnag = require('bugsnag')
const path = require('path')
const uuid = require('uuid/v4')

/* global wiki */

module.exports = {
  cid: '',
  enabled: false,
  init() {
    this.cid = uuid()
    bugsnag.register(wiki.data.telemetry.BUGSNAG_ID, {
      appVersion: wiki.version,
      autoNotify: false,
      hostname: this.cid,
      notifyReleaseStages: ['production'],
      packageJSON: path.join(wiki.ROOTPATH, 'package.json'),
      projectRoot: wiki.ROOTPATH,
      useSSL: true
    })
    bugsnag.onBeforeNotify((notification, originalError) => {
      if (!this.enabled) { return false }
    })

    if (_.get(wiki.config, 'logging.telemetry', false) === true) {
      this.enabled = true
    }

    return this
  },
  sendError(err) {
    bugsnag.notify(err, { userId: this.cid })
  },
  sendEvent(eventCategory, eventAction, eventLabel) {
    if (!this.enabled) { return false }
    axios({
      method: 'post',
      url: wiki.data.telemetry.GA_REMOTE,
      headers: {
        'Content-type': 'application/x-www-form-urlencoded'
      },
      params: {
        v: 1, // API version
        tid: wiki.data.telemetry.GA_ID, // Tracking ID
        aip: 1, // Anonymize IP
        ds: 'server', // Data source
        cid: this.cid, // Client ID
        t: 'event', // Hit Type
        ec: eventCategory, // Event Category
        ea: eventAction, // Event Action
        el: eventLabel // Event Label
      }
    }).then(resp => {
      if (resp.status !== 200) {
        wiki.logger.warn('Unable to send analytics telemetry request.')
      }
    }, err => {
      wiki.logger.warn('Unable to send analytics telemetry request.')
    })
  }
}