Removed all native comp. dependencies + fixes
@@ -4,7 +4,7 @@ const Promise = require('bluebird')
 const path = require('path')
 const fs = Promise.promisifyAll(require('fs-extra'))
 const _ = require('lodash')
-const farmhash = require('farmhash')
+const crypto = require('crypto')
 
 /**
  * Entries Model
@@ -228,7 +228,7 @@ module.exports = {
    * @return {String} The full cache path.
    */
   getCachePath (entryPath) {
-    return path.join(this._cachePath, farmhash.fingerprint32(entryPath) + '.json')
+    return path.join(this._cachePath, crypto.createHash('md5').update(entryPath).digest('hex') + '.json')
   },
 
   /**
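Note: farmhash is a native addon; its 32-bit `fingerprint32` is replaced here with an MD5 hex digest from Node's built-in `crypto` module, so cache filenames become 32-character hex names but remain deterministic per entry path. A minimal sketch of the new derivation, using only Node built-ins (the helper name and sample directory are illustrative):

const crypto = require('crypto')
const path = require('path')

// Illustrative helper mirroring the new getCachePath logic above.
// MD5 serves as a fast, stable filename hash here, not as a security primitive.
function cachePathFor (cacheDir, entryPath) {
  const hash = crypto.createHash('md5').update(entryPath).digest('hex')
  return path.join(cacheDir, hash + '.json')
}

// cachePathFor('/data/cache', 'home') -> '/data/cache/<32-char-hex>.json'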
libs/search-index/index.js (new file)
@@ -0,0 +1,81 @@
+const bunyan = require('bunyan')
+const level = require('levelup')
+const down = require('memdown')
+const SearchIndexAdder = require('search-index-adder')
+const SearchIndexSearcher = require('search-index-searcher')
+
+module.exports = function (givenOptions, moduleReady) {
+  const optionsLoaded = function (err, SearchIndex) {
+    const siUtil = require('./siUtil.js')(SearchIndex.options)
+    if (err) return moduleReady(err)
+    SearchIndex.close = siUtil.close
+    SearchIndex.countDocs = siUtil.countDocs
+    getAdder(SearchIndex, adderLoaded)
+  }
+
+  const adderLoaded = function (err, SearchIndex) {
+    if (err) return moduleReady(err)
+    getSearcher(SearchIndex, searcherLoaded)
+  }
+
+  const searcherLoaded = function (err, SearchIndex) {
+    if (err) return moduleReady(err)
+    return moduleReady(err, SearchIndex)
+  }
+
+  getOptions(givenOptions, optionsLoaded)
+}
+
+const getAdder = function (SearchIndex, done) {
+  SearchIndexAdder(SearchIndex.options, function (err, searchIndexAdder) {
+    SearchIndex.add = searchIndexAdder.add
+    SearchIndex.callbackyAdd = searchIndexAdder.concurrentAdd // deprecated
+    SearchIndex.concurrentAdd = searchIndexAdder.concurrentAdd
+    SearchIndex.createWriteStream = searchIndexAdder.createWriteStream
+    SearchIndex.dbWriteStream = searchIndexAdder.dbWriteStream
+    SearchIndex.defaultPipeline = searchIndexAdder.defaultPipeline
+    SearchIndex.del = searchIndexAdder.deleter
+    SearchIndex.deleteStream = searchIndexAdder.deleteStream
+    SearchIndex.flush = searchIndexAdder.flush
+    done(err, SearchIndex)
+  })
+}
+
+const getSearcher = function (SearchIndex, done) {
+  SearchIndexSearcher(SearchIndex.options, function (err, searchIndexSearcher) {
+    SearchIndex.availableFields = searchIndexSearcher.availableFields
+    SearchIndex.buckets = searchIndexSearcher.bucketStream
+    SearchIndex.categorize = searchIndexSearcher.categoryStream
+    SearchIndex.dbReadStream = searchIndexSearcher.dbReadStream
+    SearchIndex.get = searchIndexSearcher.get
+    SearchIndex.match = searchIndexSearcher.match
+    SearchIndex.scan = searchIndexSearcher.scan
+    SearchIndex.search = searchIndexSearcher.search
+    SearchIndex.totalHits = searchIndexSearcher.totalHits
+    done(err, SearchIndex)
+  })
+}
+
+const getOptions = function (options, done) {
+  var SearchIndex = {}
+  SearchIndex.options = Object.assign({}, {
+    indexPath: 'si',
+    keySeparator: '○',
+    logLevel: 'error'
+  }, options)
+  options.log = bunyan.createLogger({
+    name: 'search-index',
+    level: options.logLevel
+  })
+  if (!options.indexes) {
+    level(SearchIndex.options.indexPath || 'si', {
+      valueEncoding: 'json',
+      db: down
+    }, function (err, db) {
+      SearchIndex.options.indexes = db
+      return done(err, SearchIndex)
+    })
+  } else {
+    return done(null, SearchIndex)
+  }
+}
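Note: this vendored entry point tracks the upstream search-index module, but wires `memdown` (a pure-JavaScript in-memory leveldown replacement) into levelup instead of the native leveldown backend, which is what eliminates the compile step. One quirk carried over from upstream: `getOptions` merges the defaults into `SearchIndex.options` but attaches the bunyan logger to the caller's `options` object, so the `logLevel` default only takes effect when the caller passes it explicitly. A hedged usage sketch, with option values following the defaults above:

const searchIndex = require('./libs/search-index')

searchIndex({ indexPath: 'wiki', logLevel: 'error' }, (err, si) => {
  if (err) throw err
  // getAdder/getSearcher have attached add, search, countDocs, etc.
  si.countDocs((err, count) => {
    if (!err) console.log('documents indexed:', count)
  })
})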
libs/search-index/siUtil.js (new file)
@@ -0,0 +1,36 @@
+'use strict'
+
+module.exports = function (siOptions) {
+  var siUtil = {}
+
+  siUtil.countDocs = function (callback) {
+    var count = 0
+    const gte = 'DOCUMENT' + siOptions.keySeparator
+    const lte = 'DOCUMENT' + siOptions.keySeparator + siOptions.keySeparator
+    siOptions.indexes.createReadStream({gte: gte, lte: lte})
+      .on('data', function (data) {
+        count++
+      })
+      .on('error', function (err) {
+        return callback(err, null)
+      })
+      .on('end', function () {
+        return callback(null, count)
+      })
+  }
+
+  siUtil.close = function (callback) {
+    siOptions.indexes.close(function (err) {
+      while (!siOptions.indexes.isClosed()) {
+        // log not always working here - investigate
+        if (siOptions.log) siOptions.log.info('closing...')
+      }
+      if (siOptions.indexes.isClosed()) {
+        if (siOptions.log) siOptions.log.info('closed...')
+        callback(err)
+      }
+    })
+  }
+
+  return siUtil
+}
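Note: `countDocs` relies on levelup's lexicographically ordered keys. Every document lives under a key of the form `DOCUMENT○<id>`, so streaming the range from `DOCUMENT○` to `DOCUMENT○○` visits exactly the document entries and nothing else. (The `while (!isClosed())` loop in `close` spins synchronously on the event loop; the in-line comment above already flags it as suspect.) A small self-contained sketch of the range-scan idea, with illustrative keys and values:

const level = require('levelup')
const down = require('memdown')

level('demo', { db: down, valueEncoding: 'json' }, (err, db) => {
  if (err) throw err
  db.batch([
    { type: 'put', key: 'DOCUMENT○1', value: { title: 'Home' } },
    { type: 'put', key: 'DOCUMENT○2', value: { title: 'About' } },
    { type: 'put', key: 'VECTOR○x', value: {} } // outside the scanned range
  ], function () {
    let count = 0
    db.createReadStream({ gte: 'DOCUMENT○', lte: 'DOCUMENT○○' })
      .on('data', () => count++)
      .on('end', () => console.log(count)) // prints 2
  })
})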
@@ -2,8 +2,7 @@
 
 const Promise = require('bluebird')
 const _ = require('lodash')
-const path = require('path')
-const searchIndex = require('search-index')
+const searchIndex = require('./search-index')
 const stopWord = require('stopword')
 const streamToPromise = require('stream-to-promise')
 
@@ -19,12 +18,11 @@ module.exports = {
    */
   init () {
     let self = this
-    let dbPath = path.resolve(ROOTPATH, appconfig.paths.data, 'search')
     self._isReady = new Promise((resolve, reject) => {
       searchIndex({
         deletable: true,
         fieldedSearch: true,
-        indexPath: dbPath,
+        indexPath: 'wiki',
         logLevel: 'error',
         stopwords: _.get(stopWord, appconfig.lang, [])
       }, (err, si) => {
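Note: with the vendored module backed by memdown, `indexPath: 'wiki'` is just a store name rather than an on-disk directory, so the `dbPath` computed from the data folder (and the `path` require that fed it) became dead code. The apparent trade-off is that the search index now lives in memory and presumably has to be rebuilt from content at startup rather than persisted between runs.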
@@ -6,9 +6,10 @@ const fs = Promise.promisifyAll(require('fs-extra'))
 const readChunk = require('read-chunk')
 const fileType = require('file-type')
 const mime = require('mime-types')
-const farmhash = require('farmhash')
+const crypto = require('crypto')
 const chokidar = require('chokidar')
-const sharp = require('sharp')
+const jimp = require('jimp')
+const imageSize = Promise.promisify(require('image-size'))
 const _ = require('lodash')
 
 /**
@@ -32,9 +33,6 @@ module.exports = {
     self._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads')
     self._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs')
 
-    // Disable Sharp cache, as it cause file locks issues when deleting uploads.
-    sharp.cache(false)
-
     return self
   },
 
@@ -162,7 +160,7 @@ module.exports = {
       let fldPath = path.join(self._uploadsPath, fldName)
       let fPath = path.join(fldPath, f)
      let fPathObj = path.parse(fPath)
-      let fUid = farmhash.fingerprint32(fldName + '/' + f)
+      let fUid = crypto.createHash('md5').update(fldName + '/' + f).digest('hex')
 
       return fs.statAsync(fPath).then((s) => {
         if (!s.isFile()) { return false }
@@ -179,8 +177,8 @@ module.exports = {
         // Images
 
         if (s.size < 3145728) { // ignore files larger than 3MB
-          if (_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], mimeInfo.mime)) {
-            return self.getImageMetadata(fPath).then((mImgData) => {
+          if (_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/bmp'], mimeInfo.mime)) {
+            return self.getImageSize(fPath).then((mImgSize) => {
              let cacheThumbnailPath = path.parse(path.join(self._uploadsThumbsPath, fUid + '.png'))
              let cacheThumbnailPathStr = path.format(cacheThumbnailPath)
 
@@ -188,7 +186,7 @@ module.exports = {
                _id: fUid,
                category: 'image',
                mime: mimeInfo.mime,
-               extra: _.pick(mImgData, ['format', 'width', 'height', 'density', 'hasAlpha', 'orientation']),
+               extra: mImgSize,
                folder: 'f:' + fldName,
                filename: f,
                basename: fPathObj.name,
@@ -232,24 +230,23 @@ module.exports = {
    * @return {Promise<Object>} Promise returning the resized image info
    */
   generateThumbnail (sourcePath, destPath) {
-    return sharp(sourcePath)
-      .withoutEnlargement()
-      .resize(150, 150)
-      .background('white')
-      .embed()
-      .flatten()
-      .toFormat('png')
-      .toFile(destPath)
+    return jimp.read(sourcePath).then(img => {
+      return img.cover(150, 150)
+        .background(0xFFFFFFFF)
+        .opaque()
+        .rgba(false)
+        .write(destPath)
+    })
   },
 
   /**
-   * Gets the image metadata.
+   * Gets the image dimensions.
    *
    * @param {String} sourcePath The source path
-   * @return {Object} The image metadata.
+   * @return {Object} The image dimensions.
    */
-  getImageMetadata (sourcePath) {
-    return sharp(sourcePath).metadata()
+  getImageSize (sourcePath) {
+    return imageSize(sourcePath)
   }
 
 }
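Note: sharp's native libvips pipeline is swapped for jimp, a pure-JavaScript image library. `cover(150, 150)` scales and crops to fill the thumbnail box, `background(0xFFFFFFFF)` plus `opaque()` and `rgba(false)` flatten any transparency onto white, and jimp infers PNG output from the destination extension, which is why the explicit `toFormat('png')` step disappears. Likewise, `getImageSize` now reads just the header bytes via image-size instead of full metadata through sharp, so `extra` shrinks to image-size's `{ width, height, type }` shape. A hedged standalone sketch of the same flow, with illustrative file paths:

const Promise = require('bluebird')
const jimp = require('jimp')
const imageSize = Promise.promisify(require('image-size'))

// Thumbnail: mirrors generateThumbnail above.
jimp.read('uploads/photo.jpg').then(img => {
  return img.cover(150, 150)      // scale & crop to fill 150x150
    .background(0xFFFFFFFF)       // white backdrop behind transparent pixels
    .opaque()                     // discard alpha
    .rgba(false)                  // export RGB rather than RGBA
    .write('thumbs/photo.png')    // output format follows the extension
})

// Dimensions: mirrors getImageSize above.
imageSize('uploads/photo.jpg').then(dim => {
  console.log(dim) // roughly { width: ..., height: ..., type: 'jpg' }
})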
@@ -5,7 +5,7 @@ const Promise = require('bluebird')
 const fs = Promise.promisifyAll(require('fs-extra'))
 const request = require('request')
 const url = require('url')
-const farmhash = require('farmhash')
+const crypto = require('crypto')
 const _ = require('lodash')
 
 var regFolderName = new RegExp('^[a-z0-9][a-z0-9-]*[a-z0-9]$')
@@ -254,7 +254,7 @@ module.exports = {
       // -> Move thumbnail ahead to avoid re-generation
 
       if (originFile.category === 'image') {
-        let fUid = farmhash.fingerprint32(folder.name + '/' + destFilename)
+        let fUid = crypto.createHash('md5').update(folder.name + '/' + destFilename).digest('hex')
         let sourceThumbPath = path.resolve(self._uploadsThumbsPath, originFile._id + '.png')
         let destThumbPath = path.resolve(self._uploadsThumbsPath, fUid + '.png')
         preMoveOps.push(fs.moveAsync(sourceThumbPath, destThumbPath))
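Note: because an upload's `_id` is a pure function of `folder/filename`, renaming or moving a file changes its uid, so the existing thumbnail is moved to the new uid-derived path ahead of the file move instead of being re-rendered. Illustrative sketch (names are hypothetical):

const crypto = require('crypto')
const md5 = s => crypto.createHash('md5').update(s).digest('hex')

const oldUid = md5('folderA/old-name.jpg')
const newUid = md5('folderA/new-name.jpg')
// thumbs/<oldUid>.png is renamed to thumbs/<newUid>.png,
// sparing a fresh thumbnail render after the move.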