Merged main & websocket server, refactored libs, fixed image uploads
@@ -1,283 +0,0 @@
"use strict";

var path = require('path'),
  Promise = require('bluebird'),
  fs = Promise.promisifyAll(require('fs-extra')),
  readChunk = require('read-chunk'),
  fileType = require('file-type'),
  farmhash = require('farmhash'),
  moment = require('moment'),
  chokidar = require('chokidar'),
  _ = require('lodash');

/**
 * Uploads
 *
 * @param {Object} appconfig The application configuration
 */
module.exports = {

  _uploadsPath: './repo/uploads',
  _uploadsThumbsPath: './data/thumbs',

  _watcher: null,

  /**
   * Initialize Uploads model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Uploads model instance
   */
  init(appconfig) {

    let self = this;

    self._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads');
    self._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs');

    return self;

  },

  watch() {

    let self = this;

    self._watcher = chokidar.watch(self._uploadsPath, {
      persistent: true,
      ignoreInitial: true,
      cwd: self._uploadsPath,
      depth: 1,
      awaitWriteFinish: true
    });

    //-> Add new upload file

    self._watcher.on('add', (p) => {

      let pInfo = self.parseUploadsRelPath(p);
      return self.processFile(pInfo.folder, pInfo.filename).then((mData) => {
        ws.emit('uploadsAddFiles', {
          auth: WSInternalKey,
          content: mData
        });
      }).then(() => {
        return git.commitUploads('Uploaded ' + p);
      });

    });

    //-> Remove upload file

    self._watcher.on('unlink', (p) => {

      let pInfo = self.parseUploadsRelPath(p);
      return self.deleteFile(pInfo.folder, pInfo.filename).then((uID) => {
        ws.emit('uploadsRemoveFiles', {
          auth: WSInternalKey,
          content: uID
        });
      }).then(() => {
        return git.commitUploads('Deleted ' + p);
      });

    });

  },

  /**
   * Initial Uploads scan
   *
   * @return {Promise<Void>} Promise of the scan operation
   */
  initialScan() {

    let self = this;

    return fs.readdirAsync(self._uploadsPath).then((ls) => {

      // Get all folders

      return Promise.map(ls, (f) => {
        return fs.statAsync(path.join(self._uploadsPath, f)).then((s) => { return { filename: f, stat: s }; });
      }).filter((s) => { return s.stat.isDirectory(); }).then((arrDirs) => {

        let folderNames = _.map(arrDirs, 'filename');
        folderNames.unshift('');

        // Add folders to DB

        return db.UplFolder.remove({}).then(() => {
          return db.UplFolder.insertMany(_.map(folderNames, (f) => {
            return { name: f };
          }));
        }).then(() => {

          // Travel each directory and scan files

          let allFiles = [];

          return Promise.map(folderNames, (fldName) => {

            let fldPath = path.join(self._uploadsPath, fldName);
            return fs.readdirAsync(fldPath).then((fList) => {
              return Promise.map(fList, (f) => {
                return upl.processFile(fldName, f).then((mData) => {
                  if(mData) {
                    allFiles.push(mData);
                  }
                  return true;
                });
              }, {concurrency: 3});
            });

          }, {concurrency: 1}).finally(() => {

            // Add files to DB

            return db.UplFile.remove({}).then(() => {
              if(_.isArray(allFiles) && allFiles.length > 0) {
                return db.UplFile.insertMany(allFiles);
              } else {
                return true;
              }
            });

          });

        });

      });

    }).then(() => {

      // Watch for new changes

      return upl.watch();

    });

  },

  /**
   * Parse relative Uploads path
   *
   * @param {String} f Relative Uploads path
   * @return {Object} Parsed path (folder and filename)
   */
  parseUploadsRelPath(f) {

    let fObj = path.parse(f);
    return {
      folder: fObj.dir,
      filename: fObj.base
    };

  },

  processFile(fldName, f) {

    let self = this;

    let fldPath = path.join(self._uploadsPath, fldName);
    let fPath = path.join(fldPath, f);
    let fPathObj = path.parse(fPath);
    let fUid = farmhash.fingerprint32(fldName + '/' + f);

    return fs.statAsync(fPath).then((s) => {

      if(!s.isFile()) { return false; }

      // Get MIME info

      let mimeInfo = fileType(readChunk.sync(fPath, 0, 262));

      // Images

      if(s.size < 3145728) { // ignore files larger than 3MB
        if(_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], mimeInfo.mime)) {
          return self.getImageMetadata(fPath).then((mImgData) => {

            let cacheThumbnailPath = path.parse(path.join(self._uploadsThumbsPath, fUid + '.png'));
            let cacheThumbnailPathStr = path.format(cacheThumbnailPath);

            let mData = {
              _id: fUid,
              category: 'image',
              mime: mimeInfo.mime,
              extra: _.pick(mImgData, ['format', 'width', 'height', 'density', 'hasAlpha', 'orientation']),
              folder: null,
              filename: f,
              basename: fPathObj.name,
              filesize: s.size
            };

            // Generate thumbnail

            return fs.statAsync(cacheThumbnailPathStr).then((st) => {
              return st.isFile();
            }).catch((err) => {
              return false;
            }).then((thumbExists) => {

              return (thumbExists) ? mData : fs.ensureDirAsync(cacheThumbnailPath.dir).then(() => {
                return self.generateThumbnail(fPath, cacheThumbnailPathStr);
              }).return(mData);

            });

          });
        }
      }

      // Other Files

      return {
        _id: fUid,
        category: 'binary',
        mime: mimeInfo.mime,
        folder: fldName,
        filename: f,
        basename: fPathObj.name,
        filesize: s.size
      };

    });

  },

  /**
   * Generate thumbnail of image
   *
   * @param {String} sourcePath The source path
   * @return {Promise<Object>} Promise returning the resized image info
   */
  generateThumbnail(sourcePath, destPath) {

    let sharp = require('sharp');

    return sharp(sourcePath)
      .withoutEnlargement()
      .resize(150, 150)
      .background('white')
      .embed()
      .flatten()
      .toFormat('png')
      .toFile(destPath);

  },

  /**
   * Gets the image metadata.
   *
   * @param {String} sourcePath The source path
   * @return {Object} The image metadata.
   */
  getImageMetadata(sourcePath) {

    let sharp = require('sharp');

    return sharp(sourcePath).metadata();

  }

};
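The Uploads model above relies on globals (ROOTPATH, winston, db, git, ws, upl, WSInternalKey) that the server entry point is expected to provide. A minimal wiring sketch, with hypothetical paths and config values (everything outside the model's own methods is an assumption):

// Sketch only: globals and config values are assumed, not defined by this commit
global.ROOTPATH = __dirname;                                       // set by the real server entry point
const appconfig = { paths: { repo: './repo', data: './data' } };   // hypothetical values

const upl = require('./models/uploads-agent').init(appconfig);     // module path is an assumption
global.upl = upl;                                                  // initialScan() refers to itself via the `upl` global

// Once db and git are initialized, rebuild the uploads DB and start watching for changes:
upl.initialScan().then(() => {
  winston.info('[AGENT] Initial uploads scan completed.');
});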
@@ -1,66 +0,0 @@
var LocalStrategy = require('passport-local').Strategy;

module.exports = function(passport, appconfig) {

  // User serialization methods

  passport.serializeUser(function(user, done) {
    done(null, user._id);
  });

  passport.deserializeUser(function(id, done) {
    db.User.findById(id).then((user) => {
      if(user) {
        done(null, user);
      } else {
        done(new Error('User not found.'), null);
      }
    }).catch((err) => {
      done(err, null);
    });
  });

  // Setup local user authentication strategy

  passport.use(
    'local',
    new LocalStrategy({
      usernameField: 'email',
      passwordField: 'password',
      passReqToCallback: true
    },
    function(req, uEmail, uPassword, done) {
      db.User.findOne({ 'email': uEmail }).then((user) => {
        if (user) {
          return user.validatePassword(uPassword).then((isValid) => {
            return (isValid) ? done(null, user) : done(null, false);
          });
        } else {
          return done(null, false);
        }
      }).catch((err) => {
        done(err);
      });
    })
  );

  // Check for admin access

  db.onReady.then(() => {

    /*if(db.User.count() < 1) {
      winston.info('No administrator account found. Creating a new one...');
      if(db.User.insert({
        email: appconfig.admin,
        firstName: "Admin",
        lastName: "Admin",
        password: "admin123"
      })) {
        winston.info('Administrator account created successfully!');
      } else {
        winston.error('An error occurred while creating the administrator account.');
      }
    }*/

    return true;

  });

};
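For context, a sketch of how this strategy module might be plugged into an Express app. The file paths, session settings, and login route are assumptions, not part of this commit:

// Sketch only: assumes appconfig has been loaded by the config module below
const express = require('express');
const passport = require('passport');
const session = require('express-session');

const appconfig = require('./libs/config')('./config.yml');   // path is an assumption
const app = express();

app.use(session({ secret: appconfig.sessionSecret, resave: false, saveUninitialized: false }));
app.use(passport.initialize());
app.use(passport.session());

// Register the local strategy defined above
require('./libs/auth')(passport, appconfig);                   // path is an assumption

// Authenticate a login form posting `email` and `password` (the configured field names)
app.post('/login', passport.authenticate('local', {
  successRedirect: '/',
  failureRedirect: '/login'
}));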
@@ -1,35 +0,0 @@
"use strict";

var fs = require('fs'),
  yaml = require('js-yaml'),
  _ = require('lodash');

/**
 * Load Application Configuration
 *
 * @param {String} confPath Path to the configuration file
 * @return {Object} Application Configuration
 */
module.exports = (confPath) => {

  var appconfig = {};

  try {
    appconfig = yaml.safeLoad(fs.readFileSync(confPath, 'utf8'));
  } catch (ex) {
    winston.error(ex);
    process.exit(1);
  }

  return _.defaultsDeep(appconfig, {
    title: "Requarks Wiki",
    host: "http://localhost",
    port: process.env.PORT,
    wsPort: 8080,
    db: "mongodb://localhost/wiki",
    redis: null,
    sessionSecret: null,
    admin: null
  });

};
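A sketch of the expected config.yml shape and how the loader merges it over the defaults above. The sample values and paths are hypothetical:

// config.yml (hypothetical contents):
//   title: My Wiki
//   port: 3000
//   db: mongodb://localhost/wiki
//   admin: admin@example.com

global.winston = require('winston');                          // the loader logs through a winston global
const appconfig = require('./libs/config')('./config.yml');   // module path is an assumption

// Keys absent from the file fall back to the defaults, e.g.:
console.log(appconfig.wsPort);   // 8080 unless overridden in config.yml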
@@ -1,435 +0,0 @@
"use strict";

var Promise = require('bluebird'),
  path = require('path'),
  fs = Promise.promisifyAll(require("fs-extra")),
  _ = require('lodash'),
  farmhash = require('farmhash'),
  moment = require('moment');

/**
 * Entries Model
 */
module.exports = {

  _repoPath: 'repo',
  _cachePath: 'data/cache',

  /**
   * Initialize Entries model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Entries model instance
   */
  init(appconfig) {

    let self = this;

    self._repoPath = path.resolve(ROOTPATH, appconfig.paths.repo);
    self._cachePath = path.resolve(ROOTPATH, appconfig.paths.data, 'cache');

    return self;

  },

  /**
   * Check if a document already exists
   *
   * @param {String} entryPath The entry path
   * @return {Promise<Boolean>} True if exists, false otherwise
   */
  exists(entryPath) {

    let self = this;

    return self.fetchOriginal(entryPath, {
      parseMarkdown: false,
      parseMeta: false,
      parseTree: false,
      includeMarkdown: false,
      includeParentInfo: false,
      cache: false
    }).then(() => {
      return true;
    }).catch((err) => {
      return false;
    });

  },

  /**
   * Fetch a document from cache, otherwise the original
   *
   * @param {String} entryPath The entry path
   * @return {Promise<Object>} Page Data
   */
  fetch(entryPath) {

    let self = this;

    let cpath = self.getCachePath(entryPath);

    return fs.statAsync(cpath).then((st) => {
      return st.isFile();
    }).catch((err) => {
      return false;
    }).then((isCache) => {

      if(isCache) {

        // Load from cache

        return fs.readFileAsync(cpath).then((contents) => {
          return JSON.parse(contents);
        }).catch((err) => {
          winston.error('Corrupted cache file. Deleting it...');
          fs.unlinkSync(cpath);
          return false;
        });

      } else {

        // Load original

        return self.fetchOriginal(entryPath);

      }

    });

  },

  /**
   * Fetches the original document entry
   *
   * @param {String} entryPath The entry path
   * @param {Object} options The options
   * @return {Promise<Object>} Page data
   */
  fetchOriginal(entryPath, options) {

    let self = this;

    let fpath = self.getFullPath(entryPath);
    let cpath = self.getCachePath(entryPath);

    options = _.defaults(options, {
      parseMarkdown: true,
      parseMeta: true,
      parseTree: true,
      includeMarkdown: false,
      includeParentInfo: true,
      cache: true
    });

    return fs.statAsync(fpath).then((st) => {
      if(st.isFile()) {
        return fs.readFileAsync(fpath, 'utf8').then((contents) => {

          // Parse contents

          let pageData = {
            markdown: (options.includeMarkdown) ? contents : '',
            html: (options.parseMarkdown) ? mark.parseContent(contents) : '',
            meta: (options.parseMeta) ? mark.parseMeta(contents) : {},
            tree: (options.parseTree) ? mark.parseTree(contents) : []
          };

          if(!pageData.meta.title) {
            pageData.meta.title = _.startCase(entryPath);
          }

          pageData.meta.path = entryPath;

          // Get parent

          let parentPromise = (options.includeParentInfo) ? self.getParentInfo(entryPath).then((parentData) => {
            return (pageData.parent = parentData);
          }).catch((err) => {
            return (pageData.parent = false);
          }) : Promise.resolve(true);

          return parentPromise.then(() => {

            // Cache to disk

            if(options.cache) {
              let cacheData = JSON.stringify(_.pick(pageData, ['html', 'meta', 'tree', 'parent']));
              return fs.writeFileAsync(cpath, cacheData).catch((err) => {
                winston.error('Unable to write to cache! Performance may be affected.');
                return true;
              });
            } else {
              return true;
            }

          }).return(pageData);

        });
      } else {
        return false;
      }
    }).catch((err) => {
      return Promise.reject(new Promise.OperationalError('Entry ' + entryPath + ' does not exist!'));
    });

  },

  /**
   * Parse raw url path and make it safe
   *
   * @param {String} urlPath The url path
   * @return {String} Safe entry path
   */
  parsePath(urlPath) {

    let wlist = new RegExp('[^a-z0-9/-]', 'g');

    urlPath = _.toLower(urlPath).replace(wlist, '');

    if(urlPath === '/') {
      urlPath = 'home';
    }

    let urlParts = _.filter(_.split(urlPath, '/'), (p) => { return !_.isEmpty(p); });

    return _.join(urlParts, '/');

  },

  /**
   * Gets the parent information.
   *
   * @param {String} entryPath The entry path
   * @return {Promise<Object|False>} The parent information.
   */
  getParentInfo(entryPath) {

    let self = this;

    if(_.includes(entryPath, '/')) {

      let parentParts = _.initial(_.split(entryPath, '/'));
      let parentPath = _.join(parentParts, '/');
      let parentFile = _.last(parentParts);
      let fpath = self.getFullPath(parentPath);

      return fs.statAsync(fpath).then((st) => {
        if(st.isFile()) {
          return fs.readFileAsync(fpath, 'utf8').then((contents) => {

            let pageMeta = mark.parseMeta(contents);

            return {
              path: parentPath,
              title: (pageMeta.title) ? pageMeta.title : _.startCase(parentFile),
              subtitle: (pageMeta.subtitle) ? pageMeta.subtitle : false
            };

          });
        } else {
          return Promise.reject(new Error('Parent entry is not a valid file.'));
        }
      });

    } else {
      return Promise.reject(new Error('Parent entry is root.'));
    }

  },

  /**
   * Gets the full original path of a document.
   *
   * @param {String} entryPath The entry path
   * @return {String} The full path.
   */
  getFullPath(entryPath) {
    return path.join(this._repoPath, entryPath + '.md');
  },

  /**
   * Gets the full cache path of a document.
   *
   * @param {String} entryPath The entry path
   * @return {String} The full cache path.
   */
  getCachePath(entryPath) {
    return path.join(this._cachePath, farmhash.fingerprint32(entryPath) + '.json');
  },

  /**
   * Gets the entry path from full path.
   *
   * @param {String} fullPath The full path
   * @return {String} The entry path
   */
  getEntryPathFromFullPath(fullPath) {
    let absRepoPath = path.resolve(ROOTPATH, this._repoPath);
    return _.chain(fullPath).replace(absRepoPath, '').replace('.md', '').replace(new RegExp('\\\\', 'g'), '/').value();
  },

  /**
   * Update an existing document
   *
   * @param {String} entryPath The entry path
   * @param {String} contents The markdown-formatted contents
   * @return {Promise<Boolean>} True on success, false on failure
   */
  update(entryPath, contents) {

    let self = this;
    let fpath = self.getFullPath(entryPath);

    return fs.statAsync(fpath).then((st) => {
      if(st.isFile()) {
        return self.makePersistent(entryPath, contents).then(() => {
          return self.updateCache(entryPath);
        });
      } else {
        return Promise.reject(new Error('Entry does not exist!'));
      }
    }).catch((err) => {
      winston.error(err);
      return Promise.reject(new Error('Failed to save document.'));
    });

  },

  /**
   * Update local cache and search index
   *
   * @param {String} entryPath The entry path
   * @return {Promise} Promise of the operation
   */
  updateCache(entryPath) {

    let self = this;

    return self.fetchOriginal(entryPath, {
      parseMarkdown: true,
      parseMeta: true,
      parseTree: true,
      includeMarkdown: true,
      includeParentInfo: true,
      cache: true
    }).then((pageData) => {
      return {
        entryPath,
        meta: pageData.meta,
        parent: pageData.parent || {},
        text: mark.removeMarkdown(pageData.markdown)
      };
    }).then((content) => {
      return db.Entry.create({
        _id: content.entryPath,
        title: content.meta.title || content.entryPath,
        subtitle: content.meta.subtitle || '',
        parent: content.parent.title || '',
        content: content.text || ''
      });
    });

  },

  /**
   * Create a new document
   *
   * @param {String} entryPath The entry path
   * @param {String} contents The markdown-formatted contents
   * @return {Promise<Boolean>} True on success, false on failure
   */
  create(entryPath, contents) {

    let self = this;

    return self.exists(entryPath).then((docExists) => {
      if(!docExists) {
        return self.makePersistent(entryPath, contents).then(() => {
          return self.updateCache(entryPath);
        });
      } else {
        return Promise.reject(new Error('Entry already exists!'));
      }
    }).catch((err) => {
      winston.error(err);
      return Promise.reject(new Error('Something went wrong.'));
    });

  },

  /**
   * Makes a document persistent to disk and git repository
   *
   * @param {String} entryPath The entry path
   * @param {String} contents The markdown-formatted contents
   * @return {Promise<Boolean>} True on success, false on failure
   */
  makePersistent(entryPath, contents) {

    let self = this;
    let fpath = self.getFullPath(entryPath);

    return fs.outputFileAsync(fpath, contents).then(() => {
      return git.commitDocument(entryPath);
    });

  },

  /**
   * Move a document
   *
   * @param {String} entryPath The current entry path
   * @param {String} newEntryPath The new entry path
   * @return {Promise} Promise of the operation
   */
  move(entryPath, newEntryPath) {

    let self = this;

    if(_.isEmpty(entryPath) || entryPath === 'home') {
      return Promise.reject(new Error('Invalid path!'));
    }

    return git.moveDocument(entryPath, newEntryPath).then(() => {
      return git.commitDocument(newEntryPath).then(() => {

        // Delete old cache version

        let oldEntryCachePath = self.getCachePath(entryPath);
        fs.unlinkAsync(oldEntryCachePath).catch((err) => { return true; });

        // Delete old index entry

        ws.emit('searchDel', {
          auth: WSInternalKey,
          entryPath
        });

        // Create cache for new entry

        return self.updateCache(newEntryPath);

      });
    });

  },

  /**
   * Generate a starter page content based on the entry path
   *
   * @param {String} entryPath The entry path
   * @return {Promise<String>} Starter content
   */
  getStarter(entryPath) {

    let self = this;
    let formattedTitle = _.startCase(_.last(_.split(entryPath, '/')));

    return fs.readFileAsync(path.join(ROOTPATH, 'client/content/create.md'), 'utf8').then((contents) => {
      return _.replace(contents, new RegExp('{TITLE}', 'g'), formattedTitle);
    });

  }

};
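A usage sketch of the Entries model; it assumes the server has already set up the mark, git, db and winston globals the model calls into, and the module path is an assumption:

// Sketch only
const entries = require('./models/entries').init(appconfig);   // path is an assumption

// Normalize a raw URL into a safe entry path, then fetch it (cache first, repo second)
let entryPath = entries.parsePath('/Projects/Wiki');            // -> 'projects/wiki'

entries.fetch(entryPath).then((pageData) => {
  if (pageData) {
    console.log(pageData.meta.title, pageData.html.length);
  }
});

// Saving goes through makePersistent(), which also commits the .md file to the git repository
entries.update(entryPath, '<!-- TITLE: Wiki -->\n\n# Wiki\n\nUpdated content.');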
models/git.js
@@ -1,259 +0,0 @@
"use strict";

var Git = require("git-wrapper2-promise"),
  Promise = require('bluebird'),
  path = require('path'),
  os = require('os'),
  fs = Promise.promisifyAll(require("fs")),
  moment = require('moment'),
  _ = require('lodash'),
  URL = require('url');

/**
 * Git Model
 */
module.exports = {

  _git: null,
  _url: '',
  _repo: {
    path: '',
    branch: 'master',
    exists: false
  },
  _signature: {
    name: 'Wiki',
    email: 'user@example.com'
  },
  _opts: {
    clone: {},
    push: {}
  },
  onReady: null,

  /**
   * Initialize Git model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Git model instance
   */
  init(appconfig) {

    let self = this;

    //-> Build repository path

    if(_.isEmpty(appconfig.paths.repo)) {
      self._repo.path = path.join(ROOTPATH, 'repo');
    } else {
      self._repo.path = appconfig.paths.repo;
    }

    //-> Initialize repository

    self.onReady = self._initRepo(appconfig);

    // Define signature

    self._signature.name = appconfig.git.signature.name || 'Wiki';
    self._signature.email = appconfig.git.signature.email || 'user@example.com';

    return self;

  },

  /**
   * Initialize Git repository
   *
   * @param {Object} appconfig The application config
   * @return {Object} Promise
   */
  _initRepo(appconfig) {

    let self = this;

    winston.info('[' + PROCNAME + '][GIT] Checking Git repository...');

    //-> Check if path is accessible

    return fs.mkdirAsync(self._repo.path).catch((err) => {
      if(err.code !== 'EEXIST') {
        winston.error('[' + PROCNAME + '][GIT] Invalid Git repository path or missing permissions.');
      }
    }).then(() => {

      self._git = new Git({ 'git-dir': self._repo.path });

      //-> Check if path already contains a git working folder

      return self._git.isRepo().then((isRepo) => {
        self._repo.exists = isRepo;
        return (!isRepo) ? self._git.exec('init') : true;
      }).catch((err) => {
        self._repo.exists = false;
      });

    }).then(() => {

      // Initialize remote

      let urlObj = URL.parse(appconfig.git.url);
      urlObj.auth = appconfig.git.auth.username + ((appconfig.git.auth.type !== 'ssh') ? ':' + appconfig.git.auth.password : '');
      self._url = URL.format(urlObj);

      return self._git.exec('remote', 'show').then((cProc) => {
        let out = cProc.stdout.toString();
        if(_.includes(out, 'origin')) {
          return true;
        } else {
          return Promise.join(
            self._git.exec('config', ['--local', 'user.name', self._signature.name]),
            self._git.exec('config', ['--local', 'user.email', self._signature.email])
          ).then(() => {
            return self._git.exec('remote', ['add', 'origin', self._url]);
          });
        }
      });

    }).catch((err) => {
      winston.error('[' + PROCNAME + '][GIT] Git remote error!');
      throw err;
    }).then(() => {
      winston.info('[' + PROCNAME + '][GIT] Git repository is OK.');
      return true;
    });

  },

  /**
   * Gets the repo path.
   *
   * @return {String} The repo path.
   */
  getRepoPath() {
    return this._repo.path || path.join(ROOTPATH, 'repo');
  },

  /**
   * Sync with the remote repository
   *
   * @return {Promise} Resolve on sync success
   */
  resync() {

    let self = this;

    // Fetch

    winston.info('[' + PROCNAME + '][GIT] Performing pull from remote repository...');
    return self._git.pull('origin', self._repo.branch).then((cProc) => {
      winston.info('[' + PROCNAME + '][GIT] Pull completed.');
    })
    .catch((err) => {
      winston.error('[' + PROCNAME + '][GIT] Unable to fetch from git origin!');
      throw err;
    })
    .then(() => {

      // Check for changes

      return self._git.exec('log', 'origin/' + self._repo.branch + '..HEAD').then((cProc) => {
        let out = cProc.stdout.toString();

        if(_.includes(out, 'commit')) {

          winston.info('[' + PROCNAME + '][GIT] Performing push to remote repository...');
          return self._git.push('origin', self._repo.branch).then(() => {
            return winston.info('[' + PROCNAME + '][GIT] Push completed.');
          });

        } else {
          winston.info('[' + PROCNAME + '][GIT] Push skipped. Repository is already in sync.');
        }

        return true;

      });

    })
    .catch((err) => {
      winston.error('[' + PROCNAME + '][GIT] Unable to push changes to remote!');
      throw err;
    });

  },

  /**
   * Commits a document.
   *
   * @param {String} entryPath The entry path
   * @return {Promise} Resolve on commit success
   */
  commitDocument(entryPath) {

    let self = this;
    let gitFilePath = entryPath + '.md';
    let commitMsg = '';

    return self._git.exec('ls-files', gitFilePath).then((cProc) => {
      let out = cProc.stdout.toString();
      return _.includes(out, gitFilePath);
    }).then((isTracked) => {
      commitMsg = (isTracked) ? 'Updated ' + gitFilePath : 'Added ' + gitFilePath;
      return self._git.add(gitFilePath);
    }).then(() => {
      return self._git.commit(commitMsg).catch((err) => {
        if(_.includes(err.stdout, 'nothing to commit')) { return true; }
      });
    });

  },

  /**
   * Move a document.
   *
   * @param {String} entryPath The current entry path
   * @param {String} newEntryPath The new entry path
   * @return {Promise<Boolean>} Resolve on success
   */
  moveDocument(entryPath, newEntryPath) {

    let self = this;
    let gitFilePath = entryPath + '.md';
    let gitNewFilePath = newEntryPath + '.md';

    return self._git.exec('mv', [gitFilePath, gitNewFilePath]).then((cProc) => {
      let out = cProc.stdout.toString();
      if(_.includes(out, 'fatal')) {
        let errorMsg = _.capitalize(_.head(_.split(_.replace(out, 'fatal: ', ''), ',')));
        throw new Error(errorMsg);
      }
      return true;
    });

  },

  /**
   * Commits uploads changes.
   *
   * @param {String} msg The commit message
   * @return {Promise} Resolve on commit success
   */
  commitUploads(msg) {

    let self = this;
    msg = msg || "Uploads repository sync";

    return self._git.add('uploads').then(() => {
      return self._git.commit(msg).catch((err) => {
        if(_.includes(err.stdout, 'nothing to commit')) { return true; }
      });
    });

  }

};
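A sketch of how the Git model might be driven by the server: initialize it with the app config, wait for onReady, then resync periodically. The interval value and module path are assumptions:

// Sketch only
const git = require('./models/git').init(appconfig);   // path is an assumption
global.git = git;                                       // other models call into the git global

git.onReady.then(() => {
  // Pull from origin, then push any local commits that are ahead
  return git.resync();
}).then(() => {
  // Keep the repository in sync on a timer (interval is arbitrary here)
  setInterval(() => { git.resync(); }, 5 * 60 * 1000);
});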
@@ -1,222 +0,0 @@
"use strict";

var Promise = require('bluebird'),
  md = require('markdown-it'),
  mdEmoji = require('markdown-it-emoji'),
  mdTaskLists = require('markdown-it-task-lists'),
  mdAbbr = require('markdown-it-abbr'),
  mdAnchor = require('markdown-it-anchor'),
  mdFootnote = require('markdown-it-footnote'),
  mdExternalLinks = require('markdown-it-external-links'),
  mdExpandTabs = require('markdown-it-expand-tabs'),
  mdAttrs = require('markdown-it-attrs'),
  hljs = require('highlight.js'),
  cheerio = require('cheerio'),
  _ = require('lodash'),
  mdRemove = require('remove-markdown');

// Load plugins

var mkdown = md({
  html: true,
  linkify: true,
  typographer: true,
  highlight(str, lang) {
    if (lang && hljs.getLanguage(lang)) {
      try {
        return '<pre class="hljs"><code>' + hljs.highlight(lang, str, true).value + '</code></pre>';
      } catch (err) {
        return '<pre><code>' + str + '</code></pre>';
      }
    }
    return '<pre><code>' + str + '</code></pre>';
  }
})
.use(mdEmoji)
.use(mdTaskLists)
.use(mdAbbr)
.use(mdAnchor, {
  slugify: _.kebabCase,
  permalink: true,
  permalinkClass: 'toc-anchor',
  permalinkSymbol: '#',
  permalinkBefore: true
})
.use(mdFootnote)
.use(mdExternalLinks, {
  externalClassName: 'external-link',
  internalClassName: 'internal-link'
})
.use(mdExpandTabs, {
  tabWidth: 4
})
.use(mdAttrs);

// Rendering rules

mkdown.renderer.rules.emoji = function(token, idx) {
  return '<i class="twa twa-' + token[idx].markup + '"></i>';
};

/**
 * Parse markdown content and build TOC tree
 *
 * @param {(Function|string)} content Markdown content
 * @return {Array} TOC tree
 */
const parseTree = (content) => {

  let tokens = md().parse(content, {});
  let tocArray = [];

  //-> Extract headings and their respective levels

  for (let i = 0; i < tokens.length; i++) {
    if (tokens[i].type !== "heading_close") {
      continue;
    }

    const heading = tokens[i - 1];
    const heading_close = tokens[i];

    if (heading.type === "inline") {
      let content = "";
      let anchor = "";
      if (heading.children && heading.children[0].type === "link_open") {
        content = heading.children[1].content;
        anchor = _.kebabCase(content);
      } else {
        content = heading.content;
        anchor = _.kebabCase(heading.children.reduce((acc, t) => acc + t.content, ""));
      }

      tocArray.push({
        content,
        anchor,
        level: +heading_close.tag.substr(1, 1)
      });
    }
  }

  //-> Exclude levels deeper than 2

  _.remove(tocArray, (n) => { return n.level > 2; });

  //-> Build tree from flat array

  return _.reduce(tocArray, (tree, v) => {
    let treeLength = tree.length - 1;
    if(v.level < 2) {
      tree.push({
        content: v.content,
        anchor: v.anchor,
        nodes: []
      });
    } else {
      let lastNodeLevel = 1;
      let GetNodePath = (startPos) => {
        lastNodeLevel++;
        if(_.isEmpty(startPos)) {
          startPos = 'nodes';
        }
        if(lastNodeLevel === v.level) {
          return startPos;
        } else {
          return GetNodePath(startPos + '[' + (_.at(tree[treeLength], startPos).length - 1) + '].nodes');
        }
      };
      let lastNodePath = GetNodePath();
      let lastNode = _.get(tree[treeLength], lastNodePath);
      if(lastNode) {
        lastNode.push({
          content: v.content,
          anchor: v.anchor,
          nodes: []
        });
        _.set(tree[treeLength], lastNodePath, lastNode);
      }
    }
    return tree;
  }, []);

};

/**
 * Parse markdown content to HTML
 *
 * @param {String} content Markdown content
 * @return {String} HTML formatted content
 */
const parseContent = (content) => {

  let output = mkdown.render(content);
  let cr = cheerio.load(output);
  cr('table').addClass('table is-bordered is-striped is-narrow');
  output = cr.html();

  return output;

};

/**
 * Parse meta-data tags from content
 *
 * @param {String} content Markdown content
 * @return {Object} Properties found in the content and their values
 */
const parseMeta = (content) => {

  let commentMeta = new RegExp('<!-- ?([a-zA-Z]+):(.*)-->', 'g');
  let results = {}, match;
  while((match = commentMeta.exec(content)) !== null) {
    results[_.toLower(match[1])] = _.trim(match[2]);
  }

  return results;

};

module.exports = {

  /**
   * Parse content and return all data
   *
   * @param {String} content Markdown-formatted content
   * @return {Object} Object containing meta, html and tree data
   */
  parse(content) {
    return {
      meta: parseMeta(content),
      html: parseContent(content),
      tree: parseTree(content)
    };
  },

  parseContent,
  parseMeta,
  parseTree,

  /**
   * Strips non-text elements from Markdown content
   *
   * @param {String} content Markdown-formatted content
   * @return {String} Text-only version
   */
  removeMarkdown(content) {
    return mdRemove(_.chain(content)
      .replace(/<!-- ?([a-zA-Z]+):(.*)-->/g, '')
      .replace(/```[^`]+```/g, '')
      .replace(/`[^`]+`/g, '')
      .replace(new RegExp('(?!mailto:)(?:(?:http|https|ftp)://)(?:\\S+(?::\\S*)?@)?(?:(?:(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[0-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*(?:\\.(?:[a-z\\u00a1-\\uffff]{2,})))|localhost)(?::\\d{2,5})?(?:(/|\\?|#)[^\\s]*)?', 'g'), '')
      .replace(/\r?\n|\r/g, ' ')
      .deburr()
      .toLower()
      .replace(/(\b([^a-z]+)\b)/g, ' ')
      .replace(/[^a-z]+/g, ' ')
      .replace(/(\b(\w{1,2})\b(\W|$))/g, '')
      .replace(/\s\s+/g, ' ')
      .value()
    );
  }

};
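The parseMeta() helper reads page properties from HTML comments at the top of a document. A short sketch of the expected input and the shape of the combined parse() output; the sample content and module path are hypothetical:

// Sketch only
const mark = require('./libs/markdown');   // path is an assumption

const content = [
  '<!-- TITLE: Getting Started -->',
  '<!-- SUBTITLE: First steps -->',
  '',
  '# Getting Started',
  '',
  'Welcome to the wiki.'
].join('\n');

const page = mark.parse(content);
// page.meta -> { title: 'Getting Started', subtitle: 'First steps' }
// page.html -> rendered HTML, with Bulma table classes injected by cheerio
// page.tree -> [{ content: 'Getting Started', anchor: 'getting-started', nodes: [] }]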
@@ -1,64 +0,0 @@
"use strict";

const modb = require('mongoose'),
  fs = require("fs"),
  path = require("path"),
  _ = require('lodash');

/**
 * MongoDB module
 *
 * @param {Object} appconfig Application config
 * @return {Object} MongoDB wrapper instance
 */
module.exports = {

  /**
   * Initialize DB
   *
   * @param {Object} appconfig The application config
   * @return {Object} DB instance
   */
  init(appconfig) {

    let self = this;

    let dbModelsPath = path.resolve(ROOTPATH, 'models', 'db');

    modb.Promise = require('bluebird');

    // Event handlers

    modb.connection.on('error', (err) => {
      winston.error('[' + PROCNAME + '] Failed to connect to MongoDB instance.');
    });
    modb.connection.once('open', function() {
      winston.info('[' + PROCNAME + '] Connected to MongoDB instance.');
    });

    // Store connection handle

    self.connection = modb.connection;
    self.ObjectId = modb.Types.ObjectId;

    // Load DB Models

    fs
      .readdirSync(dbModelsPath)
      .filter(function(file) {
        return (file.indexOf(".") !== 0);
      })
      .forEach(function(file) {
        let modelName = _.upperFirst(_.camelCase(_.split(file, '.')[0]));
        self[modelName] = require(path.join(dbModelsPath, file));
      });

    // Connect

    self.onReady = modb.connect(appconfig.db);

    return self;

  }

};
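A sketch of the intended startup order: load the config, expose the globals the module expects, then initialize the DB and wait for the connection before starting the servers. Paths are assumptions:

// Sketch only
global.ROOTPATH = __dirname;
global.PROCNAME = 'SERVER';
global.winston = require('winston');

const appconfig = require('./libs/config')('./config.yml');   // path is an assumption
global.db = require('./libs/db').init(appconfig);             // path is an assumption

db.onReady.then(() => {
  // Models such as db.Entry, db.User and db.UplFile are now loaded from ./models/db
  winston.info('Database ready, starting HTTP and WebSocket servers...');
});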
@@ -1,152 +0,0 @@
"use strict";

var path = require('path'),
  Promise = require('bluebird'),
  fs = Promise.promisifyAll(require('fs-extra')),
  multer = require('multer'),
  _ = require('lodash');

/**
 * Local Data Storage
 *
 * @param {Object} appconfig The application configuration
 */
module.exports = {

  _uploadsPath: './repo/uploads',
  _uploadsThumbsPath: './data/thumbs',

  uploadImgHandler: null,

  /**
   * Initialize Local Data Storage model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Local Data Storage model instance
   */
  init(appconfig) {

    this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads');
    this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs');

    this.createBaseDirectories(appconfig);
    this.initMulter(appconfig);

    return this;

  },

  /**
   * Init Multer upload handlers
   *
   * @param {Object} appconfig The application config
   * @return {Boolean} True when initialized
   */
  initMulter(appconfig) {

    this.uploadImgHandler = multer({
      storage: multer.diskStorage({
        destination: (req, f, cb) => {
          cb(null, path.resolve(ROOTPATH, appconfig.paths.data, 'temp-upload'));
        }
      }),
      fileFilter: (req, f, cb) => {

        //-> Check filesize (3 MB max)

        if(f.size > 3145728) {
          return cb(null, false);
        }

        //-> Check MIME type (quick check only)

        if(!_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], f.mimetype)) {
          return cb(null, false);
        }

        cb(null, true);
      }
    }).array('imgfile', 20);

    return true;

  },

  /**
   * Creates the base directories (synchronous).
   *
   * @param {Object} appconfig The application config
   * @return {Void} Void
   */
  createBaseDirectories(appconfig) {

    winston.info('[SERVER] Checking data directories...');

    try {
      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data));
      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './cache'));
      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './thumbs'));
      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './temp-upload'));

      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo));
      fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo, './uploads'));
    } catch (err) {
      winston.error(err);
    }

    winston.info('[SERVER] Data and Repository directories are OK.');

    return;

  },

  /**
   * Gets the uploads path.
   *
   * @return {String} The uploads path.
   */
  getUploadsPath() {
    return this._uploadsPath;
  },

  /**
   * Gets the thumbnails folder path.
   *
   * @return {String} The thumbs path.
   */
  getThumbsPath() {
    return this._uploadsThumbsPath;
  },

  /**
   * Check if filename is valid and unique
   *
   * @param {String} f The filename
   * @param {String} fld The containing folder
   * @return {Promise<String>} Promise of the accepted filename
   */
  validateUploadsFilename(f, fld) {

    let fObj = path.parse(f);
    let fname = _.chain(fObj.name).trim().toLower().kebabCase().value().replace(/[^a-z0-9\-]+/g, '');
    let fext = _.toLower(fObj.ext);

    if(!_.includes(['.jpg', '.jpeg', '.png', '.gif', '.webp'], fext)) {
      fext = '.png';
    }

    f = fname + fext;
    let fpath = path.resolve(this._uploadsPath, fld, f);

    return fs.statAsync(fpath).then((s) => {
      throw new Error('File ' + f + ' already exists.');
    }).catch((err) => {
      if(err.code === 'ENOENT') {
        return f;
      }
      throw err;
    });

  }

};
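A sketch of how the Multer handler and filename validation above might be used in an image upload route; the route path, field names and `lcdata` binding are assumptions:

// Sketch only: assumes `app` and `appconfig` are set up as in the server entry point
const Promise = require('bluebird');
const lcdata = require('./libs/local').init(appconfig);   // path is an assumption

app.post('/uploads/img', (req, res) => {
  lcdata.uploadImgHandler(req, res, (err) => {
    if (err) { return res.status(500).json({ ok: false }); }

    // Validate each accepted file name against the target folder before moving it
    Promise.map(req.files, (f) => {
      return lcdata.validateUploadsFilename(f.originalname, req.body.folder);
    }).then((finalNames) => {
      res.json({ ok: true, files: finalNames });
    }).catch((err) => {
      res.status(400).json({ ok: false, msg: err.message });
    });
  });
});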
@@ -11,8 +11,11 @@ const modb = require('mongoose'),
 */
var uplFolderSchema = modb.Schema({

  _id: String,

  name: {
    type: String,
    index: true
  }

},
@@ -1,133 +0,0 @@
"use strict";

const Promise = require('bluebird'),
  _ = require('lodash'),
  path = require('path');

/**
 * Search Model
 */
module.exports = {

  _si: null,

  /**
   * Initialize Search model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Search model instance
   */
  init(appconfig) {

    let self = this;

    return self;

  },

  find(terms) {

    let self = this;
    terms = _.chain(terms)
      .deburr()
      .toLower()
      .trim()
      .replace(/[^a-z0-9 ]/g, '')
      .split(' ')
      .filter((f) => { return !_.isEmpty(f); })
      .join(' ')
      .value();

    return db.Entry.find(
      { $text: { $search: terms } },
      { score: { $meta: "textScore" }, title: 1 }
    )
    .sort({ score: { $meta: "textScore" } })
    .limit(10)
    .exec()
    .then((hits) => {

      /*if(hits.length < 5) {
        return self._si.matchAsync({
          beginsWith: terms,
          threshold: 3,
          limit: 5,
          type: 'simple'
        }).then((matches) => {

          return {
            match: hits,
            suggest: matches
          };

        });
      } else {*/
      return {
        match: hits,
        suggest: []
      };
      //}

    }).catch((err) => {

      if(err.type === 'NotFoundError') {
        return {
          match: [],
          suggest: []
        };
      } else {
        winston.error(err);
      }

    });

  },

  /**
   * Delete an entry from the index
   *
   * @param {String} entryPath The entry path
   * @return {Promise} Promise of the operation
   */
  delete(entryPath) {

    let self = this;
    /*let hasResults = false;

    return new Promise((resolve, reject) => {

      self._si.search({
        query: {
          AND: { 'entryPath': [entryPath] }
        }
      }).on('data', (results) => {

        hasResults = true;

        if(results.totalHits > 0) {
          let delIds = _.map(results.hits, 'id');
          self._si.del(delIds).on('end', () => { return resolve(true); });
        } else {
          resolve(true);
        }

      }).on('error', (err) => {

        if(err.type === 'NotFoundError') {
          resolve(true);
        } else {
          winston.error(err);
          reject(err);
        }

      }).on('end', () => {
        if(!hasResults) {
          resolve(true);
        }
      });

    });*/

  }

};
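A sketch of calling the Search model from the WebSocket layer this commit merges in; the socket.io event names and `io` binding are assumptions:

// Sketch only: assumes a socket.io server instance `io` and the db/winston globals
const search = require('./models/search').init(appconfig);   // path is an assumption

io.on('connection', (socket) => {
  socket.on('search', (data, cb) => {
    search.find(data.terms).then((results) => {
      cb(results);   // { match: [...], suggest: [...] }
    });
  });
});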
@@ -1,160 +0,0 @@
"use strict";

var path = require('path'),
  Promise = require('bluebird'),
  fs = Promise.promisifyAll(require('fs-extra')),
  multer = require('multer'),
  _ = require('lodash');

var regFolderName = new RegExp("^[a-z0-9][a-z0-9\-]*[a-z0-9]$");

/**
 * Uploads
 */
module.exports = {

  _uploadsPath: './repo/uploads',
  _uploadsThumbsPath: './data/thumbs',

  /**
   * Initialize Local Data Storage model
   *
   * @param {Object} appconfig The application config
   * @return {Object} Uploads model instance
   */
  init(appconfig) {

    this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads');
    this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs');

    return this;

  },

  /**
   * Gets the thumbnails folder path.
   *
   * @return {String} The thumbs path.
   */
  getThumbsPath() {
    return this._uploadsThumbsPath;
  },

  /**
   * Sets the uploads folders.
   *
   * @param {Array<String>} arrFolders The folders
   * @return {Void} Void
   */
  setUploadsFolders(arrFolders) {

    this._uploadsFolders = arrFolders;
    return;

  },

  /**
   * Gets the uploads folders.
   *
   * @return {Array<String>} The uploads folders.
   */
  getUploadsFolders() {
    return this._uploadsFolders;
  },

  /**
   * Creates an uploads folder.
   *
   * @param {String} folderName The folder name
   * @return {Promise} Promise of the operation
   */
  createUploadsFolder(folderName) {

    let self = this;

    folderName = _.kebabCase(_.trim(folderName));

    if(_.isEmpty(folderName) || !regFolderName.test(folderName)) {
      return Promise.resolve(self.getUploadsFolders());
    }

    return fs.ensureDirAsync(path.join(self._uploadsPath, folderName)).then(() => {
      if(!_.includes(self._uploadsFolders, folderName)) {
        self._uploadsFolders.push(folderName);
        self._uploadsFolders = _.sortBy(self._uploadsFolders);
      }
      return self.getUploadsFolders();
    });

  },

  /**
   * Check if folder is valid and exists
   *
   * @param {String} folderName The folder name
   * @return {String|Boolean} Full folder path if valid, false otherwise
   */
  validateUploadsFolder(folderName) {

    if(_.includes(this._uploadsFolders, folderName)) {
      return path.resolve(this._uploadsPath, folderName);
    } else {
      return false;
    }

  },

  /**
   * Sets the uploads files.
   *
   * @param {Array<Object>} arrFiles The uploads files
   * @return {Void} Void
   */
  setUploadsFiles(arrFiles) {

    let self = this;

    /*if(_.isArray(arrFiles) && arrFiles.length > 0) {
      self._uploadsDb.Files.clear();
      self._uploadsDb.Files.insert(arrFiles);
      self._uploadsDb.Files.ensureIndex('category', true);
      self._uploadsDb.Files.ensureIndex('folder', true);
    }*/

    return;

  },

  /**
   * Adds one or more uploads files.
   *
   * @param {Array<Object>} arrFiles The uploads files
   * @return {Void} Void
   */
  addUploadsFiles(arrFiles) {
    if(_.isArray(arrFiles) || _.isPlainObject(arrFiles)) {
      //this._uploadsDb.Files.insert(arrFiles);
    }
    return;
  },

  /**
   * Gets the uploads files.
   *
   * @param {String} cat Category type
   * @param {String} fld Folder
   * @return {Array<Object>} The files matching the query
   */
  getUploadsFiles(cat, fld) {

    return /*this._uploadsDb.Files.chain().find({
      '$and': [{ 'category' : cat },{ 'folder' : fld }]
    }).simplesort('filename').data()*/;

  },

  deleteUploadsFile(fldName, f) {

  }

};