Standard JS code conversion + fixes
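This commit converts several library modules (libs/entries.js, libs/git.js, libs/local.js and a small internal-authentication module) to the Standard JS style: two-space indentation instead of tabs, no semicolons, single-quoted strings, one const declaration per require instead of a comma-separated var list, unused requires such as moment dropped, and eslint-disable-line handle-callback-err annotations on intentionally ignored promise-rejection handlers. Beyond formatting, the cache-write failure path in entries.js fetchOriginal() now also logs the underlying error. A condensed before/after, taken from the entries.js hunk below:

    // Before
    var Promise = require('bluebird'),
        path = require('path');

    // After
    const Promise = require('bluebird')
    const path = require('path')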
libs/entries.js (938 changes)
							| @@ -1,500 +1,452 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| var Promise = require('bluebird'), | ||||
| 	path = require('path'), | ||||
| 	fs = Promise.promisifyAll(require("fs-extra")), | ||||
| 	_ = require('lodash'), | ||||
| 	farmhash = require('farmhash'), | ||||
| 	moment = require('moment'); | ||||
| const Promise = require('bluebird') | ||||
| const path = require('path') | ||||
| const fs = Promise.promisifyAll(require('fs-extra')) | ||||
| const _ = require('lodash') | ||||
| const farmhash = require('farmhash') | ||||
|  | ||||
| /** | ||||
|  * Entries Model | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_repoPath: 'repo', | ||||
| 	_cachePath: 'data/cache', | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Entries model | ||||
| 	 * | ||||
| 	 * @return     {Object}  Entries model instance | ||||
| 	 */ | ||||
| 	init() { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		self._repoPath = path.resolve(ROOTPATH, appconfig.paths.repo); | ||||
| 		self._cachePath = path.resolve(ROOTPATH, appconfig.paths.data, 'cache'); | ||||
|  | ||||
| 		return self; | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Check if a document already exists | ||||
| 	 * | ||||
| 	 * @param      {String}  entryPath  The entry path | ||||
| 	 * @return     {Promise<Boolean>}  True if exists, false otherwise | ||||
| 	 */ | ||||
| 	exists(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		return self.fetchOriginal(entryPath, { | ||||
| 			parseMarkdown: false, | ||||
| 			parseMeta: false, | ||||
| 			parseTree: false, | ||||
| 			includeMarkdown: false, | ||||
| 			includeParentInfo: false, | ||||
| 			cache: false | ||||
| 		}).then(() => { | ||||
| 			return true; | ||||
| 		}).catch((err) => { | ||||
| 			return false; | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Fetch a document from cache, otherwise the original | ||||
| 	 * | ||||
| 	 * @param      {String}           entryPath  The entry path | ||||
| 	 * @return     {Promise<Object>}  Page Data | ||||
| 	 */ | ||||
| 	fetch(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		let cpath = self.getCachePath(entryPath); | ||||
|  | ||||
| 		return fs.statAsync(cpath).then((st) => { | ||||
| 			return st.isFile(); | ||||
| 		}).catch((err) => { | ||||
| 			return false; | ||||
| 		}).then((isCache) => { | ||||
|  | ||||
| 			if(isCache) { | ||||
|  | ||||
| 				// Load from cache | ||||
|  | ||||
| 				return fs.readFileAsync(cpath).then((contents) => { | ||||
| 					return JSON.parse(contents); | ||||
| 				}).catch((err) => { | ||||
| 					winston.error('Corrupted cache file. Deleting it...'); | ||||
| 					fs.unlinkSync(cpath); | ||||
| 					return false; | ||||
| 				}); | ||||
|  | ||||
| 			} else { | ||||
|  | ||||
| 				// Load original | ||||
|  | ||||
| 				return self.fetchOriginal(entryPath); | ||||
|  | ||||
| 			} | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Fetches the original document entry | ||||
| 	 * | ||||
| 	 * @param      {String}           entryPath  The entry path | ||||
| 	 * @param      {Object}           options    The options | ||||
| 	 * @return     {Promise<Object>}  Page data | ||||
| 	 */ | ||||
| 	fetchOriginal(entryPath, options) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		let fpath = self.getFullPath(entryPath); | ||||
| 		let cpath = self.getCachePath(entryPath); | ||||
|  | ||||
| 		options = _.defaults(options, { | ||||
| 			parseMarkdown: true, | ||||
| 			parseMeta: true, | ||||
| 			parseTree: true, | ||||
| 			includeMarkdown: false, | ||||
| 			includeParentInfo: true, | ||||
| 			cache: true | ||||
| 		}); | ||||
|  | ||||
| 		return fs.statAsync(fpath).then((st) => { | ||||
| 			if(st.isFile()) { | ||||
| 				return fs.readFileAsync(fpath, 'utf8').then((contents) => { | ||||
|  | ||||
| 					// Parse contents | ||||
|  | ||||
| 					let pageData = { | ||||
| 						markdown: (options.includeMarkdown) ? contents : '', | ||||
| 						html: (options.parseMarkdown) ? mark.parseContent(contents) : '', | ||||
| 						meta: (options.parseMeta) ? mark.parseMeta(contents) : {}, | ||||
| 						tree: (options.parseTree) ? mark.parseTree(contents) : [] | ||||
| 					}; | ||||
|  | ||||
| 					if(!pageData.meta.title) { | ||||
| 						pageData.meta.title = _.startCase(entryPath); | ||||
| 					} | ||||
|  | ||||
| 					pageData.meta.path = entryPath; | ||||
|  | ||||
| 					// Get parent | ||||
|  | ||||
| 					let parentPromise = (options.includeParentInfo) ? self.getParentInfo(entryPath).then((parentData) => { | ||||
| 						return (pageData.parent = parentData); | ||||
| 					}).catch((err) => { | ||||
| 						return (pageData.parent = false); | ||||
| 					}) : Promise.resolve(true); | ||||
|  | ||||
| 					return parentPromise.then(() => { | ||||
|  | ||||
| 						// Cache to disk | ||||
|  | ||||
| 						if(options.cache) { | ||||
| 							let cacheData = JSON.stringify(_.pick(pageData, ['html', 'meta', 'tree', 'parent']), false, false, false); | ||||
| 							return fs.writeFileAsync(cpath, cacheData).catch((err) => { | ||||
| 								winston.error('Unable to write to cache! Performance may be affected.'); | ||||
| 								return true; | ||||
| 							}); | ||||
| 						} else { | ||||
| 							return true; | ||||
| 						} | ||||
|  | ||||
| 					}).return(pageData); | ||||
|  | ||||
| 			 	}); | ||||
| 			} else { | ||||
| 				return false; | ||||
| 			} | ||||
| 		}).catch((err) => { | ||||
| 			return Promise.reject(new Promise.OperationalError('Entry ' + entryPath + ' does not exist!')); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Parse raw url path and make it safe | ||||
| 	 * | ||||
| 	 * @param      {String}  urlPath  The url path | ||||
| 	 * @return     {String}  Safe entry path | ||||
| 	 */ | ||||
| 	parsePath(urlPath) { | ||||
|  | ||||
| 		let wlist = new RegExp('[^a-z0-9/\-]','g'); | ||||
|  | ||||
| 		urlPath = _.toLower(urlPath).replace(wlist, ''); | ||||
|  | ||||
| 		if(urlPath === '/') { | ||||
| 			urlPath = 'home'; | ||||
| 		} | ||||
|  | ||||
| 		let urlParts = _.filter(_.split(urlPath, '/'), (p) => { return !_.isEmpty(p); }); | ||||
|  | ||||
| 		return _.join(urlParts, '/'); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the parent information. | ||||
| 	 * | ||||
| 	 * @param      {String}                 entryPath  The entry path | ||||
| 	 * @return     {Promise<Object|False>}  The parent information. | ||||
| 	 */ | ||||
| 	getParentInfo(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		if(_.includes(entryPath, '/')) { | ||||
|  | ||||
| 			let parentParts = _.initial(_.split(entryPath, '/')); | ||||
| 			let parentPath = _.join(parentParts,'/'); | ||||
| 			let parentFile = _.last(parentParts); | ||||
| 			let fpath = self.getFullPath(parentPath); | ||||
|  | ||||
| 			return fs.statAsync(fpath).then((st) => { | ||||
| 				if(st.isFile()) { | ||||
| 					return fs.readFileAsync(fpath, 'utf8').then((contents) => { | ||||
|  | ||||
| 						let pageMeta = mark.parseMeta(contents); | ||||
|  | ||||
| 						return { | ||||
| 							path: parentPath, | ||||
| 							title: (pageMeta.title) ? pageMeta.title : _.startCase(parentFile), | ||||
| 							subtitle: (pageMeta.subtitle) ? pageMeta.subtitle : false | ||||
| 						}; | ||||
|  | ||||
| 					}); | ||||
| 				} else { | ||||
| 					return Promise.reject(new Error('Parent entry is not a valid file.')); | ||||
| 				} | ||||
| 			}); | ||||
|  | ||||
| 		} else { | ||||
| 			return Promise.reject(new Error('Parent entry is root.')); | ||||
| 		} | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the full original path of a document. | ||||
| 	 * | ||||
| 	 * @param      {String}  entryPath  The entry path | ||||
| 	 * @return     {String}  The full path. | ||||
| 	 */ | ||||
| 	getFullPath(entryPath) { | ||||
| 		return path.join(this._repoPath, entryPath + '.md'); | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the full cache path of a document. | ||||
| 	 * | ||||
| 	 * @param      {String}    entryPath  The entry path | ||||
| 	 * @return     {String}  The full cache path. | ||||
| 	 */ | ||||
| 	getCachePath(entryPath) { | ||||
| 		return path.join(this._cachePath, farmhash.fingerprint32(entryPath) + '.json'); | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the entry path from full path. | ||||
| 	 * | ||||
| 	 * @param      {String}  fullPath  The full path | ||||
| 	 * @return     {String}  The entry path | ||||
| 	 */ | ||||
| 	getEntryPathFromFullPath(fullPath) { | ||||
| 		let absRepoPath = path.resolve(ROOTPATH, this._repoPath); | ||||
| 		return _.chain(fullPath).replace(absRepoPath, '').replace('.md', '').replace(new RegExp('\\\\', 'g'),'/').value(); | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Update an existing document | ||||
| 	 * | ||||
| 	 * @param      {String}            entryPath  The entry path | ||||
| 	 * @param      {String}            contents   The markdown-formatted contents | ||||
| 	 * @return     {Promise<Boolean>}  True on success, false on failure | ||||
| 	 */ | ||||
| 	update(entryPath, contents) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		let fpath = self.getFullPath(entryPath); | ||||
|  | ||||
| 		return fs.statAsync(fpath).then((st) => { | ||||
| 			if(st.isFile()) { | ||||
| 				return self.makePersistent(entryPath, contents).then(() => { | ||||
| 					return self.updateCache(entryPath); | ||||
| 				}); | ||||
| 			} else { | ||||
| 				return Promise.reject(new Error('Entry does not exist!')); | ||||
| 			} | ||||
| 		}).catch((err) => { | ||||
| 			winston.error(err); | ||||
| 			return Promise.reject(new Error('Failed to save document.')); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Update local cache and search index | ||||
| 	 * | ||||
| 	 * @param      {String}   entryPath  The entry path | ||||
| 	 * @return     {Promise}  Promise of the operation | ||||
| 	 */ | ||||
| 	updateCache(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		return self.fetchOriginal(entryPath, { | ||||
| 			parseMarkdown: true, | ||||
| 			parseMeta: true, | ||||
| 			parseTree: true, | ||||
| 			includeMarkdown: true, | ||||
| 			includeParentInfo: true, | ||||
| 			cache: true | ||||
| 		}).then((pageData) => { | ||||
| 			return { | ||||
| 				entryPath, | ||||
| 				meta: pageData.meta, | ||||
| 				parent: pageData.parent || {}, | ||||
| 				text: mark.removeMarkdown(pageData.markdown) | ||||
| 			}; | ||||
| 		}).then((content) => { | ||||
| 			return db.Entry.findOneAndUpdate({ | ||||
| 				_id: content.entryPath | ||||
| 			}, { | ||||
| 				_id: content.entryPath, | ||||
| 				title: content.meta.title || content.entryPath, | ||||
| 				subtitle: content.meta.subtitle || '', | ||||
| 				parent: content.parent.title || '', | ||||
| 				content: content.text || '' | ||||
| 			}, { | ||||
| 				new: true, | ||||
| 				upsert: true | ||||
| 			}); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Create a new document | ||||
| 	 * | ||||
| 	 * @param      {String}            entryPath  The entry path | ||||
| 	 * @param      {String}            contents   The markdown-formatted contents | ||||
| 	 * @return     {Promise<Boolean>}  True on success, false on failure | ||||
| 	 */ | ||||
| 	create(entryPath, contents) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		return self.exists(entryPath).then((docExists) => { | ||||
| 			if(!docExists) { | ||||
| 				return self.makePersistent(entryPath, contents).then(() => { | ||||
| 					return self.updateCache(entryPath); | ||||
| 				}); | ||||
| 			} else { | ||||
| 				return Promise.reject(new Error('Entry already exists!')); | ||||
| 			} | ||||
| 		}).catch((err) => { | ||||
| 			winston.error(err); | ||||
| 			return Promise.reject(new Error('Something went wrong.')); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Makes a document persistent to disk and git repository | ||||
| 	 * | ||||
| 	 * @param      {String}            entryPath  The entry path | ||||
| 	 * @param      {String}            contents   The markdown-formatted contents | ||||
| 	 * @return     {Promise<Boolean>}  True on success, false on failure | ||||
| 	 */ | ||||
| 	makePersistent(entryPath, contents) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		let fpath = self.getFullPath(entryPath); | ||||
|  | ||||
| 		return fs.outputFileAsync(fpath, contents).then(() => { | ||||
| 			return git.commitDocument(entryPath); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Move a document | ||||
| 	 * | ||||
| 	 * @param      {String}   entryPath     The current entry path | ||||
| 	 * @param      {String}   newEntryPath  The new entry path | ||||
| 	 * @return     {Promise}  Promise of the operation | ||||
| 	 */ | ||||
| 	move(entryPath, newEntryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		if(_.isEmpty(entryPath) || entryPath === 'home') { | ||||
| 			return Promise.reject(new Error('Invalid path!')); | ||||
| 		} | ||||
|  | ||||
| 		return git.moveDocument(entryPath, newEntryPath).then(() => { | ||||
| 			return git.commitDocument(newEntryPath).then(() => { | ||||
|  | ||||
| 				// Delete old cache version | ||||
|  | ||||
| 				let oldEntryCachePath = self.getCachePath(entryPath); | ||||
| 				fs.unlinkAsync(oldEntryCachePath).catch((err) => { return true; }); | ||||
|  | ||||
| 				// Delete old index entry | ||||
|  | ||||
| 				ws.emit('searchDel', { | ||||
| 					auth: WSInternalKey, | ||||
| 					entryPath | ||||
| 				}); | ||||
|  | ||||
| 				// Create cache for new entry | ||||
|  | ||||
| 				return self.updateCache(newEntryPath); | ||||
|  | ||||
| 			}); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Generate a starter page content based on the entry path | ||||
| 	 * | ||||
| 	 * @param      {String}           entryPath  The entry path | ||||
| 	 * @return     {Promise<String>}  Starter content | ||||
| 	 */ | ||||
| 	getStarter(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		let formattedTitle = _.startCase(_.last(_.split(entryPath, '/'))); | ||||
|  | ||||
| 		return fs.readFileAsync(path.join(ROOTPATH, 'client/content/create.md'), 'utf8').then((contents) => { | ||||
| 			return _.replace(contents, new RegExp('{TITLE}', 'g'), formattedTitle); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Searches entries based on terms. | ||||
| 	 * | ||||
| 	 * @param      {String}  terms   The terms to search for | ||||
| 	 * @return     {Promise<Object>}  Promise of the search results | ||||
| 	 */ | ||||
| 	search(terms) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		terms = _.chain(terms) | ||||
| 							.deburr() | ||||
| 							.toLower() | ||||
| 							.trim() | ||||
| 							.replace(/[^a-z0-9\- ]/g, '') | ||||
| 							.split(' ') | ||||
| 							.filter((f) => { return !_.isEmpty(f); }) | ||||
| 							.join(' ') | ||||
| 							.value(); | ||||
|  | ||||
| 		return db.Entry.find( | ||||
| 			{ $text: { $search: terms } }, | ||||
| 			{ score: { $meta: "textScore" }, title: 1 } | ||||
| 		) | ||||
| 		.sort({ score: { $meta: "textScore" } }) | ||||
| 		.limit(10) | ||||
| 		.exec() | ||||
| 		.then((hits) => { | ||||
|  | ||||
| 			if(hits.length < 5) { | ||||
| 				let regMatch = new RegExp('^' + _.split(terms, ' ')[0]); | ||||
| 				return db.Entry.find({ | ||||
| 					_id: { $regex: regMatch } | ||||
| 				}, '_id') | ||||
| 				.sort('_id') | ||||
| 				.limit(5) | ||||
| 				.exec() | ||||
| 				.then((matches) => { | ||||
| 					return { | ||||
| 						match: hits, | ||||
| 						suggest: (matches) ? _.map(matches, '_id') : [] | ||||
| 					}; | ||||
| 				}); | ||||
| 			} else { | ||||
| 				return { | ||||
| 					match: _.filter(hits, (h) => { return h._doc.score >= 1; }), | ||||
| 					suggest: [] | ||||
| 				}; | ||||
| 			} | ||||
|  | ||||
| 		}).catch((err) => { | ||||
|  | ||||
| 			winston.error(err); | ||||
| 			return { | ||||
| 				match: [], | ||||
| 				suggest: [] | ||||
| 			}; | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 	} | ||||
|  | ||||
| }; | ||||
|   _repoPath: 'repo', | ||||
|   _cachePath: 'data/cache', | ||||
|  | ||||
|   /** | ||||
|    * Initialize Entries model | ||||
|    * | ||||
|    * @return     {Object}  Entries model instance | ||||
|    */ | ||||
|   init () { | ||||
|     let self = this | ||||
|  | ||||
|     self._repoPath = path.resolve(ROOTPATH, appconfig.paths.repo) | ||||
|     self._cachePath = path.resolve(ROOTPATH, appconfig.paths.data, 'cache') | ||||
|  | ||||
|     return self | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Check if a document already exists | ||||
|    * | ||||
|    * @param      {String}  entryPath  The entry path | ||||
|    * @return     {Promise<Boolean>}  True if exists, false otherwise | ||||
|    */ | ||||
|   exists (entryPath) { | ||||
|     let self = this | ||||
|  | ||||
|     return self.fetchOriginal(entryPath, { | ||||
|       parseMarkdown: false, | ||||
|       parseMeta: false, | ||||
|       parseTree: false, | ||||
|       includeMarkdown: false, | ||||
|       includeParentInfo: false, | ||||
|       cache: false | ||||
|     }).then(() => { | ||||
|       return true | ||||
|     }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|       return false | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Fetch a document from cache, otherwise the original | ||||
|    * | ||||
|    * @param      {String}           entryPath  The entry path | ||||
|    * @return     {Promise<Object>}  Page Data | ||||
|    */ | ||||
|   fetch (entryPath) { | ||||
|     let self = this | ||||
|  | ||||
|     let cpath = self.getCachePath(entryPath) | ||||
|  | ||||
|     return fs.statAsync(cpath).then((st) => { | ||||
|       return st.isFile() | ||||
|     }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|       return false | ||||
|     }).then((isCache) => { | ||||
|       if (isCache) { | ||||
|         // Load from cache | ||||
|  | ||||
|         return fs.readFileAsync(cpath).then((contents) => { | ||||
|           return JSON.parse(contents) | ||||
|         }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|           winston.error('Corrupted cache file. Deleting it...') | ||||
|           fs.unlinkSync(cpath) | ||||
|           return false | ||||
|         }) | ||||
|       } else { | ||||
|         // Load original | ||||
|  | ||||
|         return self.fetchOriginal(entryPath) | ||||
|       } | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Fetches the original document entry | ||||
|    * | ||||
|    * @param      {String}           entryPath  The entry path | ||||
|    * @param      {Object}           options    The options | ||||
|    * @return     {Promise<Object>}  Page data | ||||
|    */ | ||||
|   fetchOriginal (entryPath, options) { | ||||
|     let self = this | ||||
|  | ||||
|     let fpath = self.getFullPath(entryPath) | ||||
|     let cpath = self.getCachePath(entryPath) | ||||
|  | ||||
|     options = _.defaults(options, { | ||||
|       parseMarkdown: true, | ||||
|       parseMeta: true, | ||||
|       parseTree: true, | ||||
|       includeMarkdown: false, | ||||
|       includeParentInfo: true, | ||||
|       cache: true | ||||
|     }) | ||||
|  | ||||
|     return fs.statAsync(fpath).then((st) => { | ||||
|       if (st.isFile()) { | ||||
|         return fs.readFileAsync(fpath, 'utf8').then((contents) => { | ||||
|           // Parse contents | ||||
|  | ||||
|           let pageData = { | ||||
|             markdown: (options.includeMarkdown) ? contents : '', | ||||
|             html: (options.parseMarkdown) ? mark.parseContent(contents) : '', | ||||
|             meta: (options.parseMeta) ? mark.parseMeta(contents) : {}, | ||||
|             tree: (options.parseTree) ? mark.parseTree(contents) : [] | ||||
|           } | ||||
|  | ||||
|           if (!pageData.meta.title) { | ||||
|             pageData.meta.title = _.startCase(entryPath) | ||||
|           } | ||||
|  | ||||
|           pageData.meta.path = entryPath | ||||
|  | ||||
|           // Get parent | ||||
|  | ||||
|           let parentPromise = (options.includeParentInfo) ? self.getParentInfo(entryPath).then((parentData) => { | ||||
|             return (pageData.parent = parentData) | ||||
|           }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|             return (pageData.parent = false) | ||||
|           }) : Promise.resolve(true) | ||||
|  | ||||
|           return parentPromise.then(() => { | ||||
|             // Cache to disk | ||||
|  | ||||
|             if (options.cache) { | ||||
|               let cacheData = JSON.stringify(_.pick(pageData, ['html', 'meta', 'tree', 'parent']), false, false, false) | ||||
|               return fs.writeFileAsync(cpath, cacheData).catch((err) => { | ||||
|                 winston.error('Unable to write to cache! Performance may be affected.') | ||||
|                 winston.error(err) | ||||
|                 return true | ||||
|               }) | ||||
|             } else { | ||||
|               return true | ||||
|             } | ||||
|           }).return(pageData) | ||||
|         }) | ||||
|       } else { | ||||
|         return false | ||||
|       } | ||||
|     }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|       return Promise.reject(new Promise.OperationalError('Entry ' + entryPath + ' does not exist!')) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Parse raw url path and make it safe | ||||
|    * | ||||
|    * @param      {String}  urlPath  The url path | ||||
|    * @return     {String}  Safe entry path | ||||
|    */ | ||||
|   parsePath (urlPath) { | ||||
|     let wlist = new RegExp('[^a-z0-9/-]', 'g') | ||||
|  | ||||
|     urlPath = _.toLower(urlPath).replace(wlist, '') | ||||
|  | ||||
|     if (urlPath === '/') { | ||||
|       urlPath = 'home' | ||||
|     } | ||||
|  | ||||
|     let urlParts = _.filter(_.split(urlPath, '/'), (p) => { return !_.isEmpty(p) }) | ||||
|  | ||||
|     return _.join(urlParts, '/') | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Gets the parent information. | ||||
|    * | ||||
|    * @param      {String}                 entryPath  The entry path | ||||
|    * @return     {Promise<Object|False>}  The parent information. | ||||
|    */ | ||||
|   getParentInfo (entryPath) { | ||||
|     let self = this | ||||
|  | ||||
|     if (_.includes(entryPath, '/')) { | ||||
|       let parentParts = _.initial(_.split(entryPath, '/')) | ||||
|       let parentPath = _.join(parentParts, '/') | ||||
|       let parentFile = _.last(parentParts) | ||||
|       let fpath = self.getFullPath(parentPath) | ||||
|  | ||||
|       return fs.statAsync(fpath).then((st) => { | ||||
|         if (st.isFile()) { | ||||
|           return fs.readFileAsync(fpath, 'utf8').then((contents) => { | ||||
|             let pageMeta = mark.parseMeta(contents) | ||||
|  | ||||
|             return { | ||||
|               path: parentPath, | ||||
|               title: (pageMeta.title) ? pageMeta.title : _.startCase(parentFile), | ||||
|               subtitle: (pageMeta.subtitle) ? pageMeta.subtitle : false | ||||
|             } | ||||
|           }) | ||||
|         } else { | ||||
|           return Promise.reject(new Error('Parent entry is not a valid file.')) | ||||
|         } | ||||
|       }) | ||||
|     } else { | ||||
|       return Promise.reject(new Error('Parent entry is root.')) | ||||
|     } | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Gets the full original path of a document. | ||||
|    * | ||||
|    * @param      {String}  entryPath  The entry path | ||||
|    * @return     {String}  The full path. | ||||
|    */ | ||||
|   getFullPath (entryPath) { | ||||
|     return path.join(this._repoPath, entryPath + '.md') | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Gets the full cache path of a document. | ||||
|    * | ||||
|    * @param      {String}    entryPath  The entry path | ||||
|    * @return     {String}  The full cache path. | ||||
|    */ | ||||
|   getCachePath (entryPath) { | ||||
|     return path.join(this._cachePath, farmhash.fingerprint32(entryPath) + '.json') | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Gets the entry path from full path. | ||||
|    * | ||||
|    * @param      {String}  fullPath  The full path | ||||
|    * @return     {String}  The entry path | ||||
|    */ | ||||
|   getEntryPathFromFullPath (fullPath) { | ||||
|     let absRepoPath = path.resolve(ROOTPATH, this._repoPath) | ||||
|     return _.chain(fullPath).replace(absRepoPath, '').replace('.md', '').replace(new RegExp('\\\\', 'g'), '/').value() | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Update an existing document | ||||
|    * | ||||
|    * @param      {String}            entryPath  The entry path | ||||
|    * @param      {String}            contents   The markdown-formatted contents | ||||
|    * @return     {Promise<Boolean>}  True on success, false on failure | ||||
|    */ | ||||
|   update (entryPath, contents) { | ||||
|     let self = this | ||||
|     let fpath = self.getFullPath(entryPath) | ||||
|  | ||||
|     return fs.statAsync(fpath).then((st) => { | ||||
|       if (st.isFile()) { | ||||
|         return self.makePersistent(entryPath, contents).then(() => { | ||||
|           return self.updateCache(entryPath) | ||||
|         }) | ||||
|       } else { | ||||
|         return Promise.reject(new Error('Entry does not exist!')) | ||||
|       } | ||||
|     }).catch((err) => { | ||||
|       winston.error(err) | ||||
|       return Promise.reject(new Error('Failed to save document.')) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Update local cache and search index | ||||
|    * | ||||
|    * @param      {String}   entryPath  The entry path | ||||
|    * @return     {Promise}  Promise of the operation | ||||
|    */ | ||||
|   updateCache (entryPath) { | ||||
|     let self = this | ||||
|  | ||||
|     return self.fetchOriginal(entryPath, { | ||||
|       parseMarkdown: true, | ||||
|       parseMeta: true, | ||||
|       parseTree: true, | ||||
|       includeMarkdown: true, | ||||
|       includeParentInfo: true, | ||||
|       cache: true | ||||
|     }).then((pageData) => { | ||||
|       return { | ||||
|         entryPath, | ||||
|         meta: pageData.meta, | ||||
|         parent: pageData.parent || {}, | ||||
|         text: mark.removeMarkdown(pageData.markdown) | ||||
|       } | ||||
|     }).then((content) => { | ||||
|       return db.Entry.findOneAndUpdate({ | ||||
|         _id: content.entryPath | ||||
|       }, { | ||||
|         _id: content.entryPath, | ||||
|         title: content.meta.title || content.entryPath, | ||||
|         subtitle: content.meta.subtitle || '', | ||||
|         parent: content.parent.title || '', | ||||
|         content: content.text || '' | ||||
|       }, { | ||||
|         new: true, | ||||
|         upsert: true | ||||
|       }) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Create a new document | ||||
|    * | ||||
|    * @param      {String}            entryPath  The entry path | ||||
|    * @param      {String}            contents   The markdown-formatted contents | ||||
|    * @return     {Promise<Boolean>}  True on success, false on failure | ||||
|    */ | ||||
|   create (entryPath, contents) { | ||||
|     let self = this | ||||
|  | ||||
|     return self.exists(entryPath).then((docExists) => { | ||||
|       if (!docExists) { | ||||
|         return self.makePersistent(entryPath, contents).then(() => { | ||||
|           return self.updateCache(entryPath) | ||||
|         }) | ||||
|       } else { | ||||
|         return Promise.reject(new Error('Entry already exists!')) | ||||
|       } | ||||
|     }).catch((err) => { | ||||
|       winston.error(err) | ||||
|       return Promise.reject(new Error('Something went wrong.')) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Makes a document persistent to disk and git repository | ||||
|    * | ||||
|    * @param      {String}            entryPath  The entry path | ||||
|    * @param      {String}            contents   The markdown-formatted contents | ||||
|    * @return     {Promise<Boolean>}  True on success, false on failure | ||||
|    */ | ||||
|   makePersistent (entryPath, contents) { | ||||
|     let self = this | ||||
|     let fpath = self.getFullPath(entryPath) | ||||
|  | ||||
|     return fs.outputFileAsync(fpath, contents).then(() => { | ||||
|       return git.commitDocument(entryPath) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Move a document | ||||
|    * | ||||
|    * @param      {String}   entryPath     The current entry path | ||||
|    * @param      {String}   newEntryPath  The new entry path | ||||
|    * @return     {Promise}  Promise of the operation | ||||
|    */ | ||||
|   move (entryPath, newEntryPath) { | ||||
|     let self = this | ||||
|  | ||||
|     if (_.isEmpty(entryPath) || entryPath === 'home') { | ||||
|       return Promise.reject(new Error('Invalid path!')) | ||||
|     } | ||||
|  | ||||
|     return git.moveDocument(entryPath, newEntryPath).then(() => { | ||||
|       return git.commitDocument(newEntryPath).then(() => { | ||||
|         // Delete old cache version | ||||
|  | ||||
|         let oldEntryCachePath = self.getCachePath(entryPath) | ||||
|         fs.unlinkAsync(oldEntryCachePath).catch((err) => { return true }) // eslint-disable-line handle-callback-err | ||||
|  | ||||
|         // Delete old index entry | ||||
|  | ||||
|         ws.emit('searchDel', { | ||||
|           auth: WSInternalKey, | ||||
|           entryPath | ||||
|         }) | ||||
|  | ||||
|         // Create cache for new entry | ||||
|  | ||||
|         return self.updateCache(newEntryPath) | ||||
|       }) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Generate a starter page content based on the entry path | ||||
|    * | ||||
|    * @param      {String}           entryPath  The entry path | ||||
|    * @return     {Promise<String>}  Starter content | ||||
|    */ | ||||
|   getStarter (entryPath) { | ||||
|     let formattedTitle = _.startCase(_.last(_.split(entryPath, '/'))) | ||||
|  | ||||
|     return fs.readFileAsync(path.join(ROOTPATH, 'client/content/create.md'), 'utf8').then((contents) => { | ||||
|       return _.replace(contents, new RegExp('{TITLE}', 'g'), formattedTitle) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Searches entries based on terms. | ||||
|    * | ||||
|    * @param      {String}  terms   The terms to search for | ||||
|    * @return     {Promise<Object>}  Promise of the search results | ||||
|    */ | ||||
|   search (terms) { | ||||
|     terms = _.chain(terms) | ||||
|       .deburr() | ||||
|       .toLower() | ||||
|       .trim() | ||||
|       .replace(/[^a-z0-9\- ]/g, '') | ||||
|       .split(' ') | ||||
|       .filter((f) => { return !_.isEmpty(f) }) | ||||
|       .join(' ') | ||||
|       .value() | ||||
|  | ||||
|     return db.Entry.find( | ||||
|       { $text: { $search: terms } }, | ||||
|       { score: { $meta: 'textScore' }, title: 1 } | ||||
|     ) | ||||
|     .sort({ score: { $meta: 'textScore' } }) | ||||
|     .limit(10) | ||||
|     .exec() | ||||
|     .then((hits) => { | ||||
|       if (hits.length < 5) { | ||||
|         let regMatch = new RegExp('^' + _.split(terms, ' ')[0]) | ||||
|         return db.Entry.find({ | ||||
|           _id: { $regex: regMatch } | ||||
|         }, '_id') | ||||
|             .sort('_id') | ||||
|             .limit(5) | ||||
|             .exec() | ||||
|             .then((matches) => { | ||||
|               return { | ||||
|                 match: hits, | ||||
|                 suggest: (matches) ? _.map(matches, '_id') : [] | ||||
|               } | ||||
|             }) | ||||
|       } else { | ||||
|         return { | ||||
|           match: _.filter(hits, (h) => { return h._doc.score >= 1 }), | ||||
|           suggest: [] | ||||
|         } | ||||
|       } | ||||
|     }).catch((err) => { | ||||
|       winston.error(err) | ||||
|       return { | ||||
|         match: [], | ||||
|         suggest: [] | ||||
|       } | ||||
|     }) | ||||
|   } | ||||
|  | ||||
| } | ||||
|   | ||||
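For reference, a minimal usage sketch of the converted Entries model. This is not part of the commit; it assumes the globals the module references without requiring them (ROOTPATH, appconfig, winston, mark, db, git, ws) have been set up by the application bootstrap:

    const entries = require('./libs/entries').init()

    // Normalize a raw URL into a safe entry path, then fetch the page
    const entryPath = entries.parsePath('/Docs/Install Guide/') // -> 'docs/installguide'
    entries.fetch(entryPath).then((page) => {
      // page.html, page.meta and page.tree come from the JSON cache when it exists,
      // otherwise from the original Markdown file under the repo path
      console.log(page.meta.title)
    }).catch((err) => {
      winston.error(err)
    })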
							
								
								
									
libs/git.js (415 changes)
							| @@ -1,258 +1,231 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| var Git = require("git-wrapper2-promise"), | ||||
| 	Promise = require('bluebird'), | ||||
| 	path = require('path'), | ||||
| 	os = require('os'), | ||||
| 	fs = Promise.promisifyAll(require("fs")), | ||||
| 	moment = require('moment'), | ||||
| 	_ = require('lodash'), | ||||
| 	URL = require('url'); | ||||
| const Git = require('git-wrapper2-promise') | ||||
| const Promise = require('bluebird') | ||||
| const path = require('path') | ||||
| const fs = Promise.promisifyAll(require('fs')) | ||||
| const _ = require('lodash') | ||||
| const URL = require('url') | ||||
|  | ||||
| /** | ||||
|  * Git Model | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_git: null, | ||||
| 	_url: '', | ||||
| 	_repo: { | ||||
| 		path: '', | ||||
| 		branch: 'master', | ||||
| 		exists: false | ||||
| 	}, | ||||
| 	_signature: { | ||||
| 		name: 'Wiki', | ||||
| 		email: 'user@example.com' | ||||
| 	}, | ||||
| 	_opts: { | ||||
| 		clone: {}, | ||||
| 		push: {} | ||||
| 	}, | ||||
| 	onReady: null, | ||||
|   _git: null, | ||||
|   _url: '', | ||||
|   _repo: { | ||||
|     path: '', | ||||
|     branch: 'master', | ||||
|     exists: false | ||||
|   }, | ||||
|   _signature: { | ||||
|     name: 'Wiki', | ||||
|     email: 'user@example.com' | ||||
|   }, | ||||
|   _opts: { | ||||
|     clone: {}, | ||||
|     push: {} | ||||
|   }, | ||||
|   onReady: null, | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Git model | ||||
| 	 * | ||||
| 	 * @return     {Object}  Git model instance | ||||
| 	 */ | ||||
| 	init() { | ||||
|   /** | ||||
|    * Initialize Git model | ||||
|    * | ||||
|    * @return     {Object}  Git model instance | ||||
|    */ | ||||
|   init () { | ||||
|     let self = this | ||||
|  | ||||
| 		let self = this; | ||||
|     // -> Build repository path | ||||
|  | ||||
| 		//-> Build repository path | ||||
| 		 | ||||
| 		if(_.isEmpty(appconfig.paths.repo)) { | ||||
| 			self._repo.path = path.join(ROOTPATH, 'repo'); | ||||
| 		} else { | ||||
| 			self._repo.path = appconfig.paths.repo; | ||||
| 		} | ||||
|     if (_.isEmpty(appconfig.paths.repo)) { | ||||
|       self._repo.path = path.join(ROOTPATH, 'repo') | ||||
|     } else { | ||||
|       self._repo.path = appconfig.paths.repo | ||||
|     } | ||||
|  | ||||
| 		//-> Initialize repository | ||||
|     // -> Initialize repository | ||||
|  | ||||
| 		self.onReady = self._initRepo(appconfig); | ||||
|     self.onReady = self._initRepo(appconfig) | ||||
|  | ||||
| 		// Define signature | ||||
|     // Define signature | ||||
|  | ||||
| 		self._signature.name = appconfig.git.signature.name || 'Wiki'; | ||||
| 		self._signature.email = appconfig.git.signature.email || 'user@example.com'; | ||||
|     self._signature.name = appconfig.git.signature.name || 'Wiki' | ||||
|     self._signature.email = appconfig.git.signature.email || 'user@example.com' | ||||
|  | ||||
| 		return self; | ||||
|     return self | ||||
|   }, | ||||
|  | ||||
| 	}, | ||||
|   /** | ||||
|    * Initialize Git repository | ||||
|    * | ||||
|    * @param      {Object}  appconfig  The application config | ||||
|    * @return     {Object}  Promise | ||||
|    */ | ||||
|   _initRepo (appconfig) { | ||||
|     let self = this | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Git repository | ||||
| 	 * | ||||
| 	 * @param      {Object}  appconfig  The application config | ||||
| 	 * @return     {Object}  Promise | ||||
| 	 */ | ||||
| 	_initRepo(appconfig) { | ||||
|     winston.info('[' + PROCNAME + '][GIT] Checking Git repository...') | ||||
|  | ||||
| 		let self = this; | ||||
|     // -> Check if path is accessible | ||||
|  | ||||
| 		winston.info('[' + PROCNAME + '][GIT] Checking Git repository...'); | ||||
|     return fs.mkdirAsync(self._repo.path).catch((err) => { | ||||
|       if (err.code !== 'EEXIST') { | ||||
|         winston.error('[' + PROCNAME + '][GIT] Invalid Git repository path or missing permissions.') | ||||
|       } | ||||
|     }).then(() => { | ||||
|       self._git = new Git({ 'git-dir': self._repo.path }) | ||||
|  | ||||
| 		//-> Check if path is accessible | ||||
|       // -> Check if path already contains a git working folder | ||||
|  | ||||
| 		return fs.mkdirAsync(self._repo.path).catch((err) => { | ||||
| 			if(err.code !== 'EEXIST') { | ||||
| 				winston.error('[' + PROCNAME + '][GIT] Invalid Git repository path or missing permissions.'); | ||||
| 			} | ||||
| 		}).then(() => { | ||||
|       return self._git.isRepo().then((isRepo) => { | ||||
|         self._repo.exists = isRepo | ||||
|         return (!isRepo) ? self._git.exec('init') : true | ||||
|       }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|         self._repo.exists = false | ||||
|       }) | ||||
|     }).then(() => { | ||||
|       // Initialize remote | ||||
|  | ||||
| 			self._git = new Git({ 'git-dir': self._repo.path }); | ||||
|       let urlObj = URL.parse(appconfig.git.url) | ||||
|       urlObj.auth = appconfig.git.auth.username + ((appconfig.git.auth.type !== 'ssh') ? ':' + appconfig.git.auth.password : '') | ||||
|       self._url = URL.format(urlObj) | ||||
|  | ||||
| 			//-> Check if path already contains a git working folder | ||||
|       return self._git.exec('remote', 'show').then((cProc) => { | ||||
|         let out = cProc.stdout.toString() | ||||
|         if (_.includes(out, 'origin')) { | ||||
|           return true | ||||
|         } else { | ||||
|           return Promise.join( | ||||
|             self._git.exec('config', ['--local', 'user.name', self._signature.name]), | ||||
|             self._git.exec('config', ['--local', 'user.email', self._signature.email]) | ||||
|           ).then(() => { | ||||
|             return self._git.exec('remote', ['add', 'origin', self._url]) | ||||
|           }) | ||||
|         } | ||||
|       }) | ||||
|     }).catch((err) => { | ||||
|       winston.error('[' + PROCNAME + '][GIT] Git remote error!') | ||||
|       throw err | ||||
|     }).then(() => { | ||||
|       winston.info('[' + PROCNAME + '][GIT] Git repository is OK.') | ||||
|       return true | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 			return self._git.isRepo().then((isRepo) => { | ||||
| 				self._repo.exists = isRepo; | ||||
| 				return (!isRepo) ? self._git.exec('init') : true; | ||||
| 			}).catch((err) => { | ||||
| 				self._repo.exists = false; | ||||
| 			}); | ||||
|   /** | ||||
|    * Gets the repo path. | ||||
|    * | ||||
|    * @return     {String}  The repo path. | ||||
|    */ | ||||
|   getRepoPath () { | ||||
|     return this._repo.path || path.join(ROOTPATH, 'repo') | ||||
|   }, | ||||
|  | ||||
| 		}).then(() => { | ||||
|   /** | ||||
|    * Sync with the remote repository | ||||
|    * | ||||
|    * @return     {Promise}  Resolve on sync success | ||||
|    */ | ||||
|   resync () { | ||||
|     let self = this | ||||
|  | ||||
| 			// Initialize remote | ||||
|     // Fetch | ||||
|  | ||||
| 			let urlObj = URL.parse(appconfig.git.url); | ||||
| 			urlObj.auth = appconfig.git.auth.username + ((appconfig.git.auth.type !== 'ssh') ? ':' + appconfig.git.auth.password : ''); | ||||
| 			self._url = URL.format(urlObj); | ||||
|     winston.info('[' + PROCNAME + '][GIT] Performing pull from remote repository...') | ||||
|     return self._git.pull('origin', self._repo.branch).then((cProc) => { | ||||
|       winston.info('[' + PROCNAME + '][GIT] Pull completed.') | ||||
|     }) | ||||
|     .catch((err) => { | ||||
|       winston.error('[' + PROCNAME + '][GIT] Unable to fetch from git origin!') | ||||
|       throw err | ||||
|     }) | ||||
|     .then(() => { | ||||
|       // Check for changes | ||||
|  | ||||
| 			return self._git.exec('remote', 'show').then((cProc) => { | ||||
| 				let out = cProc.stdout.toString(); | ||||
| 				if(_.includes(out, 'origin')) { | ||||
| 					return true; | ||||
| 				} else { | ||||
| 					return Promise.join( | ||||
| 						self._git.exec('config', ['--local', 'user.name', self._signature.name]), | ||||
| 						self._git.exec('config', ['--local', 'user.email', self._signature.email]) | ||||
| 					).then(() => { | ||||
| 						return self._git.exec('remote', ['add', 'origin', self._url]); | ||||
| 					}); | ||||
| 				} | ||||
| 			}); | ||||
|       return self._git.exec('log', 'origin/' + self._repo.branch + '..HEAD').then((cProc) => { | ||||
|         let out = cProc.stdout.toString() | ||||
|  | ||||
| 		}).catch((err) => { | ||||
| 			winston.error('[' + PROCNAME + '][GIT] Git remote error!'); | ||||
| 			throw err; | ||||
| 		}).then(() => { | ||||
| 			winston.info('[' + PROCNAME + '][GIT] Git repository is OK.'); | ||||
| 			return true; | ||||
| 		}); | ||||
|         if (_.includes(out, 'commit')) { | ||||
|           winston.info('[' + PROCNAME + '][GIT] Performing push to remote repository...') | ||||
|           return self._git.push('origin', self._repo.branch).then(() => { | ||||
|             return winston.info('[' + PROCNAME + '][GIT] Push completed.') | ||||
|           }) | ||||
|         } else { | ||||
|           winston.info('[' + PROCNAME + '][GIT] Push skipped. Repository is already in sync.') | ||||
|         } | ||||
|  | ||||
| 	}, | ||||
|         return true | ||||
|       }) | ||||
|     }) | ||||
|     .catch((err) => { | ||||
|       winston.error('[' + PROCNAME + '][GIT] Unable to push changes to remote!') | ||||
|       throw err | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the repo path. | ||||
| 	 * | ||||
| 	 * @return     {String}  The repo path. | ||||
| 	 */ | ||||
| 	getRepoPath() { | ||||
|   /** | ||||
|    * Commits a document. | ||||
|    * | ||||
|    * @param      {String}   entryPath  The entry path | ||||
|    * @return     {Promise}  Resolve on commit success | ||||
|    */ | ||||
|   commitDocument (entryPath) { | ||||
|     let self = this | ||||
|     let gitFilePath = entryPath + '.md' | ||||
|     let commitMsg = '' | ||||
|  | ||||
| 		return this._repo.path || path.join(ROOTPATH, 'repo'); | ||||
|     return self._git.exec('ls-files', gitFilePath).then((cProc) => { | ||||
|       let out = cProc.stdout.toString() | ||||
|       return _.includes(out, gitFilePath) | ||||
|     }).then((isTracked) => { | ||||
|       commitMsg = (isTracked) ? 'Updated ' + gitFilePath : 'Added ' + gitFilePath | ||||
|       return self._git.add(gitFilePath) | ||||
|     }).then(() => { | ||||
|       return self._git.commit(commitMsg).catch((err) => { | ||||
|         if (_.includes(err.stdout, 'nothing to commit')) { return true } | ||||
|       }) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 	}, | ||||
|   /** | ||||
|    * Move a document. | ||||
|    * | ||||
|    * @param      {String}            entryPath     The current entry path | ||||
|    * @param      {String}            newEntryPath  The new entry path | ||||
|    * @return     {Promise<Boolean>}  Resolve on success | ||||
|    */ | ||||
|   moveDocument (entryPath, newEntryPath) { | ||||
|     let self = this | ||||
|     let gitFilePath = entryPath + '.md' | ||||
|     let gitNewFilePath = newEntryPath + '.md' | ||||
|  | ||||
| 	/** | ||||
| 	 * Sync with the remote repository | ||||
| 	 * | ||||
| 	 * @return     {Promise}  Resolve on sync success | ||||
| 	 */ | ||||
| 	resync() { | ||||
|     return self._git.exec('mv', [gitFilePath, gitNewFilePath]).then((cProc) => { | ||||
|       let out = cProc.stdout.toString() | ||||
|       if (_.includes(out, 'fatal')) { | ||||
|         let errorMsg = _.capitalize(_.head(_.split(_.replace(out, 'fatal: ', ''), ','))) | ||||
|         throw new Error(errorMsg) | ||||
|       } | ||||
|       return true | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 		let self = this; | ||||
|   /** | ||||
|    * Commits uploads changes. | ||||
|    * | ||||
|    * @param      {String}   msg     The commit message | ||||
|    * @return     {Promise}  Resolve on commit success | ||||
|    */ | ||||
|   commitUploads (msg) { | ||||
|     let self = this | ||||
|     msg = msg || 'Uploads repository sync' | ||||
|  | ||||
| 		// Fetch | ||||
|     return self._git.add('uploads').then(() => { | ||||
|       return self._git.commit(msg).catch((err) => { | ||||
|         if (_.includes(err.stdout, 'nothing to commit')) { return true } | ||||
|       }) | ||||
|     }) | ||||
|   } | ||||
|  | ||||
| 		winston.info('[' + PROCNAME + '][GIT] Performing pull from remote repository...'); | ||||
| 		return self._git.pull('origin', self._repo.branch).then((cProc) => { | ||||
| 			winston.info('[' + PROCNAME + '][GIT] Pull completed.'); | ||||
| 		}) | ||||
| 		.catch((err) => { | ||||
| 			winston.error('[' + PROCNAME + '][GIT] Unable to fetch from git origin!'); | ||||
| 			throw err; | ||||
| 		}) | ||||
| 		.then(() => { | ||||
|  | ||||
| 			// Check for changes | ||||
|  | ||||
| 			return self._git.exec('log', 'origin/' + self._repo.branch + '..HEAD').then((cProc) => { | ||||
| 				let out = cProc.stdout.toString(); | ||||
|  | ||||
| 				if(_.includes(out, 'commit')) { | ||||
|  | ||||
| 					winston.info('[' + PROCNAME + '][GIT] Performing push to remote repository...'); | ||||
| 					return self._git.push('origin', self._repo.branch).then(() => { | ||||
| 						return winston.info('[' + PROCNAME + '][GIT] Push completed.'); | ||||
| 					}); | ||||
|  | ||||
| 				} else { | ||||
|  | ||||
| 					winston.info('[' + PROCNAME + '][GIT] Push skipped. Repository is already in sync.'); | ||||
|  | ||||
| 				} | ||||
|  | ||||
| 				return true; | ||||
|  | ||||
| 			}); | ||||
|  | ||||
| 		}) | ||||
| 		.catch((err) => { | ||||
| 			winston.error('[' + PROCNAME + '][GIT] Unable to push changes to remote!'); | ||||
| 			throw err; | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Commits a document. | ||||
| 	 * | ||||
| 	 * @param      {String}   entryPath  The entry path | ||||
| 	 * @return     {Promise}  Resolve on commit success | ||||
| 	 */ | ||||
| 	commitDocument(entryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		let gitFilePath = entryPath + '.md'; | ||||
| 		let commitMsg = ''; | ||||
|  | ||||
| 		return self._git.exec('ls-files', gitFilePath).then((cProc) => { | ||||
| 			let out = cProc.stdout.toString(); | ||||
| 			return _.includes(out, gitFilePath); | ||||
| 		}).then((isTracked) => { | ||||
| 			commitMsg = (isTracked) ? 'Updated ' + gitFilePath : 'Added ' + gitFilePath; | ||||
| 			return self._git.add(gitFilePath); | ||||
| 		}).then(() => { | ||||
| 			return self._git.commit(commitMsg).catch((err) => { | ||||
| 			  if(_.includes(err.stdout, 'nothing to commit')) { return true; } | ||||
| 			}); | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Move a document. | ||||
| 	 * | ||||
| 	 * @param      {String}            entryPath     The current entry path | ||||
| 	 * @param      {String}            newEntryPath  The new entry path | ||||
| 	 * @return     {Promise<Boolean>}  Resolve on success | ||||
| 	 */ | ||||
| 	moveDocument(entryPath, newEntryPath) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		let gitFilePath = entryPath + '.md'; | ||||
| 		let gitNewFilePath = newEntryPath + '.md'; | ||||
|  | ||||
| 		return self._git.exec('mv', [gitFilePath, gitNewFilePath]).then((cProc) => { | ||||
| 			let out = cProc.stdout.toString(); | ||||
| 			if(_.includes(out, 'fatal')) { | ||||
| 				let errorMsg = _.capitalize(_.head(_.split(_.replace(out, 'fatal: ', ''), ','))); | ||||
| 				throw new Error(errorMsg); | ||||
| 			} | ||||
| 			return true; | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Commits uploads changes. | ||||
| 	 * | ||||
| 	 * @param      {String}   msg     The commit message | ||||
| 	 * @return     {Promise}  Resolve on commit success | ||||
| 	 */ | ||||
| 	commitUploads(msg) { | ||||
|  | ||||
| 		let self = this; | ||||
| 		msg = msg || "Uploads repository sync"; | ||||
|  | ||||
| 		return self._git.add('uploads').then(() => { | ||||
| 			return self._git.commit(msg).catch((err) => { | ||||
| 			  if(_.includes(err.stdout, 'nothing to commit')) { return true; } | ||||
| 			}); | ||||
| 		}); | ||||
|  | ||||
| 	} | ||||
|  | ||||
| }; | ||||
| } | ||||
|   | ||||
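A hedged sketch of how the Git model above is typically driven; the call sequence is illustrative and not part of this commit, and it again assumes the globals ROOTPATH, PROCNAME, appconfig and winston exist:

    const git = require('./libs/git').init()

    // init() starts _initRepo() and exposes the resulting promise as onReady
    git.onReady.then(() => {
      // Pull from origin, then push any local commits that are ahead of it
      return git.resync()
    }).catch((err) => {
      winston.error(err)
    })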
| @@ -1,32 +1,26 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| const crypto = require('crypto'); | ||||
| const crypto = require('crypto') | ||||
|  | ||||
| /** | ||||
|  * Internal Authentication | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_curKey: false, | ||||
|   _curKey: false, | ||||
|  | ||||
| 	init(inKey) { | ||||
|   init (inKey) { | ||||
|     this._curKey = inKey | ||||
|  | ||||
| 		this._curKey = inKey; | ||||
|     return this | ||||
|   }, | ||||
|  | ||||
| 		return this; | ||||
|   generateKey () { | ||||
|     return crypto.randomBytes(20).toString('hex') | ||||
|   }, | ||||
|  | ||||
| 	}, | ||||
|   validateKey (inKey) { | ||||
|     return inKey === this._curKey | ||||
|   } | ||||
|  | ||||
| 	generateKey() { | ||||
|  | ||||
| 		return crypto.randomBytes(20).toString('hex'); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	validateKey(inKey) { | ||||
|  | ||||
| 		return inKey === this._curKey; | ||||
|  | ||||
| 	} | ||||
|  | ||||
| }; | ||||
| } | ||||
|   | ||||
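The small module above provides internal authentication for server-originated events; its file name is not shown in this hunk. A sketch of the intended wiring, inferred from how WSInternalKey is used in entries.js (the require path is an assumption):

    const internalAuth = require('./libs/internal-auth') // path is illustrative only
    const WSInternalKey = internalAuth.generateKey() // random 20-byte hex key
    internalAuth.init(WSInternalKey)

    // A websocket handler can then verify that a message, such as the
    // 'searchDel' event emitted by entries.move(), really came from the server:
    // internalAuth.validateKey(msg.auth) === true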
							
								
								
									
libs/local.js (289 changes)
							| @@ -1,187 +1,176 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| var path = require('path'), | ||||
| 	Promise = require('bluebird'), | ||||
| 	fs = Promise.promisifyAll(require('fs-extra')), | ||||
| 	multer  = require('multer'), | ||||
| 	os = require('os'), | ||||
| 	_ = require('lodash'); | ||||
| const path = require('path') | ||||
| const Promise = require('bluebird') | ||||
| const fs = Promise.promisifyAll(require('fs-extra')) | ||||
| const multer = require('multer') | ||||
| const os = require('os') | ||||
| const _ = require('lodash') | ||||
|  | ||||
| /** | ||||
|  * Local Data Storage | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_uploadsPath: './repo/uploads', | ||||
| 	_uploadsThumbsPath: './data/thumbs', | ||||
|   _uploadsPath: './repo/uploads', | ||||
|   _uploadsThumbsPath: './data/thumbs', | ||||
|  | ||||
| 	uploadImgHandler: null, | ||||
|   uploadImgHandler: null, | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Local Data Storage model | ||||
| 	 * | ||||
| 	 * @return     {Object}  Local Data Storage model instance | ||||
| 	 */ | ||||
| 	init() { | ||||
|   /** | ||||
|    * Initialize Local Data Storage model | ||||
|    * | ||||
|    * @return     {Object}  Local Data Storage model instance | ||||
|    */ | ||||
|   init () { | ||||
|     this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads') | ||||
|     this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs') | ||||
|  | ||||
| 		this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads'); | ||||
| 		this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs'); | ||||
|     this.createBaseDirectories(appconfig) | ||||
|     this.initMulter(appconfig) | ||||
|  | ||||
| 		this.createBaseDirectories(appconfig); | ||||
| 		this.initMulter(appconfig); | ||||
|     return this | ||||
|   }, | ||||
|  | ||||
| 		return this; | ||||
|   /** | ||||
|    * Init Multer upload handlers | ||||
|    * | ||||
|    * @param      {Object}   appconfig  The application config | ||||
|    * @return     {boolean}  Void | ||||
|    */ | ||||
|   initMulter (appconfig) { | ||||
|     let maxFileSizes = { | ||||
|       img: appconfig.uploads.maxImageFileSize * 1024 * 1024, | ||||
|       file: appconfig.uploads.maxOtherFileSize * 1024 * 1024 | ||||
|     } | ||||
|  | ||||
| 	}, | ||||
|     // -> IMAGES | ||||
|  | ||||
| 	/** | ||||
| 	 * Init Multer upload handlers | ||||
| 	 * | ||||
| 	 * @param      {Object}   appconfig  The application config | ||||
| 	 * @return     {boolean}  Void | ||||
| 	 */ | ||||
| 	initMulter(appconfig) { | ||||
|     this.uploadImgHandler = multer({ | ||||
|       storage: multer.diskStorage({ | ||||
|         destination: (req, f, cb) => { | ||||
|           cb(null, path.resolve(ROOTPATH, appconfig.paths.data, 'temp-upload')) | ||||
|         } | ||||
|       }), | ||||
|       fileFilter: (req, f, cb) => { | ||||
|         // -> Check filesize | ||||
|  | ||||
| 		let maxFileSizes = { | ||||
| 			img: appconfig.uploads.maxImageFileSize * 1024 * 1024, | ||||
| 			file: appconfig.uploads.maxOtherFileSize * 1024 * 1024 | ||||
| 		}; | ||||
|         if (f.size > maxFileSizes.img) { | ||||
|           return cb(null, false) | ||||
|         } | ||||
|  | ||||
| 		//-> IMAGES | ||||
|         // -> Check MIME type (quick check only) | ||||
|  | ||||
| 		this.uploadImgHandler = multer({ | ||||
| 			storage: multer.diskStorage({ | ||||
| 				destination: (req, f, cb) => { | ||||
| 					cb(null, path.resolve(ROOTPATH, appconfig.paths.data, 'temp-upload')); | ||||
| 				} | ||||
| 			}), | ||||
| 			fileFilter: (req, f, cb) => { | ||||
|         if (!_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], f.mimetype)) { | ||||
|           return cb(null, false) | ||||
|         } | ||||
|  | ||||
| 				//-> Check filesize | ||||
|         cb(null, true) | ||||
|       } | ||||
|     }).array('imgfile', 20) | ||||
|  | ||||
| 				if(f.size > maxFileSizes.img) { | ||||
| 					return cb(null, false); | ||||
| 				} | ||||
|     // -> FILES | ||||
|  | ||||
| 				//-> Check MIME type (quick check only) | ||||
|     this.uploadFileHandler = multer({ | ||||
|       storage: multer.diskStorage({ | ||||
|         destination: (req, f, cb) => { | ||||
|           cb(null, path.resolve(ROOTPATH, appconfig.paths.data, 'temp-upload')) | ||||
|         } | ||||
|       }), | ||||
|       fileFilter: (req, f, cb) => { | ||||
|         // -> Check filesize | ||||
|  | ||||
| 				if(!_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], f.mimetype)) { | ||||
| 					return cb(null, false); | ||||
| 				} | ||||
|         if (f.size > maxFileSizes.file) { | ||||
|           return cb(null, false) | ||||
|         } | ||||
|  | ||||
| 				cb(null, true); | ||||
| 			} | ||||
| 		}).array('imgfile', 20); | ||||
|         cb(null, true) | ||||
|       } | ||||
|     }).array('binfile', 20) | ||||
|  | ||||
| 		//-> FILES | ||||
|     return true | ||||
|   }, | ||||
|  | ||||
| 		this.uploadFileHandler = multer({ | ||||
| 			storage: multer.diskStorage({ | ||||
| 				destination: (req, f, cb) => { | ||||
| 					cb(null, path.resolve(ROOTPATH, appconfig.paths.data, 'temp-upload')); | ||||
| 				} | ||||
| 			}), | ||||
| 			fileFilter: (req, f, cb) => { | ||||
|   /** | ||||
|    * Creates a base directories (Synchronous). | ||||
|    * | ||||
|    * @param      {Object}  appconfig  The application config | ||||
|    * @return     {Void}  Void | ||||
|    */ | ||||
|   createBaseDirectories (appconfig) { | ||||
|     winston.info('[SERVER] Checking data directories...') | ||||
|  | ||||
| 				//-> Check filesize | ||||
|     try { | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data)) | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './cache')) | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './thumbs')) | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './temp-upload')) | ||||
|  | ||||
| 				if(f.size > maxFileSizes.file) { | ||||
| 					return cb(null, false); | ||||
| 				} | ||||
|       if (os.type() !== 'Windows_NT') { | ||||
|         fs.chmodSync(path.resolve(ROOTPATH, appconfig.paths.data, './temp-upload'), '644') | ||||
|       } | ||||
|  | ||||
| 				cb(null, true); | ||||
| 			} | ||||
| 		}).array('binfile', 20); | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo)) | ||||
|       fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo, './uploads')) | ||||
|  | ||||
| 		return true; | ||||
|       if (os.type() !== 'Windows_NT') { | ||||
|         fs.chmodSync(path.resolve(ROOTPATH, appconfig.paths.repo, './uploads'), '644') | ||||
|       } | ||||
|     } catch (err) { | ||||
|       winston.error(err) | ||||
|     } | ||||
|  | ||||
| 	}, | ||||
|     winston.info('[SERVER] Data and Repository directories are OK.') | ||||
|  | ||||
| 	/** | ||||
| 	 * Creates a base directories (Synchronous). | ||||
| 	 * | ||||
| 	 * @param      {Object}  appconfig  The application config | ||||
| 	 * @return     {Void}  Void | ||||
| 	 */ | ||||
| 	createBaseDirectories(appconfig) { | ||||
|     return | ||||
|   }, | ||||
|  | ||||
| 		winston.info('[SERVER] Checking data directories...'); | ||||
|   /** | ||||
|    * Gets the uploads path. | ||||
|    * | ||||
|    * @return     {String}  The uploads path. | ||||
|    */ | ||||
|   getUploadsPath () { | ||||
|     return this._uploadsPath | ||||
|   }, | ||||
|  | ||||
| 		try { | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data)); | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './cache')); | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './thumbs')); | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.data, './temp-upload')); | ||||
|   /** | ||||
|    * Gets the thumbnails folder path. | ||||
|    * | ||||
|    * @return     {String}  The thumbs path. | ||||
|    */ | ||||
|   getThumbsPath () { | ||||
|     return this._uploadsThumbsPath | ||||
|   }, | ||||
|  | ||||
| 			if(os.type() !== 'Windows_NT') { | ||||
| 				fs.chmodSync(path.resolve(ROOTPATH, appconfig.paths.data, './temp-upload'), '644'); | ||||
| 			} | ||||
|   /** | ||||
|    * Check if filename is valid and unique | ||||
|    * | ||||
|    * @param      {String}           f        The filename | ||||
|    * @param      {String}           fld      The containing folder | ||||
|    * @param      {boolean}          isImage  Indicates if image | ||||
|    * @return     {Promise<String>}  Promise of the accepted filename | ||||
|    */ | ||||
|   validateUploadsFilename (f, fld, isImage) { | ||||
|     let fObj = path.parse(f) | ||||
|     let fname = _.chain(fObj.name).trim().toLower().kebabCase().value().replace(/[^a-z0-9-]+/g, '') | ||||
|     let fext = _.toLower(fObj.ext) | ||||
|  | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo)); | ||||
| 			fs.ensureDirSync(path.resolve(ROOTPATH, appconfig.paths.repo, './uploads')); | ||||
|     if (isImage && !_.includes(['.jpg', '.jpeg', '.png', '.gif', '.webp'], fext)) { | ||||
|       fext = '.png' | ||||
|     } | ||||
|  | ||||
| 			if(os.type() !== 'Windows_NT') { | ||||
| 				fs.chmodSync(path.resolve(ROOTPATH, appconfig.paths.repo, './upload'), '644'); | ||||
| 			} | ||||
|     f = fname + fext | ||||
|     let fpath = path.resolve(this._uploadsPath, fld, f) | ||||
|  | ||||
| 		} catch (err) { | ||||
| 			winston.error(err); | ||||
| 		} | ||||
|     return fs.statAsync(fpath).then((s) => { | ||||
|       throw new Error('File ' + f + ' already exists.') | ||||
|     }).catch((err) => { | ||||
|       if (err.code === 'ENOENT') { | ||||
|         return f | ||||
|       } | ||||
|       throw err | ||||
|     }) | ||||
|   } | ||||
|  | ||||
| 		winston.info('[SERVER] Data and Repository directories are OK.'); | ||||
|  | ||||
| 		return; | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the uploads path. | ||||
| 	 * | ||||
| 	 * @return     {String}  The uploads path. | ||||
| 	 */ | ||||
| 	getUploadsPath() { | ||||
| 		return this._uploadsPath; | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the thumbnails folder path. | ||||
| 	 * | ||||
| 	 * @return     {String}  The thumbs path. | ||||
| 	 */ | ||||
| 	getThumbsPath() { | ||||
| 		return this._uploadsThumbsPath; | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Check if filename is valid and unique | ||||
| 	 * | ||||
| 	 * @param      {String}           f        The filename | ||||
| 	 * @param      {String}           fld      The containing folder | ||||
| 	 * @param      {boolean}          isImage  Indicates if image | ||||
| 	 * @return     {Promise<String>}  Promise of the accepted filename | ||||
| 	 */ | ||||
| 	validateUploadsFilename(f, fld, isImage) { | ||||
|  | ||||
| 		let fObj = path.parse(f); | ||||
| 		let fname = _.chain(fObj.name).trim().toLower().kebabCase().value().replace(/[^a-z0-9\-]+/g, ''); | ||||
| 		let fext = _.toLower(fObj.ext); | ||||
|  | ||||
| 		if(isImage && !_.includes(['.jpg', '.jpeg', '.png', '.gif', '.webp'], fext)) { | ||||
| 			fext = '.png'; | ||||
| 		} | ||||
|  | ||||
| 		f = fname + fext; | ||||
| 		let fpath = path.resolve(this._uploadsPath, fld, f); | ||||
|  | ||||
| 		return fs.statAsync(fpath).then((s) => { | ||||
| 			throw new Error('File ' + f + ' already exists.'); | ||||
| 		}).catch((err) => { | ||||
| 			if(err.code === 'ENOENT') { | ||||
| 				return f; | ||||
| 			} | ||||
| 			throw err; | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| }; | ||||
| } | ||||
|   | ||||
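For reference, the validateUploadsFilename helper converted above slugifies the base name, coerces unrecognized image extensions to .png, and resolves only when no file with the resulting name already exists in the target folder. A minimal usage sketch, assuming this module is exposed as the lcdata reference used elsewhere in this commit; the folder and filename below are invented examples:

// Hedged sketch: 'vacation' and the filename are sample values only.
lcdata.validateUploadsFilename('My Photo (1).JPG', 'vacation', true).then((name) => {
  // name -> 'my-photo-1.jpg' when repo/uploads/vacation/my-photo-1.jpg does not exist yet
  console.log(name)
}).catch((err) => {
  // rejects with Error('File my-photo-1.jpg already exists.') when that path is already taken
  winston.warn(err.message)
})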
							
								
								
									
500  libs/markdown.js
							| @@ -1,86 +1,85 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| var Promise = require('bluebird'), | ||||
| 	md = require('markdown-it'), | ||||
| 	mdEmoji = require('markdown-it-emoji'), | ||||
| 	mdTaskLists = require('markdown-it-task-lists'), | ||||
| 	mdAbbr = require('markdown-it-abbr'), | ||||
| 	mdAnchor = require('markdown-it-anchor'), | ||||
| 	mdFootnote = require('markdown-it-footnote'), | ||||
| 	mdExternalLinks = require('markdown-it-external-links'), | ||||
| 	mdExpandTabs = require('markdown-it-expand-tabs'), | ||||
| 	mdAttrs = require('markdown-it-attrs'), | ||||
| 	hljs = require('highlight.js'), | ||||
| 	cheerio = require('cheerio'), | ||||
| 	_ = require('lodash'), | ||||
| 	mdRemove = require('remove-markdown'); | ||||
| const md = require('markdown-it') | ||||
| const mdEmoji = require('markdown-it-emoji') | ||||
| const mdTaskLists = require('markdown-it-task-lists') | ||||
| const mdAbbr = require('markdown-it-abbr') | ||||
| const mdAnchor = require('markdown-it-anchor') | ||||
| const mdFootnote = require('markdown-it-footnote') | ||||
| const mdExternalLinks = require('markdown-it-external-links') | ||||
| const mdExpandTabs = require('markdown-it-expand-tabs') | ||||
| const mdAttrs = require('markdown-it-attrs') | ||||
| const hljs = require('highlight.js') | ||||
| const cheerio = require('cheerio') | ||||
| const _ = require('lodash') | ||||
| const mdRemove = require('remove-markdown') | ||||
|  | ||||
| // Load plugins | ||||
|  | ||||
| var mkdown = md({ | ||||
| 		html: true, | ||||
| 		linkify: true, | ||||
| 		typography: true, | ||||
| 		highlight(str, lang) { | ||||
| 			if (lang && hljs.getLanguage(lang)) { | ||||
| 				try { | ||||
| 					return '<pre class="hljs"><code>' + hljs.highlight(lang, str, true).value + '</code></pre>'; | ||||
| 				} catch (err) { | ||||
| 					return '<pre><code>' + str + '</code></pre>'; | ||||
| 				} | ||||
| 			} | ||||
| 			return '<pre><code>' + str + '</code></pre>'; | ||||
| 		} | ||||
| 	}) | ||||
| 	.use(mdEmoji) | ||||
| 	.use(mdTaskLists) | ||||
| 	.use(mdAbbr) | ||||
| 	.use(mdAnchor, { | ||||
| 		slugify: _.kebabCase, | ||||
| 		permalink: true, | ||||
| 		permalinkClass: 'toc-anchor', | ||||
| 		permalinkSymbol: '#', | ||||
| 		permalinkBefore: true | ||||
| 	}) | ||||
| 	.use(mdFootnote) | ||||
| 	.use(mdExternalLinks, { | ||||
| 		externalClassName: 'external-link', | ||||
| 		internalClassName: 'internal-link' | ||||
| 	}) | ||||
| 	.use(mdExpandTabs, { | ||||
| 		tabWidth: 4 | ||||
| 	}) | ||||
| 	.use(mdAttrs); | ||||
|   html: true, | ||||
|   linkify: true, | ||||
|   typography: true, | ||||
|   highlight (str, lang) { | ||||
|     if (lang && hljs.getLanguage(lang)) { | ||||
|       try { | ||||
|         return '<pre class="hljs"><code>' + hljs.highlight(lang, str, true).value + '</code></pre>' | ||||
|       } catch (err) { | ||||
|         return '<pre><code>' + str + '</code></pre>' | ||||
|       } | ||||
|     } | ||||
|     return '<pre><code>' + str + '</code></pre>' | ||||
|   } | ||||
| }) | ||||
|   .use(mdEmoji) | ||||
|   .use(mdTaskLists) | ||||
|   .use(mdAbbr) | ||||
|   .use(mdAnchor, { | ||||
|     slugify: _.kebabCase, | ||||
|     permalink: true, | ||||
|     permalinkClass: 'toc-anchor', | ||||
|     permalinkSymbol: '#', | ||||
|     permalinkBefore: true | ||||
|   }) | ||||
|   .use(mdFootnote) | ||||
|   .use(mdExternalLinks, { | ||||
|     externalClassName: 'external-link', | ||||
|     internalClassName: 'internal-link' | ||||
|   }) | ||||
|   .use(mdExpandTabs, { | ||||
|     tabWidth: 4 | ||||
|   }) | ||||
|   .use(mdAttrs) | ||||
|  | ||||
| // Rendering rules | ||||
|  | ||||
| mkdown.renderer.rules.emoji = function(token, idx) { | ||||
| 	return '<i class="twa twa-' + _.replace(token[idx].markup, /_/g, '-') + '"></i>'; | ||||
| }; | ||||
| mkdown.renderer.rules.emoji = function (token, idx) { | ||||
|   return '<i class="twa twa-' + _.replace(token[idx].markup, /_/g, '-') + '"></i>' | ||||
| } | ||||
|  | ||||
| // Video rules | ||||
|  | ||||
| const videoRules = [ | ||||
| 	{ | ||||
| 		selector: 'a.youtube', | ||||
| 		regexp: new RegExp(/(?:(?:youtu\.be\/|v\/|vi\/|u\/\w\/|embed\/)|(?:(?:watch)?\?v(?:i)?=|\&v(?:i)?=))([^#\&\?]*).*/, 'i'), | ||||
| 		output: '<iframe width="640" height="360" src="https://www.youtube.com/embed/{0}?rel=0" frameborder="0" allowfullscreen></iframe>' | ||||
| 	}, | ||||
| 	{ | ||||
| 		selector: 'a.vimeo', | ||||
| 		regexp: new RegExp(/vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/(?:[^\/]*)\/videos\/|album\/(?:\d+)\/video\/|)(\d+)(?:$|\/|\?)/, 'i'), | ||||
| 		output: '<iframe src="https://player.vimeo.com/video/{0}" width="640" height="360" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>' | ||||
| 	}, | ||||
| 	{ | ||||
| 		selector: 'a.dailymotion', | ||||
| 		regexp: new RegExp(/(?:dailymotion\.com(?:\/embed)?(?:\/video|\/hub)|dai\.ly)\/([0-9a-z]+)(?:[\-_0-9a-zA-Z]+(?:#video=)?([a-z0-9]+)?)?/, 'i'), | ||||
| 		output: '<iframe width="640" height="360" src="//www.dailymotion.com/embed/video/{0}?endscreen-enable=false" frameborder="0" allowfullscreen></iframe>' | ||||
| 	}, | ||||
| 	{ | ||||
| 		selector: 'a.video', | ||||
| 		regexp: false, | ||||
| 		output: '<video width="640" height="360" controls preload="metadata"><source src="{0}" type="video/mp4"></video>' | ||||
| 	} | ||||
|   { | ||||
|     selector: 'a.youtube', | ||||
|     regexp: new RegExp(/(?:(?:youtu\.be\/|v\/|vi\/|u\/\w\/|embed\/)|(?:(?:watch)?\?v(?:i)?=|&v(?:i)?=))([^#&?]*).*/, 'i'), | ||||
|     output: '<iframe width="640" height="360" src="https://www.youtube.com/embed/{0}?rel=0" frameborder="0" allowfullscreen></iframe>' | ||||
|   }, | ||||
|   { | ||||
|     selector: 'a.vimeo', | ||||
|     regexp: new RegExp(/vimeo.com\/(?:channels\/(?:\w+\/)?|groups\/(?:[^/]*)\/videos\/|album\/(?:\d+)\/video\/|)(\d+)(?:$|\/|\?)/, 'i'), | ||||
|     output: '<iframe src="https://player.vimeo.com/video/{0}" width="640" height="360" frameborder="0" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>' | ||||
|   }, | ||||
|   { | ||||
|     selector: 'a.dailymotion', | ||||
|     regexp: new RegExp(/(?:dailymotion\.com(?:\/embed)?(?:\/video|\/hub)|dai\.ly)\/([0-9a-z]+)(?:[-_0-9a-zA-Z]+(?:#video=)?([a-z0-9]+)?)?/, 'i'), | ||||
|     output: '<iframe width="640" height="360" src="//www.dailymotion.com/embed/video/{0}?endscreen-enable=false" frameborder="0" allowfullscreen></iframe>' | ||||
|   }, | ||||
|   { | ||||
|     selector: 'a.video', | ||||
|     regexp: false, | ||||
|     output: '<video width="640" height="360" controls preload="metadata"><source src="{0}" type="video/mp4"></video>' | ||||
|   } | ||||
| ] | ||||
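// Note (sketch): parseContent() below matches each rule's regexp against the link's href,
// keeps the last string capture as the video ID, and splices it into the output template
// via the '{0}' placeholder. For a hypothetical link such as
// https://www.youtube.com/watch?v=dQw4w9WgXcQ the first rule yields
// _.replace(videoRules[0].output, '{0}', 'dQw4w9WgXcQ'), i.e. an iframe pointing at
// https://www.youtube.com/embed/dQw4w9WgXcQ?rel=0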
|  | ||||
| /** | ||||
| @@ -90,81 +89,79 @@ const videoRules = [ | ||||
|  * @return     {Array}             TOC tree | ||||
|  */ | ||||
| const parseTree = (content) => { | ||||
|   let tokens = md().parse(content, {}) | ||||
|   let tocArray = [] | ||||
|  | ||||
| 	let tokens = md().parse(content, {}); | ||||
| 	let tocArray = []; | ||||
|   // -> Extract headings and their respective levels | ||||
|  | ||||
| 	//-> Extract headings and their respective levels | ||||
|   for (let i = 0; i < tokens.length; i++) { | ||||
|     if (tokens[i].type !== 'heading_close') { | ||||
|       continue | ||||
|     } | ||||
|  | ||||
| 	for (let i = 0; i < tokens.length; i++) { | ||||
| 		if (tokens[i].type !== "heading_close") { | ||||
| 			continue; | ||||
| 		} | ||||
|     const heading = tokens[i - 1] | ||||
|     const headingclose = tokens[i] | ||||
|  | ||||
| 		const heading = tokens[i - 1]; | ||||
| 		const heading_close = tokens[i]; | ||||
|     if (heading.type === 'inline') { | ||||
|       let content = '' | ||||
|       let anchor = '' | ||||
|       if (heading.children && heading.children[0].type === 'link_open') { | ||||
|         content = heading.children[1].content | ||||
|         anchor = _.kebabCase(content) | ||||
|       } else { | ||||
|         content = heading.content | ||||
|         anchor = _.kebabCase(heading.children.reduce((acc, t) => acc + t.content, '')) | ||||
|       } | ||||
|  | ||||
| 		if (heading.type === "inline") { | ||||
| 			let content = ""; | ||||
| 			let anchor = ""; | ||||
| 			if (heading.children && heading.children[0].type === "link_open") { | ||||
| 			 content = heading.children[1].content; | ||||
| 			 anchor = _.kebabCase(content); | ||||
| 			} else { | ||||
| 			 content = heading.content; | ||||
| 			 anchor = _.kebabCase(heading.children.reduce((acc, t) => acc + t.content, "")); | ||||
| 			} | ||||
|       tocArray.push({ | ||||
|         content, | ||||
|         anchor, | ||||
|         level: +headingclose.tag.substr(1, 1) | ||||
|       }) | ||||
|     } | ||||
|   } | ||||
|  | ||||
| 			tocArray.push({ | ||||
| 			 content, | ||||
| 			 anchor, | ||||
| 			 level: +heading_close.tag.substr(1, 1) | ||||
| 			}); | ||||
| 		} | ||||
| 	 } | ||||
|   // -> Exclude levels deeper than 2 | ||||
|  | ||||
| 	 //-> Exclude levels deeper than 2 | ||||
|   _.remove(tocArray, (n) => { return n.level > 2 }) | ||||
|  | ||||
| 	 _.remove(tocArray, (n) => { return n.level > 2; }); | ||||
|   // -> Build tree from flat array | ||||
|  | ||||
| 	 //-> Build tree from flat array | ||||
|  | ||||
| 	 return _.reduce(tocArray, (tree, v) => { | ||||
| 		let treeLength = tree.length - 1; | ||||
| 		if(v.level < 2) { | ||||
| 			tree.push({ | ||||
| 				content: v.content, | ||||
| 				anchor: v.anchor, | ||||
| 				nodes: [] | ||||
| 			}); | ||||
| 		} else { | ||||
| 			let lastNodeLevel = 1; | ||||
| 			let GetNodePath = (startPos) => { | ||||
| 				lastNodeLevel++; | ||||
| 				if(_.isEmpty(startPos)) { | ||||
| 					startPos = 'nodes'; | ||||
| 				} | ||||
| 				if(lastNodeLevel === v.level) { | ||||
| 					return startPos; | ||||
| 				} else { | ||||
| 					return GetNodePath(startPos + '[' + (_.at(tree[treeLength], startPos).length - 1) + '].nodes'); | ||||
| 				} | ||||
| 			}; | ||||
| 			let lastNodePath = GetNodePath(); | ||||
| 			let lastNode = _.get(tree[treeLength], lastNodePath); | ||||
| 			if(lastNode) { | ||||
| 				lastNode.push({ | ||||
| 					content: v.content, | ||||
| 					anchor: v.anchor, | ||||
| 					nodes: [] | ||||
| 				}); | ||||
| 				_.set(tree[treeLength], lastNodePath, lastNode); | ||||
| 			} | ||||
| 		} | ||||
| 		return tree; | ||||
| 	}, []); | ||||
|  | ||||
| }; | ||||
|   return _.reduce(tocArray, (tree, v) => { | ||||
|     let treeLength = tree.length - 1 | ||||
|     if (v.level < 2) { | ||||
|       tree.push({ | ||||
|         content: v.content, | ||||
|         anchor: v.anchor, | ||||
|         nodes: [] | ||||
|       }) | ||||
|     } else { | ||||
|       let lastNodeLevel = 1 | ||||
|       let GetNodePath = (startPos) => { | ||||
|         lastNodeLevel++ | ||||
|         if (_.isEmpty(startPos)) { | ||||
|           startPos = 'nodes' | ||||
|         } | ||||
|         if (lastNodeLevel === v.level) { | ||||
|           return startPos | ||||
|         } else { | ||||
|           return GetNodePath(startPos + '[' + (_.at(tree[treeLength], startPos).length - 1) + '].nodes') | ||||
|         } | ||||
|       } | ||||
|       let lastNodePath = GetNodePath() | ||||
|       let lastNode = _.get(tree[treeLength], lastNodePath) | ||||
|       if (lastNode) { | ||||
|         lastNode.push({ | ||||
|           content: v.content, | ||||
|           anchor: v.anchor, | ||||
|           nodes: [] | ||||
|         }) | ||||
|         _.set(tree[treeLength], lastNodePath, lastNode) | ||||
|       } | ||||
|     } | ||||
|     return tree | ||||
|   }, []) | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Parse markdown content to HTML | ||||
| @@ -172,87 +169,85 @@ const parseTree = (content) => { | ||||
|  * @param      {String}    content  Markdown content | ||||
|  * @return     {String}  HTML formatted content | ||||
|  */ | ||||
| const parseContent = (content)  => { | ||||
| const parseContent = (content) => { | ||||
|   let output = mkdown.render(content) | ||||
|   let cr = cheerio.load(output) | ||||
|  | ||||
| 	let output = mkdown.render(content); | ||||
| 	let cr = cheerio.load(output); | ||||
|   // -> Check for empty first element | ||||
|  | ||||
| 	//-> Check for empty first element | ||||
|   let firstElm = cr.root().children().first()[0] | ||||
|   if (firstElm.type === 'tag' && firstElm.name === 'p') { | ||||
|     let firstElmChildren = firstElm.children | ||||
|     if (firstElmChildren.length < 1) { | ||||
|       firstElm.remove() | ||||
|     } else if (firstElmChildren.length === 1 && firstElmChildren[0].type === 'tag' && firstElmChildren[0].name === 'img') { | ||||
|       cr(firstElm).addClass('is-gapless') | ||||
|     } | ||||
|   } | ||||
|  | ||||
| 	let firstElm = cr.root().children().first()[0]; | ||||
| 	if(firstElm.type === 'tag' && firstElm.name === 'p') { | ||||
| 		let firstElmChildren = firstElm.children; | ||||
| 		if(firstElmChildren.length < 1) { | ||||
| 			firstElm.remove(); | ||||
| 		} else if(firstElmChildren.length === 1 && firstElmChildren[0].type === 'tag' && firstElmChildren[0].name === 'img') { | ||||
| 			cr(firstElm).addClass('is-gapless'); | ||||
| 		} | ||||
| 	} | ||||
|   // -> Remove links in headers | ||||
|  | ||||
| 	//-> Remove links in headers | ||||
|   cr('h1 > a:not(.toc-anchor), h2 > a:not(.toc-anchor), h3 > a:not(.toc-anchor)').each((i, elm) => { | ||||
|     let txtLink = cr(elm).text() | ||||
|     cr(elm).replaceWith(txtLink) | ||||
|   }) | ||||
|  | ||||
| 	cr('h1 > a:not(.toc-anchor), h2 > a:not(.toc-anchor), h3 > a:not(.toc-anchor)').each((i, elm) => { | ||||
| 		let txtLink = cr(elm).text(); | ||||
| 		cr(elm).replaceWith(txtLink); | ||||
| 	}); | ||||
|   // -> Re-attach blockquote styling classes to their parents | ||||
|  | ||||
| 	//-> Re-attach blockquote styling classes to their parents | ||||
| 	 | ||||
| 	cr.root().children('blockquote').each((i, elm) => { | ||||
| 		if(cr(elm).children().length > 0) { | ||||
| 			let bqLastChild = cr(elm).children().last()[0]; | ||||
| 			let bqLastChildClasses = cr(bqLastChild).attr('class'); | ||||
| 			if(bqLastChildClasses && bqLastChildClasses.length > 0) { | ||||
| 				cr(bqLastChild).removeAttr('class'); | ||||
| 				cr(elm).addClass(bqLastChildClasses); | ||||
| 			} | ||||
| 		} | ||||
| 	}); | ||||
|   cr.root().children('blockquote').each((i, elm) => { | ||||
|     if (cr(elm).children().length > 0) { | ||||
|       let bqLastChild = cr(elm).children().last()[0] | ||||
|       let bqLastChildClasses = cr(bqLastChild).attr('class') | ||||
|       if (bqLastChildClasses && bqLastChildClasses.length > 0) { | ||||
|         cr(bqLastChild).removeAttr('class') | ||||
|         cr(elm).addClass(bqLastChildClasses) | ||||
|       } | ||||
|     } | ||||
|   }) | ||||
|  | ||||
| 	//-> Enclose content below headers | ||||
|   // -> Enclose content below headers | ||||
|  | ||||
| 	cr('h2').each((i, elm) => { | ||||
| 		let subH2Content = cr(elm).nextUntil('h1, h2'); | ||||
| 		cr(elm).after('<div class="indent-h2"></div>'); | ||||
| 		let subH2Container = cr(elm).next('.indent-h2'); | ||||
| 		_.forEach(subH2Content, (ch) => { | ||||
| 			cr(subH2Container).append(ch); | ||||
| 		}); | ||||
| 	}); | ||||
|   cr('h2').each((i, elm) => { | ||||
|     let subH2Content = cr(elm).nextUntil('h1, h2') | ||||
|     cr(elm).after('<div class="indent-h2"></div>') | ||||
|     let subH2Container = cr(elm).next('.indent-h2') | ||||
|     _.forEach(subH2Content, (ch) => { | ||||
|       cr(subH2Container).append(ch) | ||||
|     }) | ||||
|   }) | ||||
|  | ||||
| 	cr('h3').each((i, elm) => { | ||||
| 		let subH3Content = cr(elm).nextUntil('h1, h2, h3'); | ||||
| 		cr(elm).after('<div class="indent-h3"></div>'); | ||||
| 		let subH3Container = cr(elm).next('.indent-h3'); | ||||
| 		_.forEach(subH3Content, (ch) => { | ||||
| 			cr(subH3Container).append(ch); | ||||
| 		}); | ||||
| 	}); | ||||
|   cr('h3').each((i, elm) => { | ||||
|     let subH3Content = cr(elm).nextUntil('h1, h2, h3') | ||||
|     cr(elm).after('<div class="indent-h3"></div>') | ||||
|     let subH3Container = cr(elm).next('.indent-h3') | ||||
|     _.forEach(subH3Content, (ch) => { | ||||
|       cr(subH3Container).append(ch) | ||||
|     }) | ||||
|   }) | ||||
|  | ||||
| 	// Replace video links with embeds | ||||
|   // Replace video links with embeds | ||||
|  | ||||
| 	_.forEach(videoRules, (vrule) => { | ||||
| 		cr(vrule.selector).each((i, elm) => { | ||||
| 			let originLink = cr(elm).attr('href'); | ||||
| 			if(vrule.regexp) { | ||||
| 				let vidMatches = originLink.match(vrule.regexp); | ||||
| 				if((vidMatches && _.isArray(vidMatches))) { | ||||
| 					vidMatches = _.filter(vidMatches, (f) => { | ||||
| 						return f && _.isString(f); | ||||
| 					}); | ||||
| 					originLink = _.last(vidMatches); | ||||
| 				} | ||||
| 			} | ||||
| 			let processedLink = _.replace(vrule.output, '{0}', originLink); | ||||
| 			cr(elm).replaceWith(processedLink); | ||||
| 		}); | ||||
| 	}); | ||||
|   _.forEach(videoRules, (vrule) => { | ||||
|     cr(vrule.selector).each((i, elm) => { | ||||
|       let originLink = cr(elm).attr('href') | ||||
|       if (vrule.regexp) { | ||||
|         let vidMatches = originLink.match(vrule.regexp) | ||||
|         if ((vidMatches && _.isArray(vidMatches))) { | ||||
|           vidMatches = _.filter(vidMatches, (f) => { | ||||
|             return f && _.isString(f) | ||||
|           }) | ||||
|           originLink = _.last(vidMatches) | ||||
|         } | ||||
|       } | ||||
|       let processedLink = _.replace(vrule.output, '{0}', originLink) | ||||
|       cr(elm).replaceWith(processedLink) | ||||
|     }) | ||||
|   }) | ||||
|  | ||||
| 	output = cr.html(); | ||||
|   output = cr.html() | ||||
|  | ||||
| 	return output; | ||||
|  | ||||
| }; | ||||
|   return output | ||||
| } | ||||
|  | ||||
| /** | ||||
|  * Parse meta-data tags from content | ||||
| @@ -261,58 +256,57 @@ const parseContent = (content)  => { | ||||
|  * @return     {Object}  Properties found in the content and their values | ||||
|  */ | ||||
| const parseMeta = (content) => { | ||||
|   let commentMeta = new RegExp('<!-- ?([a-zA-Z]+):(.*)-->', 'g') | ||||
|   let results = {} | ||||
|   let match | ||||
|   while ((match = commentMeta.exec(content)) !== null) { | ||||
|     results[_.toLower(match[1])] = _.trim(match[2]) | ||||
|   } | ||||
|  | ||||
| 	let commentMeta = new RegExp('<!-- ?([a-zA-Z]+):(.*)-->','g'); | ||||
| 	let results = {}, match; | ||||
| 	while(match = commentMeta.exec(content)) { | ||||
| 		results[_.toLower(match[1])] = _.trim(match[2]); | ||||
| 	} | ||||
|  | ||||
| 	return results; | ||||
|  | ||||
| }; | ||||
|   return results | ||||
| } | ||||
|  | ||||
| module.exports = { | ||||
|  | ||||
| 	/** | ||||
| 	 * Parse content and return all data | ||||
| 	 * | ||||
| 	 * @param      {String}  content  Markdown-formatted content | ||||
| 	 * @return     {Object}  Object containing meta, html and tree data | ||||
| 	 */ | ||||
| 	parse(content) { | ||||
| 		return { | ||||
| 			meta: parseMeta(content), | ||||
| 			html: parseContent(content), | ||||
| 			tree: parseTree(content) | ||||
| 		}; | ||||
| 	}, | ||||
|   /** | ||||
|    * Parse content and return all data | ||||
|    * | ||||
|    * @param      {String}  content  Markdown-formatted content | ||||
|    * @return     {Object}  Object containing meta, html and tree data | ||||
|    */ | ||||
|   parse (content) { | ||||
|     return { | ||||
|       meta: parseMeta(content), | ||||
|       html: parseContent(content), | ||||
|       tree: parseTree(content) | ||||
|     } | ||||
|   }, | ||||
|  | ||||
| 	parseContent, | ||||
| 	parseMeta, | ||||
| 	parseTree, | ||||
|   parseContent, | ||||
|   parseMeta, | ||||
|   parseTree, | ||||
|  | ||||
| 	/** | ||||
| 	 * Strips non-text elements from Markdown content | ||||
| 	 * | ||||
| 	 * @param      {String}  content  Markdown-formatted content | ||||
| 	 * @return     {String}  Text-only version | ||||
| 	 */ | ||||
| 	removeMarkdown(content) { | ||||
| 		return mdRemove(_.chain(content) | ||||
| 			.replace(/<!-- ?([a-zA-Z]+):(.*)-->/g, '') | ||||
| 			.replace(/```[^`]+```/g, '') | ||||
| 			.replace(/`[^`]+`/g, '') | ||||
| 			.replace(new RegExp('(?!mailto:)(?:(?:http|https|ftp)://)(?:\\S+(?::\\S*)?@)?(?:(?:(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[0-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*(?:\\.(?:[a-z\\u00a1-\\uffff]{2,})))|localhost)(?::\\d{2,5})?(?:(/|\\?|#)[^\\s]*)?', 'g'), '') | ||||
| 			.replace(/\r?\n|\r/g, ' ') | ||||
| 			.deburr() | ||||
| 			.toLower() | ||||
| 			.replace(/(\b([^a-z]+)\b)/g, ' ') | ||||
| 			.replace(/[^a-z]+/g, ' ') | ||||
| 			.replace(/(\b(\w{1,2})\b(\W|$))/g, '') | ||||
| 			.replace(/\s\s+/g, ' ') | ||||
| 			.value() | ||||
| 		); | ||||
| 	} | ||||
|   /** | ||||
|    * Strips non-text elements from Markdown content | ||||
|    * | ||||
|    * @param      {String}  content  Markdown-formatted content | ||||
|    * @return     {String}  Text-only version | ||||
|    */ | ||||
|   removeMarkdown (content) { | ||||
|     return mdRemove(_.chain(content) | ||||
|       .replace(/<!-- ?([a-zA-Z]+):(.*)-->/g, '') | ||||
|       .replace(/```[^`]+```/g, '') | ||||
|       .replace(/`[^`]+`/g, '') | ||||
|       .replace(new RegExp('(?!mailto:)(?:(?:http|https|ftp)://)(?:\\S+(?::\\S*)?@)?(?:(?:(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[0-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*(?:\\.(?:[a-z\\u00a1-\\uffff]{2,})))|localhost)(?::\\d{2,5})?(?:(/|\\?|#)[^\\s]*)?', 'g'), '') | ||||
|       .replace(/\r?\n|\r/g, ' ') | ||||
|       .deburr() | ||||
|       .toLower() | ||||
|       .replace(/(\b([^a-z]+)\b)/g, ' ') | ||||
|       .replace(/[^a-z]+/g, ' ') | ||||
|       .replace(/(\b(\w{1,2})\b(\W|$))/g, '') | ||||
|       .replace(/\s\s+/g, ' ') | ||||
|       .value() | ||||
|     ) | ||||
|   } | ||||
|  | ||||
| }; | ||||
| } | ||||
|   | ||||
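To recap the converted API of libs/markdown.js, parse() bundles the three derived views of a document in one object. A rough usage sketch; the sample content below is invented:

// Rough sketch, assuming the module is required from its path in this commit.
const mark = require('./libs/markdown')

const page = mark.parse('<!-- TITLE: Home -->\n# Home\n## Getting Started\nHello **world**.')
// page.meta -> { title: 'Home' }  (parsed from the HTML comment tags)
// page.html -> rendered HTML, with h2/h3 sections wrapped in .indent-h2 / .indent-h3 containers
// page.tree -> [ { content: 'Home', anchor: 'home', nodes: [ { content: 'Getting Started', anchor: 'getting-started', nodes: [] } ] } ]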
| @@ -1,292 +1,255 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| var path = require('path'), | ||||
| 	Promise = require('bluebird'), | ||||
| 	fs = Promise.promisifyAll(require('fs-extra')), | ||||
| 	readChunk = require('read-chunk'), | ||||
| 	fileType = require('file-type'), | ||||
| 	mime = require('mime-types'), | ||||
| 	farmhash = require('farmhash'), | ||||
| 	moment = require('moment'), | ||||
| 	chokidar = require('chokidar'), | ||||
| 	sharp = require('sharp'), | ||||
| 	_ = require('lodash'); | ||||
| const path = require('path') | ||||
| const Promise = require('bluebird') | ||||
| const fs = Promise.promisifyAll(require('fs-extra')) | ||||
| const readChunk = require('read-chunk') | ||||
| const fileType = require('file-type') | ||||
| const mime = require('mime-types') | ||||
| const farmhash = require('farmhash') | ||||
| const chokidar = require('chokidar') | ||||
| const sharp = require('sharp') | ||||
| const _ = require('lodash') | ||||
|  | ||||
| /** | ||||
|  * Uploads - Agent | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_uploadsPath: './repo/uploads', | ||||
| 	_uploadsThumbsPath: './data/thumbs', | ||||
|  | ||||
| 	_watcher: null, | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Uploads model | ||||
| 	 * | ||||
| 	 * @return     {Object}  Uploads model instance | ||||
| 	 */ | ||||
| 	init() { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		self._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads'); | ||||
| 		self._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs'); | ||||
|  | ||||
| 		// Disable Sharp cache, as it cause file locks issues when deleting uploads. | ||||
| 		sharp.cache(false); | ||||
|  | ||||
| 		return self; | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Watch the uploads folder for changes | ||||
| 	 *  | ||||
| 	 * @return     {Void}  Void | ||||
| 	 */ | ||||
| 	watch() { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		self._watcher = chokidar.watch(self._uploadsPath, { | ||||
| 			persistent: true, | ||||
| 			ignoreInitial: true, | ||||
| 			cwd: self._uploadsPath, | ||||
| 			depth: 1, | ||||
| 			awaitWriteFinish: true | ||||
| 		}); | ||||
|  | ||||
| 		//-> Add new upload file | ||||
|  | ||||
| 		self._watcher.on('add', (p) => { | ||||
|  | ||||
| 			let pInfo = self.parseUploadsRelPath(p); | ||||
| 			return self.processFile(pInfo.folder, pInfo.filename).then((mData) => { | ||||
| 				return db.UplFile.findByIdAndUpdate(mData._id, mData, { upsert: true }); | ||||
| 			}).then(() => { | ||||
| 				return git.commitUploads('Uploaded ' + p); | ||||
| 			}); | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 		//-> Remove upload file | ||||
|  | ||||
| 		self._watcher.on('unlink', (p) => { | ||||
|  | ||||
| 			let pInfo = self.parseUploadsRelPath(p); | ||||
| 			return git.commitUploads('Deleted/Renamed ' + p); | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Initial Uploads scan | ||||
| 	 * | ||||
| 	 * @return     {Promise<Void>}  Promise of the scan operation | ||||
| 	 */ | ||||
| 	initialScan() { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		return fs.readdirAsync(self._uploadsPath).then((ls) => { | ||||
|  | ||||
| 			// Get all folders | ||||
|  | ||||
| 			return Promise.map(ls, (f) => { | ||||
| 				return fs.statAsync(path.join(self._uploadsPath, f)).then((s) => { return { filename: f, stat: s }; }); | ||||
| 			}).filter((s) => { return s.stat.isDirectory(); }).then((arrDirs) => { | ||||
|  | ||||
| 				let folderNames = _.map(arrDirs, 'filename'); | ||||
| 				folderNames.unshift(''); | ||||
|  | ||||
| 				// Add folders to DB | ||||
|  | ||||
| 				return db.UplFolder.remove({}).then(() => { | ||||
| 					return db.UplFolder.insertMany(_.map(folderNames, (f) => { | ||||
| 						return { | ||||
| 							_id: 'f:' + f, | ||||
| 							name: f | ||||
| 						}; | ||||
| 					})); | ||||
| 				}).then(() => { | ||||
|  | ||||
| 					// Travel each directory and scan files | ||||
|  | ||||
| 					let allFiles = []; | ||||
|  | ||||
| 					return Promise.map(folderNames, (fldName) => { | ||||
|  | ||||
| 						let fldPath = path.join(self._uploadsPath, fldName); | ||||
| 						return fs.readdirAsync(fldPath).then((fList) => { | ||||
| 							return Promise.map(fList, (f) => { | ||||
| 								return upl.processFile(fldName, f).then((mData) => { | ||||
| 									if(mData) { | ||||
| 										allFiles.push(mData); | ||||
| 									} | ||||
| 									return true; | ||||
| 								}); | ||||
| 							}, {concurrency: 3}); | ||||
| 						}); | ||||
| 					}, {concurrency: 1}).finally(() => { | ||||
|  | ||||
| 						// Add files to DB | ||||
|  | ||||
| 						return db.UplFile.remove({}).then(() => { | ||||
| 							if(_.isArray(allFiles) && allFiles.length > 0) { | ||||
| 								return db.UplFile.insertMany(allFiles); | ||||
| 							} else { | ||||
| 								return true; | ||||
| 							} | ||||
| 						}); | ||||
|  | ||||
| 					}); | ||||
|  | ||||
| 				}); | ||||
| 				 | ||||
| 			}); | ||||
|  | ||||
| 		}).then(() => { | ||||
|  | ||||
| 			// Watch for new changes | ||||
|  | ||||
| 			return upl.watch(); | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Parse relative Uploads path | ||||
| 	 * | ||||
| 	 * @param      {String}  f       Relative Uploads path | ||||
| 	 * @return     {Object}  Parsed path (folder and filename) | ||||
| 	 */ | ||||
| 	parseUploadsRelPath(f) { | ||||
|  | ||||
| 		let fObj = path.parse(f); | ||||
| 		return { | ||||
| 			folder: fObj.dir, | ||||
| 			filename: fObj.base | ||||
| 		}; | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Get metadata from file and generate thumbnails if necessary | ||||
| 	 * | ||||
| 	 * @param      {String}  fldName  The folder name | ||||
| 	 * @param      {String}  f        The filename | ||||
| 	 * @return     {Promise<Object>}  Promise of the file metadata | ||||
| 	 */ | ||||
| 	processFile(fldName, f) { | ||||
|  | ||||
| 		let self = this; | ||||
|  | ||||
| 		let fldPath = path.join(self._uploadsPath, fldName); | ||||
| 		let fPath = path.join(fldPath, f); | ||||
| 		let fPathObj = path.parse(fPath); | ||||
| 		let fUid = farmhash.fingerprint32(fldName + '/' + f); | ||||
|  | ||||
| 		return fs.statAsync(fPath).then((s) => { | ||||
|  | ||||
| 			if(!s.isFile()) { return false; } | ||||
|  | ||||
| 			// Get MIME info | ||||
|  | ||||
| 			let mimeInfo = fileType(readChunk.sync(fPath, 0, 262)); | ||||
| 			if(_.isNil(mimeInfo)) { | ||||
| 				mimeInfo = { | ||||
| 					mime: mime.lookup(fPathObj.ext) || 'application/octet-stream' | ||||
| 				}; | ||||
| 			} | ||||
|  | ||||
| 			// Images | ||||
|  | ||||
| 			if(s.size < 3145728) { // ignore files larger than 3MB | ||||
| 				if(_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], mimeInfo.mime)) { | ||||
| 					return self.getImageMetadata(fPath).then((mImgData) => { | ||||
|  | ||||
| 						let cacheThumbnailPath = path.parse(path.join(self._uploadsThumbsPath, fUid + '.png')); | ||||
| 						let cacheThumbnailPathStr = path.format(cacheThumbnailPath); | ||||
|  | ||||
| 						let mData = { | ||||
| 							_id: fUid, | ||||
| 							category: 'image', | ||||
| 							mime: mimeInfo.mime, | ||||
| 							extra: _.pick(mImgData, ['format', 'width', 'height', 'density', 'hasAlpha', 'orientation']), | ||||
| 							folder: 'f:' + fldName, | ||||
| 							filename: f, | ||||
| 							basename: fPathObj.name, | ||||
| 							filesize: s.size | ||||
| 						}; | ||||
|  | ||||
| 						// Generate thumbnail | ||||
|  | ||||
| 						return fs.statAsync(cacheThumbnailPathStr).then((st) => { | ||||
| 							return st.isFile(); | ||||
| 						}).catch((err) => { | ||||
| 							return false; | ||||
| 						}).then((thumbExists) => { | ||||
|  | ||||
| 							return (thumbExists) ? mData : fs.ensureDirAsync(cacheThumbnailPath.dir).then(() => { | ||||
| 								return self.generateThumbnail(fPath, cacheThumbnailPathStr); | ||||
| 							}).return(mData); | ||||
|  | ||||
| 						}); | ||||
|  | ||||
| 					}); | ||||
| 				} | ||||
| 			} | ||||
|  | ||||
| 			// Other Files | ||||
| 			 | ||||
| 			return { | ||||
| 				_id: fUid, | ||||
| 				category: 'binary', | ||||
| 				mime: mimeInfo.mime, | ||||
| 				folder: 'f:' + fldName, | ||||
| 				filename: f, | ||||
| 				basename: fPathObj.name, | ||||
| 				filesize: s.size | ||||
| 			}; | ||||
|  | ||||
| 		}); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Generate thumbnail of image | ||||
| 	 * | ||||
| 	 * @param      {String}           sourcePath  The source path | ||||
| 	 * @param      {String}           destPath    The destination path | ||||
| 	 * @return     {Promise<Object>}  Promise returning the resized image info | ||||
| 	 */ | ||||
| 	generateThumbnail(sourcePath, destPath) { | ||||
|  | ||||
| 		return sharp(sourcePath) | ||||
| 						.withoutEnlargement() | ||||
| 						.resize(150,150) | ||||
| 						.background('white') | ||||
| 						.embed() | ||||
| 						.flatten() | ||||
| 						.toFormat('png') | ||||
| 						.toFile(destPath); | ||||
|  | ||||
| 	}, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the image metadata. | ||||
| 	 * | ||||
| 	 * @param      {String}  sourcePath  The source path | ||||
| 	 * @return     {Object}  The image metadata. | ||||
| 	 */ | ||||
| 	getImageMetadata(sourcePath) { | ||||
|  | ||||
| 		return sharp(sourcePath).metadata(); | ||||
|  | ||||
| 	} | ||||
|  | ||||
| }; | ||||
|   _uploadsPath: './repo/uploads', | ||||
|   _uploadsThumbsPath: './data/thumbs', | ||||
|  | ||||
|   _watcher: null, | ||||
|  | ||||
|   /** | ||||
|    * Initialize Uploads model | ||||
|    * | ||||
|    * @return     {Object}  Uploads model instance | ||||
|    */ | ||||
|   init () { | ||||
|     let self = this | ||||
|  | ||||
|     self._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads') | ||||
|     self._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs') | ||||
|  | ||||
|     // Disable Sharp cache, as it causes file lock issues when deleting uploads. | ||||
|     sharp.cache(false) | ||||
|  | ||||
|     return self | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Watch the uploads folder for changes | ||||
|    * | ||||
|    * @return     {Void}  Void | ||||
|    */ | ||||
|   watch () { | ||||
|     let self = this | ||||
|  | ||||
|     self._watcher = chokidar.watch(self._uploadsPath, { | ||||
|       persistent: true, | ||||
|       ignoreInitial: true, | ||||
|       cwd: self._uploadsPath, | ||||
|       depth: 1, | ||||
|       awaitWriteFinish: true | ||||
|     }) | ||||
|  | ||||
|     // -> Add new upload file | ||||
|  | ||||
|     self._watcher.on('add', (p) => { | ||||
|       let pInfo = self.parseUploadsRelPath(p) | ||||
|       return self.processFile(pInfo.folder, pInfo.filename).then((mData) => { | ||||
|         return db.UplFile.findByIdAndUpdate(mData._id, mData, { upsert: true }) | ||||
|       }).then(() => { | ||||
|         return git.commitUploads('Uploaded ' + p) | ||||
|       }) | ||||
|     }) | ||||
|  | ||||
|     // -> Remove upload file | ||||
|  | ||||
|     self._watcher.on('unlink', (p) => { | ||||
|       return git.commitUploads('Deleted/Renamed ' + p) | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Initial Uploads scan | ||||
|    * | ||||
|    * @return     {Promise<Void>}  Promise of the scan operation | ||||
|    */ | ||||
|   initialScan () { | ||||
|     let self = this | ||||
|  | ||||
|     return fs.readdirAsync(self._uploadsPath).then((ls) => { | ||||
|       // Get all folders | ||||
|  | ||||
|       return Promise.map(ls, (f) => { | ||||
|         return fs.statAsync(path.join(self._uploadsPath, f)).then((s) => { return { filename: f, stat: s } }) | ||||
|       }).filter((s) => { return s.stat.isDirectory() }).then((arrDirs) => { | ||||
|         let folderNames = _.map(arrDirs, 'filename') | ||||
|         folderNames.unshift('') | ||||
|  | ||||
|         // Add folders to DB | ||||
|  | ||||
|         return db.UplFolder.remove({}).then(() => { | ||||
|           return db.UplFolder.insertMany(_.map(folderNames, (f) => { | ||||
|             return { | ||||
|               _id: 'f:' + f, | ||||
|               name: f | ||||
|             } | ||||
|           })) | ||||
|         }).then(() => { | ||||
|           // Traverse each directory and scan files | ||||
|  | ||||
|           let allFiles = [] | ||||
|  | ||||
|           return Promise.map(folderNames, (fldName) => { | ||||
|             let fldPath = path.join(self._uploadsPath, fldName) | ||||
|             return fs.readdirAsync(fldPath).then((fList) => { | ||||
|               return Promise.map(fList, (f) => { | ||||
|                 return upl.processFile(fldName, f).then((mData) => { | ||||
|                   if (mData) { | ||||
|                     allFiles.push(mData) | ||||
|                   } | ||||
|                   return true | ||||
|                 }) | ||||
|               }, {concurrency: 3}) | ||||
|             }) | ||||
|           }, {concurrency: 1}).finally(() => { | ||||
|             // Add files to DB | ||||
|  | ||||
|             return db.UplFile.remove({}).then(() => { | ||||
|               if (_.isArray(allFiles) && allFiles.length > 0) { | ||||
|                 return db.UplFile.insertMany(allFiles) | ||||
|               } else { | ||||
|                 return true | ||||
|               } | ||||
|             }) | ||||
|           }) | ||||
|         }) | ||||
|       }) | ||||
|     }).then(() => { | ||||
|       // Watch for new changes | ||||
|  | ||||
|       return upl.watch() | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Parse relative Uploads path | ||||
|    * | ||||
|    * @param      {String}  f       Relative Uploads path | ||||
|    * @return     {Object}  Parsed path (folder and filename) | ||||
|    */ | ||||
|   parseUploadsRelPath (f) { | ||||
|     let fObj = path.parse(f) | ||||
|     return { | ||||
|       folder: fObj.dir, | ||||
|       filename: fObj.base | ||||
|     } | ||||
|   }, | ||||
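  // For reference (example path): parseUploadsRelPath('vacation/photo.jpg')
  // returns { folder: 'vacation', filename: 'photo.jpg' }, which the chokidar handlers above
  // pass straight into processFile().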
|  | ||||
|   /** | ||||
|    * Get metadata from file and generate thumbnails if necessary | ||||
|    * | ||||
|    * @param      {String}  fldName  The folder name | ||||
|    * @param      {String}  f        The filename | ||||
|    * @return     {Promise<Object>}  Promise of the file metadata | ||||
|    */ | ||||
|   processFile (fldName, f) { | ||||
|     let self = this | ||||
|  | ||||
|     let fldPath = path.join(self._uploadsPath, fldName) | ||||
|     let fPath = path.join(fldPath, f) | ||||
|     let fPathObj = path.parse(fPath) | ||||
|     let fUid = farmhash.fingerprint32(fldName + '/' + f) | ||||
|  | ||||
|     return fs.statAsync(fPath).then((s) => { | ||||
|       if (!s.isFile()) { return false } | ||||
|  | ||||
|       // Get MIME info | ||||
|  | ||||
|       let mimeInfo = fileType(readChunk.sync(fPath, 0, 262)) | ||||
|       if (_.isNil(mimeInfo)) { | ||||
|         mimeInfo = { | ||||
|           mime: mime.lookup(fPathObj.ext) || 'application/octet-stream' | ||||
|         } | ||||
|       } | ||||
|  | ||||
|       // Images | ||||
|  | ||||
|       if (s.size < 3145728) { // ignore files larger than 3MB | ||||
|         if (_.includes(['image/png', 'image/jpeg', 'image/gif', 'image/webp'], mimeInfo.mime)) { | ||||
|           return self.getImageMetadata(fPath).then((mImgData) => { | ||||
|             let cacheThumbnailPath = path.parse(path.join(self._uploadsThumbsPath, fUid + '.png')) | ||||
|             let cacheThumbnailPathStr = path.format(cacheThumbnailPath) | ||||
|  | ||||
|             let mData = { | ||||
|               _id: fUid, | ||||
|               category: 'image', | ||||
|               mime: mimeInfo.mime, | ||||
|               extra: _.pick(mImgData, ['format', 'width', 'height', 'density', 'hasAlpha', 'orientation']), | ||||
|               folder: 'f:' + fldName, | ||||
|               filename: f, | ||||
|               basename: fPathObj.name, | ||||
|               filesize: s.size | ||||
|             } | ||||
|  | ||||
|             // Generate thumbnail | ||||
|  | ||||
|             return fs.statAsync(cacheThumbnailPathStr).then((st) => { | ||||
|               return st.isFile() | ||||
|             }).catch((err) => { // eslint-disable-line handle-callback-err | ||||
|               return false | ||||
|             }).then((thumbExists) => { | ||||
|               return (thumbExists) ? mData : fs.ensureDirAsync(cacheThumbnailPath.dir).then(() => { | ||||
|                 return self.generateThumbnail(fPath, cacheThumbnailPathStr) | ||||
|               }).return(mData) | ||||
|             }) | ||||
|           }) | ||||
|         } | ||||
|       } | ||||
|  | ||||
|       // Other Files | ||||
|  | ||||
|       return { | ||||
|         _id: fUid, | ||||
|         category: 'binary', | ||||
|         mime: mimeInfo.mime, | ||||
|         folder: 'f:' + fldName, | ||||
|         filename: f, | ||||
|         basename: fPathObj.name, | ||||
|         filesize: s.size | ||||
|       } | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Generate thumbnail of image | ||||
|    * | ||||
|    * @param      {String}           sourcePath  The source path | ||||
|    * @param      {String}           destPath    The destination path | ||||
|    * @return     {Promise<Object>}  Promise returning the resized image info | ||||
|    */ | ||||
|   generateThumbnail (sourcePath, destPath) { | ||||
|     return sharp(sourcePath) | ||||
|             .withoutEnlargement() | ||||
|             .resize(150, 150) | ||||
|             .background('white') | ||||
|             .embed() | ||||
|             .flatten() | ||||
|             .toFormat('png') | ||||
|             .toFile(destPath) | ||||
|   }, | ||||
|  | ||||
|   /** | ||||
|    * Gets the image metadata. | ||||
|    * | ||||
|    * @param      {String}  sourcePath  The source path | ||||
|    * @return     {Object}  The image metadata. | ||||
|    */ | ||||
|   getImageMetadata (sourcePath) { | ||||
|     return sharp(sourcePath).metadata() | ||||
|   } | ||||
|  | ||||
| } | ||||
|   | ||||
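A note on the thumbnail flow above: processFile only attempts thumbnails for images under 3 MB and skips regeneration when the cached PNG already exists, while generateThumbnail letterboxes the source onto a white 150x150 PNG via the Sharp resize/embed/flatten chain. A hedged sketch of calling it directly, reusing the module's default paths as example values and the upl reference this file already uses for itself (real callers go through processFile):

// Illustrative only; processFile() normally derives both paths itself.
upl.generateThumbnail('./repo/uploads/vacation/photo.jpg', './data/thumbs/123456789.png')
  .then((info) => {
    // Sharp resolves with the written file's info (format, width, height, ...)
    winston.info('Thumbnail ready: ' + info.width + 'x' + info.height)
  })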
							
								
								
									
498  libs/uploads.js
							| @@ -1,308 +1,280 @@ | ||||
| "use strict"; | ||||
| 'use strict' | ||||
|  | ||||
| const path = require('path'), | ||||
| 			Promise = require('bluebird'), | ||||
| 			fs = Promise.promisifyAll(require('fs-extra')), | ||||
| 			multer  = require('multer'), | ||||
| 			request = require('request'), | ||||
| 			url = require('url'), | ||||
| 			farmhash = require('farmhash'), | ||||
| 			_ = require('lodash'); | ||||
| const path = require('path') | ||||
| const Promise = require('bluebird') | ||||
| const fs = Promise.promisifyAll(require('fs-extra')) | ||||
| const request = require('request') | ||||
| const url = require('url') | ||||
| const farmhash = require('farmhash') | ||||
| const _ = require('lodash') | ||||
|  | ||||
| var regFolderName = new RegExp("^[a-z0-9][a-z0-9\-]*[a-z0-9]$"); | ||||
| const maxDownloadFileSize = 3145728; // 3 MB | ||||
| var regFolderName = new RegExp('^[a-z0-9][a-z0-9-]*[a-z0-9]$') | ||||
| const maxDownloadFileSize = 3145728 // 3 MB | ||||
|  | ||||
| /** | ||||
|  * Uploads | ||||
|  */ | ||||
| module.exports = { | ||||
|  | ||||
| 	_uploadsPath: './repo/uploads', | ||||
| 	_uploadsThumbsPath: './data/thumbs', | ||||
|   _uploadsPath: './repo/uploads', | ||||
|   _uploadsThumbsPath: './data/thumbs', | ||||
|  | ||||
| 	/** | ||||
| 	 * Initialize Local Data Storage model | ||||
| 	 * | ||||
| 	 * @return     {Object}  Uploads model instance | ||||
| 	 */ | ||||
| 	init() { | ||||
|   /** | ||||
|    * Initialize Uploads model | ||||
|    * | ||||
|    * @return     {Object}  Uploads model instance | ||||
|    */ | ||||
|   init () { | ||||
|     this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads') | ||||
|     this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs') | ||||
|  | ||||
| 		this._uploadsPath = path.resolve(ROOTPATH, appconfig.paths.repo, 'uploads'); | ||||
| 		this._uploadsThumbsPath = path.resolve(ROOTPATH, appconfig.paths.data, 'thumbs'); | ||||
|     return this | ||||
|   }, | ||||
|  | ||||
| 		return this; | ||||
|   /** | ||||
|    * Gets the thumbnails folder path. | ||||
|    * | ||||
|    * @return     {String}  The thumbs path. | ||||
|    */ | ||||
|   getThumbsPath () { | ||||
|     return this._uploadsThumbsPath | ||||
|   }, | ||||
|  | ||||
| 	}, | ||||
|   /** | ||||
|    * Gets the uploads folders. | ||||
|    * | ||||
|    * @return     {Array<String>}  The uploads folders. | ||||
|    */ | ||||
|   getUploadsFolders () { | ||||
|     return db.UplFolder.find({}, 'name').sort('name').exec().then((results) => { | ||||
|       return (results) ? _.map(results, 'name') : [{ name: '' }] | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the thumbnails folder path. | ||||
| 	 * | ||||
| 	 * @return     {String}  The thumbs path. | ||||
| 	 */ | ||||
| 	getThumbsPath() { | ||||
| 		return this._uploadsThumbsPath; | ||||
| 	}, | ||||
|   /** | ||||
|    * Creates an uploads folder. | ||||
|    * | ||||
|    * @param      {String}  folderName  The folder name | ||||
|    * @return     {Promise}  Promise of the operation | ||||
|    */ | ||||
|   createUploadsFolder (folderName) { | ||||
|     let self = this | ||||
|  | ||||
| 	/** | ||||
| 	 * Gets the uploads folders. | ||||
| 	 * | ||||
| 	 * @return     {Array<String>}  The uploads folders. | ||||
| 	 */ | ||||
| 	getUploadsFolders() { | ||||
| 		return db.UplFolder.find({}, 'name').sort('name').exec().then((results) => { | ||||
| 			return (results) ? _.map(results, 'name') : [{ name: '' }]; | ||||
| 		}); | ||||
| 	}, | ||||
|     folderName = _.kebabCase(_.trim(folderName)) | ||||
|  | ||||
| 	/** | ||||
| 	 * Creates an uploads folder. | ||||
| 	 * | ||||
| 	 * @param      {String}  folderName  The folder name | ||||
| 	 * @return     {Promise}  Promise of the operation | ||||
| 	 */ | ||||
| 	createUploadsFolder(folderName) { | ||||
|     if (_.isEmpty(folderName) || !regFolderName.test(folderName)) { | ||||
|       return Promise.resolve(self.getUploadsFolders()) | ||||
|     } | ||||
|  | ||||
| 		let self = this; | ||||
|     return fs.ensureDirAsync(path.join(self._uploadsPath, folderName)).then(() => { | ||||
|       return db.UplFolder.findOneAndUpdate({ | ||||
|         _id: 'f:' + folderName | ||||
|       }, { | ||||
|         name: folderName | ||||
|       }, { | ||||
|         upsert: true | ||||
|       }) | ||||
|     }).then(() => { | ||||
|       return self.getUploadsFolders() | ||||
|     }) | ||||
|   }, | ||||
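  // For reference (invented name): createUploadsFolder(' My Photos! ') kebab-cases the input to
  // 'my-photos', ensures repo/uploads/my-photos exists, upserts the 'f:my-photos' folder entry
  // in the DB, and resolves with the refreshed folder list.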
|  | ||||
| 		folderName = _.kebabCase(_.trim(folderName)); | ||||
|   /** | ||||
|    * Check if folder is valid and exists | ||||
|    * | ||||
|    * @param      {String}  folderName  The folder name | ||||
|    * @return     {Promise<String|Boolean>}  The full folder path if valid, false otherwise | ||||
|    */ | ||||
|   validateUploadsFolder (folderName) { | ||||
|     return db.UplFolder.findOne({ name: folderName }).then((f) => { | ||||
|       return (f) ? path.resolve(this._uploadsPath, folderName) : false | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 		if(_.isEmpty(folderName) || !regFolderName.test(folderName)) { | ||||
| 			return Promise.resolve(self.getUploadsFolders()); | ||||
| 		} | ||||
|   /** | ||||
|    * Adds one or more uploads files. | ||||
|    * | ||||
|    * @param      {Array<Object>}  arrFiles  The uploads files | ||||
|    * @return     {Void}  Void | ||||
|    */ | ||||
|   addUploadsFiles (arrFiles) { | ||||
|     if (_.isArray(arrFiles) || _.isPlainObject(arrFiles)) { | ||||
|       // this._uploadsDb.Files.insert(arrFiles); | ||||
|     } | ||||
|     return | ||||
|   }, | ||||
|  | ||||
| 		return fs.ensureDirAsync(path.join(self._uploadsPath, folderName)).then(() => { | ||||
| 			return db.UplFolder.findOneAndUpdate({ | ||||
| 				_id: 'f:' + folderName | ||||
| 			}, { | ||||
| 				name: folderName | ||||
| 			}, { | ||||
| 				upsert: true | ||||
| 			}); | ||||
| 		}).then(() => { | ||||
| 			return self.getUploadsFolders(); | ||||
| 		}); | ||||
|   /** | ||||
|    * Gets the uploads files. | ||||
|    * | ||||
|    * @param      {String}  cat     Category type | ||||
|    * @param      {String}  fld     Folder | ||||
|    * @return     {Array<Object>}  The files matching the query | ||||
|    */ | ||||
|   getUploadsFiles (cat, fld) { | ||||
|     return db.UplFile.find({ | ||||
|       category: cat, | ||||
|       folder: 'f:' + fld | ||||
|     }).sort('filename').exec() | ||||
|   }, | ||||
|  | ||||
| 	}, | ||||
|   /** | ||||
|    * Deletes an uploads file. | ||||
|    * | ||||
|    * @param      {string}   uid     The file unique ID | ||||
|    * @return     {Promise}  Promise of the operation | ||||
|    */ | ||||
|   deleteUploadsFile (uid) { | ||||
|     let self = this | ||||
|  | ||||
| 	/** | ||||
| 	 * Check if folder is valid and exists | ||||
| 	 * | ||||
| 	 * @param      {String}  folderName  The folder name | ||||
| 	 * @return     {Boolean}   True if valid | ||||
| 	 */ | ||||
| 	validateUploadsFolder(folderName) { | ||||
|     return db.UplFile.findOneAndRemove({ _id: uid }).then((f) => { | ||||
|       if (f) { | ||||
|         return self.deleteUploadsFileTry(f, 0) | ||||
|       } | ||||
|       return true | ||||
|     }) | ||||
|   }, | ||||
|  | ||||
| 		return db.UplFolder.findOne({ name: folderName }).then((f) => { | ||||
| 			return (f) ? path.resolve(this._uploadsPath, folderName) : false; | ||||
| 		}); | ||||
|   deleteUploadsFileTry (f, attempt) { | ||||
|     let self = this | ||||
|  | ||||
| 	}, | ||||
|     let fFolder = (f.folder && f.folder !== 'f:') ? f.folder.slice(2) : './' | ||||
|  | ||||
| 	/** | ||||
| 	 * Adds one or more uploads files. | ||||
| 	 * | ||||
| 	 * @param      {Array<Object>}  arrFiles  The uploads files | ||||
| 	 * @return     {Void}  Void | ||||
| 	 */ | ||||
| 	addUploadsFiles(arrFiles) { | ||||
| 		if(_.isArray(arrFiles) || _.isPlainObject(arrFiles)) { | ||||
| 			//this._uploadsDb.Files.insert(arrFiles); | ||||
| 		} | ||||
| 		return; | ||||
| 	}, | ||||
|     return Promise.join( | ||||
|       fs.removeAsync(path.join(self._uploadsThumbsPath, f._id + '.png')), | ||||
|       fs.removeAsync(path.resolve(self._uploadsPath, fFolder, f.filename)) | ||||
|     ).catch((err) => { | ||||
|       if (err.code === 'EBUSY' && attempt < 5) { | ||||
|         return Promise.delay(100).then(() => { | ||||
|           return self.deleteUploadsFileTry(f, attempt + 1) | ||||
|         }) | ||||
|       } else { | ||||
|         winston.warn('Unable to delete uploads file ' + f.filename + '. File is locked by another process and multiple attempts failed.') | ||||
|         return true | ||||
|       } | ||||
|     }) | ||||
|   }, | ||||
	/**
	 * Gets the uploads files.
	 *
	 * @param      {String}  cat     Category type
	 * @param      {String}  fld     Folder
	 * @return     {Array<Object>}  The files matching the query
	 */
	getUploadsFiles(cat, fld) {

		return db.UplFile.find({
			category: cat,
			folder: 'f:' + fld
		}).sort('filename').exec();

	},

  /**
   * Downloads a file from url.
   *
   * @param      {String}   fFolder  The folder
   * @param      {String}   fUrl     The full URL
   * @return     {Promise}  Promise of the operation
   */
  downloadFromUrl (fFolder, fUrl) {
    let fUrlObj = url.parse(fUrl)
    let fUrlFilename = _.last(_.split(fUrlObj.pathname, '/'))
    let destFolder = _.chain(fFolder).trim().toLower().value()

    return upl.validateUploadsFolder(destFolder).then((destFolderPath) => {
      if (!destFolderPath) {
        return Promise.reject(new Error('Invalid Folder'))
      }

      return lcdata.validateUploadsFilename(fUrlFilename, destFolder).then((destFilename) => {
        let destFilePath = path.resolve(destFolderPath, destFilename)

        return new Promise((resolve, reject) => {
          let rq = request({
            url: fUrl,
            method: 'GET',
            followRedirect: true,
            maxRedirects: 5,
            timeout: 10000
          })

          let destFileStream = fs.createWriteStream(destFilePath)
          let curFileSize = 0

          rq.on('data', (data) => {
            curFileSize += data.length
            if (curFileSize > maxDownloadFileSize) {
              rq.abort()
              destFileStream.destroy()
              fs.remove(destFilePath)
              reject(new Error('Remote file is too large!'))
            }
          }).on('error', (err) => {
            destFileStream.destroy()
            fs.remove(destFilePath)
            reject(err)
          })

          destFileStream.on('finish', () => {
            resolve(true)
          })

          rq.pipe(destFileStream)
        })
      })
    })
  },
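A hedged usage sketch for downloadFromUrl; the folder name and URL are placeholders. The method rejects when the folder is unknown, and aborts the transfer and removes the partial file once the stream exceeds maxDownloadFileSize:

upl.downloadFromUrl('drawings', 'https://example.com/logo.png').then(() => {
  winston.info('Remote file downloaded successfully')
}).catch((err) => {
  winston.warn('Download failed: ' + err.message)
})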
	/**
	 * Deletes an uploads file.
	 *
	 * @param      {string}   uid     The file unique ID
	 * @return     {Promise}  Promise of the operation
	 */
	deleteUploadsFile(uid) {

		let self = this;

		return db.UplFile.findOneAndRemove({ _id: uid }).then((f) => {
			if(f) {
				return self.deleteUploadsFileTry(f, 0);
			}
			return true;
		});
	},

	deleteUploadsFileTry(f, attempt) {

		let self = this;

		let fFolder = (f.folder && f.folder !== 'f:') ? f.folder.slice(2) : './';

		return Promise.join(
			fs.removeAsync(path.join(self._uploadsThumbsPath, f._id + '.png')),
			fs.removeAsync(path.resolve(self._uploadsPath, fFolder, f.filename))
		).catch((err) => {
			if(err.code === 'EBUSY' && attempt < 5) {
				return Promise.delay(100).then(() => {
					return self.deleteUploadsFileTry(f, attempt + 1);
				});
			} else {
				winston.warn('Unable to delete uploads file ' + f.filename + '. File is locked by another process and multiple attempts failed.');
				return true;
			}
		});

	},

  /**
   * Move/Rename a file
   *
   * @param      {String}   uid        The file ID
   * @param      {String}   fld        The destination folder
   * @param      {String}   nFilename  The new filename (optional)
   * @return     {Promise}  Promise of the operation
   */
  moveUploadsFile (uid, fld, nFilename) {
    let self = this

    return db.UplFolder.findById('f:' + fld).then((folder) => {
      if (folder) {
        return db.UplFile.findById(uid).then((originFile) => {
          // -> Check if rename is valid

          let nameCheck = null
          if (nFilename) {
            let originFileObj = path.parse(originFile.filename)
            nameCheck = lcdata.validateUploadsFilename(nFilename + originFileObj.ext, folder.name)
          } else {
            nameCheck = Promise.resolve(originFile.filename)
          }

          return nameCheck.then((destFilename) => {
            let originFolder = (originFile.folder && originFile.folder !== 'f:') ? originFile.folder.slice(2) : './'
            let sourceFilePath = path.resolve(self._uploadsPath, originFolder, originFile.filename)
            let destFilePath = path.resolve(self._uploadsPath, folder.name, destFilename)
            let preMoveOps = []

            // -> Check for invalid operations

            if (sourceFilePath === destFilePath) {
              return Promise.reject(new Error('Invalid Operation!'))
            }

            // -> Delete DB entry

            preMoveOps.push(db.UplFile.findByIdAndRemove(uid))

            // -> Move thumbnail ahead to avoid re-generation

            if (originFile.category === 'image') {
              let fUid = farmhash.fingerprint32(folder.name + '/' + destFilename)
              let sourceThumbPath = path.resolve(self._uploadsThumbsPath, originFile._id + '.png')
              let destThumbPath = path.resolve(self._uploadsThumbsPath, fUid + '.png')
              preMoveOps.push(fs.moveAsync(sourceThumbPath, destThumbPath))
            } else {
              preMoveOps.push(Promise.resolve(true))
            }

            // -> Proceed to move actual file

            return Promise.all(preMoveOps).then(() => {
              return fs.moveAsync(sourceFilePath, destFilePath, {
                clobber: false
              })
            })
          })
        })
      } else {
        return Promise.reject(new Error('Invalid Destination Folder'))
      }
    })
  }
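Thumbnails are moved rather than regenerated because their filenames are derived from a farmhash fingerprint of 'folderName/filename', as in the fUid computation above. A small illustrative sketch of that naming scheme; the folder, filename and thumbnails directory are placeholders standing in for self._uploadsThumbsPath:

const farmhash = require('farmhash')
const path = require('path')

// Recompute the thumbnail ID the same way moveUploadsFile does above
let fUid = farmhash.fingerprint32('drawings/logo.png')
let destThumbPath = path.resolve('./data/thumbs', fUid + '.png')
console.log(destThumbPath)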
	/**
	 * Downloads a file from url.
	 *
	 * @param      {String}   fFolder  The folder
	 * @param      {String}   fUrl     The full URL
	 * @return     {Promise}  Promise of the operation
	 */
	downloadFromUrl(fFolder, fUrl) {

		let self = this;

		let fUrlObj = url.parse(fUrl);
		let fUrlFilename = _.last(_.split(fUrlObj.pathname, '/'));
		let destFolder = _.chain(fFolder).trim().toLower().value();

		return upl.validateUploadsFolder(destFolder).then((destFolderPath) => {

			if(!destFolderPath) {
				return Promise.reject(new Error('Invalid Folder'));
			}

			return lcdata.validateUploadsFilename(fUrlFilename, destFolder).then((destFilename) => {

				let destFilePath = path.resolve(destFolderPath, destFilename);

				return new Promise((resolve, reject) => {

					let rq = request({
						url: fUrl,
						method: 'GET',
						followRedirect: true,
						maxRedirects: 5,
						timeout: 10000
					});

					let destFileStream = fs.createWriteStream(destFilePath);
					let curFileSize = 0;

					rq.on('data', (data) => {
						curFileSize += data.length;
						if(curFileSize > maxDownloadFileSize) {
							rq.abort();
							destFileStream.destroy();
							fs.remove(destFilePath);
							reject(new Error('Remote file is too large!'));
						}
					}).on('error', (err) => {
						destFileStream.destroy();
						fs.remove(destFilePath);
						reject(err);
					});

					destFileStream.on('finish', () => {
						resolve(true);
					});

					rq.pipe(destFileStream);

				});

			});

		});

	},

	/**
	 * Move/Rename a file
	 *
	 * @param      {String}   uid        The file ID
	 * @param      {String}   fld        The destination folder
	 * @param      {String}   nFilename  The new filename (optional)
	 * @return     {Promise}  Promise of the operation
	 */
	moveUploadsFile(uid, fld, nFilename) {

		let self = this;

		return db.UplFolder.findById('f:' + fld).then((folder) => {
			if(folder) {
				return db.UplFile.findById(uid).then((originFile) => {

					//-> Check if rename is valid

					let nameCheck = null;
					if(nFilename) {
						let originFileObj = path.parse(originFile.filename);
						nameCheck = lcdata.validateUploadsFilename(nFilename + originFileObj.ext, folder.name);
					} else {
						nameCheck = Promise.resolve(originFile.filename);
					}

					return nameCheck.then((destFilename) => {

						let originFolder = (originFile.folder && originFile.folder !== 'f:') ? originFile.folder.slice(2) : './';
						let sourceFilePath = path.resolve(self._uploadsPath, originFolder, originFile.filename);
						let destFilePath = path.resolve(self._uploadsPath, folder.name, destFilename);
						let preMoveOps = [];

						//-> Check for invalid operations

						if(sourceFilePath === destFilePath) {
							return Promise.reject(new Error('Invalid Operation!'));
						}

						//-> Delete DB entry

						preMoveOps.push(db.UplFile.findByIdAndRemove(uid));

						//-> Move thumbnail ahead to avoid re-generation

						if(originFile.category === 'image') {
							let fUid = farmhash.fingerprint32(folder.name + '/' + destFilename);
							let sourceThumbPath = path.resolve(self._uploadsThumbsPath, originFile._id + '.png');
							let destThumbPath = path.resolve(self._uploadsThumbsPath, fUid + '.png');
							preMoveOps.push(fs.moveAsync(sourceThumbPath, destThumbPath));
						} else {
							preMoveOps.push(Promise.resolve(true));
						}

						//-> Proceed to move actual file

						return Promise.all(preMoveOps).then(() => {
							return fs.moveAsync(sourceFilePath, destFilePath, {
								clobber: false
							});
						});

					})

				});
			} else {
				return Promise.reject(new Error('Invalid Destination Folder'));
			}
		});

	}

};

}
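Finally, a hypothetical call to moveUploadsFile; the file ID, destination folder and new name are invented, and the original extension is re-applied by the method itself:

upl.moveUploadsFile('59f1a2b3c4d5e6f7a8b9c0d1', 'archive', 'report-final').then(() => {
  winston.info('Uploads file moved')
}).catch((err) => {
  winston.warn('Move failed: ' + err.message)
})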