feat: git sync - process assets

NGPixel 2019-10-20 18:34:38 -04:00
parent f1668b9ac5
commit c4303a5659
3 changed files with 264 additions and 172 deletions

View File

@@ -0,0 +1,168 @@
const fs = require('fs-extra')
const path = require('path')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const klaw = require('klaw')
const mime = require('mime-types').lookup
const _ = require('lodash')
const pageHelper = require('../../../helpers/page.js')

/* global WIKI */

module.exports = {
  assetFolders: null,
  async importFromDisk ({ fullPath, moduleName }) {
    const rootUser = await WIKI.models.users.getRootUser()
    await pipeline(
      klaw(fullPath, {
        filter: (f) => {
          return !_.includes(f, '.git')
        }
      }),
      new stream.Transform({
        objectMode: true,
        transform: async (file, enc, cb) => {
          const relPath = file.path.substr(fullPath.length + 1)
          if (file.stats.size < 1) {
            // Skip directories and zero-byte files
            return cb()
          } else if (relPath && relPath.length > 3) {
            WIKI.logger.info(`(STORAGE/${moduleName}) Processing ${relPath}...`)
            const contentType = pageHelper.getContentType(relPath)
            if (contentType) {
              // -> Page
              try {
                await this.processPage({
                  user: rootUser,
                  relPath: relPath,
                  fullPath: fullPath,
                  contentType: contentType,
                  moduleName: moduleName
                })
              } catch (err) {
                WIKI.logger.warn(`(STORAGE/${moduleName}) Failed to process page ${relPath}`)
                WIKI.logger.warn(err)
              }
            } else {
              // -> Asset
              try {
                await this.processAsset({
                  user: rootUser,
                  relPath: relPath,
                  file: file,
                  contentType: contentType,
                  moduleName: moduleName
                })
              } catch (err) {
                WIKI.logger.warn(`(STORAGE/${moduleName}) Failed to process asset ${relPath}`)
                WIKI.logger.warn(err)
              }
            }
          }
          cb()
        }
      })
    )
    this.clearFolderCache()
  },
  async processPage ({ user, fullPath, relPath, contentType, moduleName }) {
    const contentPath = pageHelper.getPagePath(relPath)
    const itemContents = await fs.readFile(path.join(fullPath, relPath), 'utf8')
    const pageData = WIKI.models.pages.parseMetadata(itemContents, contentType)
    const currentPage = await WIKI.models.pages.query().findOne({
      path: contentPath.path,
      localeCode: contentPath.locale
    })
    if (currentPage) {
      // Already in the DB, can mark as modified
      WIKI.logger.info(`(STORAGE/${moduleName}) Page marked as modified: ${relPath}`)
      await WIKI.models.pages.updatePage({
        id: currentPage.id,
        title: _.get(pageData, 'title', currentPage.title),
        description: _.get(pageData, 'description', currentPage.description) || '',
        isPublished: _.get(pageData, 'isPublished', currentPage.isPublished),
        isPrivate: false,
        content: pageData.content,
        user: user,
        skipStorage: true
      })
    } else {
      // Not in the DB, can mark as new
      WIKI.logger.info(`(STORAGE/${moduleName}) Page marked as new: ${relPath}`)
      const pageEditor = await WIKI.models.editors.getDefaultEditor(contentType)
      await WIKI.models.pages.createPage({
        path: contentPath.path,
        locale: contentPath.locale,
        title: _.get(pageData, 'title', _.last(contentPath.path.split('/'))),
        description: _.get(pageData, 'description', '') || '',
        isPublished: _.get(pageData, 'isPublished', true),
        isPrivate: false,
        content: pageData.content,
        user: user,
        editor: pageEditor,
        skipStorage: true
      })
    }
  },
  async processAsset ({ user, relPath, file, moduleName }) {
    WIKI.logger.info(`(STORAGE/${moduleName}) Asset marked for import: ${relPath}`)
    // -> Get all folder paths
    if (!this.assetFolders) {
      this.assetFolders = await WIKI.models.assetFolders.getAllPaths()
    }
    // -> Find existing folder
    const filePathInfo = path.parse(file.path)
    const folderPath = path.dirname(relPath).replace(/\\/g, '/')
    let folderId = _.toInteger(_.findKey(this.assetFolders, fld => { return fld === folderPath })) || null
    // -> Create missing folder structure
    if (!folderId && folderPath !== '.') {
      const folderParts = folderPath.split('/')
      let currentFolderPath = []
      let currentFolderParentId = null
      for (const folderPart of folderParts) {
        currentFolderPath.push(folderPart)
        const existingFolderId = _.findKey(this.assetFolders, fld => { return fld === currentFolderPath.join('/') })
        if (!existingFolderId) {
          const newFolderObj = await WIKI.models.assetFolders.query().insert({
            slug: folderPart,
            name: folderPart,
            parentId: currentFolderParentId
          })
          _.set(this.assetFolders, newFolderObj.id, currentFolderPath.join('/'))
          currentFolderParentId = newFolderObj.id
        } else {
          currentFolderParentId = _.toInteger(existingFolderId)
        }
      }
      folderId = currentFolderParentId
    }
    // -> Import asset
    await WIKI.models.assets.upload({
      mode: 'import',
      originalname: filePathInfo.base,
      ext: filePathInfo.ext,
      mimetype: mime(filePathInfo.base) || 'application/octet-stream',
      size: file.stats.size,
      folderId: folderId,
      path: file.path,
      assetPath: relPath,
      user: user,
      skipStorage: true
    })
  },
  clearFolderCache () {
    this.assetFolders = null
  }
}
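A note on the folder cache used by `processAsset()` above: `assetFolders` is treated as a map of folder id → folder path, which is what `WIKI.models.assetFolders.getAllPaths()` appears to return given the `_.findKey` lookups. A small self-contained sketch of that lookup and of how the cache grows as missing levels are inserted — the ids and paths below are invented for illustration:

```js
const _ = require('lodash')

// Hypothetical cache shape, as returned by getAllPaths(): { [folderId]: 'folder/path' }
const assetFolders = { 1: 'uploads', 2: 'uploads/diagrams' }

// Existing folder: findKey returns the id as a string, hence the _.toInteger() above
const folderId = _.toInteger(_.findKey(assetFolders, fld => fld === 'uploads/diagrams')) || null
console.log(folderId) // 2

// Missing folder 'docs/images': the loop above walks 'docs', then 'docs/images',
// inserts each level with the previous level as parentId, and caches the new ids:
_.set(assetFolders, 3, 'docs')        // after insert({ slug: 'docs', parentId: null })
_.set(assetFolders, 4, 'docs/images') // after insert({ slug: 'images', parentId: 3 })
console.log(assetFolders) // { '1': 'uploads', '2': 'uploads/diagrams', '3': 'docs', '4': 'docs/images' }
```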

View File

@@ -6,10 +6,10 @@ const stream = require('stream')
 const _ = require('lodash')
 const Promise = require('bluebird')
 const pipeline = Promise.promisify(stream.pipeline)
-const klaw = require('klaw')
-const pageHelper = require('../../../helpers/page.js')
 const moment = require('moment')
-const mime = require('mime-types').lookup
+const pageHelper = require('../../../helpers/page')
+const commonDisk = require('./common')
 
 /* global WIKI */
@@ -166,122 +166,10 @@ module.exports = {
   },
   async importAll() {
     WIKI.logger.info(`(STORAGE/DISK) Importing all content from local disk folder to the DB...`)
-    const rootUser = await WIKI.models.users.getRootUser()
-    let assetFolders = await WIKI.models.assetFolders.getAllPaths()
-    await pipeline(
-      klaw(this.config.path, {
-        filter: (f) => {
-          return !_.includes(f, '.git')
-        }
-      }),
-      new stream.Transform({
-        objectMode: true,
-        transform: async (file, enc, cb) => {
-          const relPath = file.path.substr(this.config.path.length + 1)
-          if (file.stats.size < 1) {
-            // Skip directories and zero-byte files
-            return cb()
-          } else if (relPath && relPath.length > 3) {
-            WIKI.logger.info(`(STORAGE/DISK) Processing ${relPath}...`)
-            const contentType = pageHelper.getContentType(relPath)
-            if (contentType) {
-              // -> Page
-              const contentPath = pageHelper.getPagePath(relPath)
-              let itemContents = ''
-              try {
-                itemContents = await fs.readFile(path.join(this.config.path, relPath), 'utf8')
-                const pageData = WIKI.models.pages.parseMetadata(itemContents, contentType)
-                const currentPage = await WIKI.models.pages.query().findOne({
-                  path: contentPath.path,
-                  localeCode: contentPath.locale
-                })
-                if (currentPage) {
-                  // Already in the DB, can mark as modified
-                  WIKI.logger.info(`(STORAGE/DISK) Page marked as modified: ${relPath}`)
-                  await WIKI.models.pages.updatePage({
-                    id: currentPage.id,
-                    title: _.get(pageData, 'title', currentPage.title),
-                    description: _.get(pageData, 'description', currentPage.description) || '',
-                    isPublished: _.get(pageData, 'isPublished', currentPage.isPublished),
-                    isPrivate: false,
-                    content: pageData.content,
-                    user: rootUser,
-                    skipStorage: true
-                  })
-                } else {
-                  // Not in the DB, can mark as new
-                  WIKI.logger.info(`(STORAGE/DISK) Page marked as new: ${relPath}`)
-                  const pageEditor = await WIKI.models.editors.getDefaultEditor(contentType)
-                  await WIKI.models.pages.createPage({
-                    path: contentPath.path,
-                    locale: contentPath.locale,
-                    title: _.get(pageData, 'title', _.last(contentPath.path.split('/'))),
-                    description: _.get(pageData, 'description', '') || '',
-                    isPublished: _.get(pageData, 'isPublished', true),
-                    isPrivate: false,
-                    content: pageData.content,
-                    user: rootUser,
-                    editor: pageEditor,
-                    skipStorage: true
-                  })
-                }
-              } catch (err) {
-                WIKI.logger.warn(`(STORAGE/DISK) Failed to process ${relPath}`)
-                WIKI.logger.warn(err)
-              }
-            } else {
-              // -> Asset
-              // -> Find existing folder
-              const filePathInfo = path.parse(file.path)
-              const folderPath = path.dirname(relPath).replace(/\\/g, '/')
-              let folderId = _.toInteger(_.findKey(assetFolders, fld => { return fld === folderPath })) || null
-              // -> Create missing folder structure
-              if (!folderId && folderPath !== '.') {
-                const folderParts = folderPath.split('/')
-                let currentFolderPath = []
-                let currentFolderParentId = null
-                for (const folderPart of folderParts) {
-                  currentFolderPath.push(folderPart)
-                  const existingFolderId = _.findKey(assetFolders, fld => { return fld === currentFolderPath.join('/') })
-                  if (!existingFolderId) {
-                    const newFolderObj = await WIKI.models.assetFolders.query().insert({
-                      slug: folderPart,
-                      name: folderPart,
-                      parentId: currentFolderParentId
-                    })
-                    _.set(assetFolders, newFolderObj.id, currentFolderPath.join('/'))
-                    currentFolderParentId = newFolderObj.id
-                  } else {
-                    currentFolderParentId = _.toInteger(existingFolderId)
-                  }
-                }
-                folderId = currentFolderParentId
-              }
-              // -> Import asset
-              await WIKI.models.assets.upload({
-                mode: 'import',
-                originalname: filePathInfo.base,
-                ext: filePathInfo.ext,
-                mimetype: mime(filePathInfo.base) || 'application/octet-stream',
-                size: file.stats.size,
-                folderId: folderId,
-                path: file.path,
-                assetPath: relPath,
-                user: rootUser,
-                skipStorage: true
-              })
-            }
-          }
-          cb()
-        }
-      })
-    )
+    await commonDisk.importFromDisk({
+      fullPath: this.config.path,
+      moduleName: 'DISK'
+    })
     WIKI.logger.info('(STORAGE/DISK) Import completed.')
   }
 }

View File

@@ -6,7 +6,10 @@ const stream = require('stream')
 const Promise = require('bluebird')
 const pipeline = Promise.promisify(stream.pipeline)
 const klaw = require('klaw')
-const pageHelper = require('../../../helpers/page.js')
+const pageHelper = require('../../../helpers/page')
+const assetHelper = require('../../../helpers/asset')
+const commonDisk = require('../disk/common')
 
 /* global WIKI */
@@ -136,7 +139,29 @@ module.exports = {
       const diff = await this.git.diffSummary(['-M', currentCommitLog.hash, latestCommitLog.hash])
       if (_.get(diff, 'files', []).length > 0) {
-        await this.processFiles(diff.files, rootUser)
+        let filesToProcess = []
+        for (const f of diff.files) {
+          const fPath = path.join(this.repoPath, f.file)
+          let fStats = { size: 0 }
+          try {
+            fStats = await fs.stat(fPath)
+          } catch (err) {
+            if (err.code !== 'ENOENT') {
+              WIKI.logger.warn(`(STORAGE/GIT) Failed to access file ${f.file}! Skipping...`)
+              continue
+            }
+          }
+          filesToProcess.push({
+            ...f,
+            file: {
+              path: fPath,
+              stats: fStats
+            },
+            relPath: f.file
+          })
+        }
+        await this.processFiles(filesToProcess, rootUser)
       }
     }
   },
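For context, the loop above adapts each `diffSummary()` entry to the `{ path, stats }` shape that klaw produces and that `commonDisk.processAsset()` reads (`file.path`, `file.stats.size`). A sketch of the resulting item for a hypothetical modified page — the concrete path and size are invented:

```js
// Hypothetical simple-git diffSummary entry for a modified Markdown page
const f = { file: 'docs/install.md', changes: 4, insertions: 4, deletions: 0, binary: false }

// What the loop above would push into filesToProcess (fStats comes from fs.stat on the
// working copy, or stays { size: 0 } when the file no longer exists):
const item = {
  ...f,
  file: {
    path: '/data/repo/docs/install.md', // path.join(this.repoPath, f.file)
    stats: { size: 1024 }
  },
  relPath: f.file
}
```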
@@ -147,62 +172,65 @@ module.exports = {
    */
   async processFiles(files, user) {
     for (const item of files) {
-      const contentType = pageHelper.getContentType(item.file)
-      if (!contentType) {
-        continue
-      }
-      const contentPath = pageHelper.getPagePath(item.file)
-      let itemContents = ''
-      try {
-        itemContents = await fs.readFile(path.join(this.repoPath, item.file), 'utf8')
-        const pageData = WIKI.models.pages.parseMetadata(itemContents, contentType)
-        const currentPage = await WIKI.models.pages.query().findOne({
-          path: contentPath.path,
-          localeCode: contentPath.locale
-        })
-        if (currentPage) {
-          // Already in the DB, can mark as modified
-          WIKI.logger.info(`(STORAGE/GIT) Page marked as modified: ${item.file}`)
-          await WIKI.models.pages.updatePage({
-            id: currentPage.id,
-            title: _.get(pageData, 'title', currentPage.title),
-            description: _.get(pageData, 'description', currentPage.description) || '',
-            isPublished: _.get(pageData, 'isPublished', currentPage.isPublished),
-            isPrivate: false,
-            content: pageData.content,
-            user: user,
-            skipStorage: true
-          })
-        } else {
-          // Not in the DB, can mark as new
-          WIKI.logger.info(`(STORAGE/GIT) Page marked as new: ${item.file}`)
-          const pageEditor = await WIKI.models.editors.getDefaultEditor(contentType)
-          await WIKI.models.pages.createPage({
-            path: contentPath.path,
-            locale: contentPath.locale,
-            title: _.get(pageData, 'title', _.last(contentPath.path.split('/'))),
-            description: _.get(pageData, 'description', '') || '',
-            isPublished: _.get(pageData, 'isPublished', true),
-            isPrivate: false,
-            content: pageData.content,
-            user: user,
-            editor: pageEditor,
-            skipStorage: true
-          })
-        }
-      } catch (err) {
-        if (err.code === 'ENOENT' && item.deletions > 0 && item.insertions === 0) {
-          // File was deleted by git, can safely mark as deleted in DB
-          WIKI.logger.info(`(STORAGE/GIT) Page marked as deleted: ${item.file}`)
-          await WIKI.models.pages.deletePage({
-            path: contentPath.path,
-            locale: contentPath.locale,
-            skipStorage: true
-          })
-        } else {
-          WIKI.logger.warn(`(STORAGE/GIT) Failed to process ${item.file}`)
-          WIKI.logger.warn(err)
-        }
-      }
+      const contentType = pageHelper.getContentType(item.relPath)
+      const fileExists = await fs.pathExists(item.file)
+      if (!item.binary && contentType) {
+        // -> Page
+        if (!fileExists && item.deletions > 0 && item.insertions === 0) {
+          // Page was deleted by git, can safely mark as deleted in DB
+          WIKI.logger.info(`(STORAGE/GIT) Page marked as deleted: ${item.relPath}`)
+          const contentPath = pageHelper.getPagePath(item.relPath)
+          await WIKI.models.pages.deletePage({
+            path: contentPath.path,
+            locale: contentPath.locale,
+            skipStorage: true
+          })
+          continue
+        }
+        try {
+          await commonDisk.processPage({
+            user,
+            relPath: item.relPath,
+            fullPath: this.repoPath,
+            contentType: contentType,
+            moduleName: 'GIT'
+          })
+        } catch (err) {
+          WIKI.logger.warn(`(STORAGE/GIT) Failed to process ${item.relPath}`)
+          WIKI.logger.warn(err)
+        }
+      } else {
+        // -> Asset
+        if (!fileExists && ((item.before > 0 && item.after === 0) || (item.deletions > 0 && item.insertions === 0))) {
+          // Asset was deleted by git, can safely mark as deleted in DB
+          WIKI.logger.info(`(STORAGE/GIT) Asset marked as deleted: ${item.relPath}`)
+          const fileHash = assetHelper.generateHash(item.relPath)
+          const assetToDelete = await WIKI.models.assets.query().findOne({ hash: fileHash })
+          if (assetToDelete) {
+            await WIKI.models.knex('assetData').where('id', assetToDelete.id).del()
+            await WIKI.models.assets.query().deleteById(assetToDelete.id)
+            await assetToDelete.deleteAssetCache()
+          } else {
+            WIKI.logger.info(`(STORAGE/GIT) Asset was not found in the DB, nothing to delete: ${item.relPath}`)
+          }
+          continue
+        }
+        try {
+          await commonDisk.processAsset({
+            user,
+            relPath: item.relPath,
+            file: item.file,
+            contentType: contentType,
+            moduleName: 'GIT'
+          })
+        } catch (err) {
+          WIKI.logger.warn(`(STORAGE/GIT) Failed to process asset ${item.relPath}`)
+          WIKI.logger.warn(err)
+        }
+      }
     }
   },
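The deletion checks in the rewritten `processFiles()` deserve a callout: a path that is missing from the working copy is treated as deleted when the diff contains only removals — `deletions > 0 && insertions === 0` for text entries, and additionally `before > 0 && after === 0` for binary entries in the asset branch. A condensed restatement of that heuristic, as a sketch (the helper name is invented, not part of the commit):

```js
// Sketch only: mirrors the conditions used in the asset branch above.
function wasDeletedInGit (item, fileExists) {
  if (fileExists) { return false }
  const textDeleted = item.deletions > 0 && item.insertions === 0
  const binarySizeDropped = item.before > 0 && item.after === 0
  return textDeleted || binarySizeDropped
}
```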
@@ -346,11 +374,15 @@ module.exports = {
         objectMode: true,
         transform: async (file, enc, cb) => {
           const relPath = file.path.substr(this.repoPath.length + 1)
-          if (relPath && relPath.length > 3) {
+          if (file.stats.size < 1) {
+            // Skip directories and zero-byte files
+            return cb()
+          } else if (relPath && relPath.length > 3) {
             WIKI.logger.info(`(STORAGE/GIT) Processing ${relPath}...`)
             await this.processFiles([{
               user: rootUser,
-              file: relPath,
+              relPath,
+              file,
               deletions: 0,
               insertions: 0
             }], rootUser)
@@ -359,6 +391,9 @@ module.exports = {
         }
       })
     )
+    commonDisk.clearFolderCache()
     WIKI.logger.info('(STORAGE/GIT) Import completed.')
   },
   async syncUntracked() {
@@ -401,6 +436,7 @@ module.exports = {
         }
       })
     )
     await this.git.commit(`docs: add all untracked content`)
     WIKI.logger.info('(STORAGE/GIT) All content is now tracked.')
   }