feat: algolia search engine
@@ -4,7 +4,7 @@ description: Algolia is a powerful search-as-a-service solution, made easy to us
 author: requarks.io
 logo: https://static.requarks.io/logo/algolia.svg
 website: https://www.algolia.com/
-isAvailable: false
+isAvailable: true
 props:
   appId:
     type: String
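Note: the engine in the next diff reads this.config.appId, this.config.apiKey and this.config.indexName, so the props block above presumably continues past the visible appId entry. A minimal sketch of the assumed shape, following the same YAML layout (the apiKey and indexName entries and all title/hint strings are inferred from the engine code, not shown in this hunk):

props:
  appId:
    type: String
    title: App ID
    hint: Your Algolia Application ID
  apiKey:
    type: String
    title: Admin API Key
    hint: An API key with write access to the index
  indexName:
    type: String
    title: Index Name
    hint: Name of the index where pages will be stored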
@@ -1,26 +1,202 @@
 const _ = require('lodash')
 const algoliasearch = require('algoliasearch')
+const { pipeline, Transform } = require('stream')

 /* global WIKI */

 module.exports = {
-  activate() {
+  async activate() {
     // not used
   },
-  deactivate() {
+  async deactivate() {
     // not used
   },
-  query() {
+  /**
+   * INIT
+   */
+  async init() {
+    WIKI.logger.info(`(SEARCH/ALGOLIA) Initializing...`)
+    this.client = algoliasearch(this.config.appId, this.config.apiKey)
+    this.index = this.client.initIndex(this.config.indexName)
+
+    // -> Create Search Index
+    WIKI.logger.info(`(SEARCH/ALGOLIA) Setting index configuration...`)
+    await this.index.setSettings({
+      searchableAttributes: [
+        'title',
+        'description',
+        'content'
+      ],
+      attributesToRetrieve: [
+        'locale',
+        'path',
+        'title',
+        'description'
+      ],
+      advancedSyntax: true
+    })
+    WIKI.logger.info(`(SEARCH/ALGOLIA) Initialization completed.`)
   },
-  created() {
+  /**
+   * QUERY
+   *
+   * @param {String} q Query
+   * @param {Object} opts Additional options
+   */
+  async query(q, opts) {
+    try {
+      const results = await this.index.search({
+        query: q,
+        hitsPerPage: 50
+      })
+      return {
+        results: _.map(results.hits, r => ({
+          id: r.objectID,
+          locale: r.locale,
+          path: r.path,
+          title: r.title,
+          description: r.description
+        })),
+        suggestions: [],
+        totalHits: results.nbHits
+      }
+    } catch (err) {
+      WIKI.logger.warn('Search Engine Error:')
+      WIKI.logger.warn(err)
+    }
   },
-  updated() {
+  /**
+   * CREATE
+   *
+   * @param {Object} page Page to create
+   */
+  async created(page) {
+    await this.index.addObject({
+      objectID: page.hash,
+      locale: page.localeCode,
+      path: page.path,
+      title: page.title,
+      description: page.description,
+      content: page.content
+    })
   },
-  deleted() {
+  /**
+   * UPDATE
+   *
+   * @param {Object} page Page to update
+   */
+  async updated(page) {
+    await this.index.partialUpdateObject({
+      objectID: page.hash,
+      title: page.title,
+      description: page.description,
+      content: page.content
+    })
   },
-  renamed() {
+  /**
+   * DELETE
+   *
+   * @param {Object} page Page to delete
+   */
+  async deleted(page) {
+    await this.index.deleteObject(page.hash)
   },
-  rebuild() {
+  /**
+   * RENAME
+   *
+   * @param {Object} page Page to rename
+   */
+  async renamed(page) {
+    await this.index.deleteObject(page.sourceHash)
+    await this.index.addObject({
+      objectID: page.destinationHash,
+      locale: page.localeCode,
+      path: page.destinationPath,
+      title: page.title,
+      description: page.description,
+      content: page.content
+    })
+  },
+  /**
+   * REBUILD INDEX
+   */
+  async rebuild() {
+    WIKI.logger.info(`(SEARCH/ALGOLIA) Rebuilding Index...`)
+    await this.index.clearIndex()
+
+    const MAX_DOCUMENT_BYTES = 10 * Math.pow(2, 10) // 10 KB
+    const MAX_INDEXING_BYTES = 10 * Math.pow(2, 20) - Buffer.from('[').byteLength - Buffer.from(']').byteLength // 10 MB
+    const MAX_INDEXING_COUNT = 1000
+    const COMMA_BYTES = Buffer.from(',').byteLength
+
+    let chunks = []
+    let bytes = 0
+
+    const processDocument = async (cb, doc) => {
+      try {
+        if (doc) {
+          const docBytes = Buffer.from(JSON.stringify(doc)).byteLength
+          // -> Document too large
+          if (docBytes >= MAX_DOCUMENT_BYTES) {
+            throw new Error('Document exceeds maximum size allowed by Algolia.')
+          }
+
+          // -> Current batch exceeds size hard limit, flush
+          if (docBytes + COMMA_BYTES + bytes >= MAX_INDEXING_BYTES) {
+            await flushBuffer()
+          }
+
+          if (chunks.length > 0) {
+            bytes += COMMA_BYTES
+          }
+          bytes += docBytes
+          chunks.push(doc)
+
+          // -> Current batch exceeds count soft limit, flush
+          if (chunks.length >= MAX_INDEXING_COUNT) {
+            await flushBuffer()
+          }
+        } else {
+          // -> End of stream, flush
+          await flushBuffer()
+        }
+        cb()
+      } catch (err) {
+        cb(err)
+      }
+    }
+
+    const flushBuffer = async () => {
+      WIKI.logger.info(`(SEARCH/ALGOLIA) Sending batch of ${chunks.length}...`)
+      try {
+        await this.index.addObjects(
+          _.map(chunks, doc => ({
+            objectID: doc.id,
+            locale: doc.locale,
+            path: doc.path,
+            title: doc.title,
+            description: doc.description,
+            content: doc.content
+          }))
+        )
+      } catch (err) {
+        WIKI.logger.warn('(SEARCH/ALGOLIA) Failed to send batch to Algolia: ', err)
+      }
+      chunks.length = 0
+      bytes = 0
+    }
+
+    await pipeline(
+      WIKI.models.knex.column({ id: 'hash' }, 'path', { locale: 'localeCode' }, 'title', 'description', 'content').select().from('pages').where({
+        isPublished: true,
+        isPrivate: false
+      }).stream(),
+      new Transform({
+        objectMode: true,
+        transform: async (chunk, enc, cb) => processDocument(cb, chunk),
+        flush: async (cb) => processDocument(cb)
+      })
+    )
+    WIKI.logger.info(`(SEARCH/ALGOLIA) Index rebuilt successfully.`)
   }
 }
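The rebuild() method above batches documents by the size the final JSON payload will have: a 10 MB budget pre-reduced by the surrounding [ and ] brackets, plus each document's serialized size, plus one comma for every element after the first, which is exactly what JSON.stringify of the whole array costs. A minimal runnable sketch of that accounting, isolated from the stream plumbing (add and flush are hypothetical helper names, and the sample documents are made up):

const MAX_INDEXING_BYTES = 10 * Math.pow(2, 20) - Buffer.from('[').byteLength - Buffer.from(']').byteLength
const COMMA_BYTES = Buffer.from(',').byteLength

let chunks = []
let bytes = 0

const flush = () => {
  console.log(`flushing ${chunks.length} docs, ~${bytes} payload bytes`)
  chunks = []
  bytes = 0
}

const add = (doc) => {
  const docBytes = Buffer.from(JSON.stringify(doc)).byteLength
  // Flush first if this doc plus a separating comma would blow the budget
  if (docBytes + COMMA_BYTES + bytes >= MAX_INDEXING_BYTES) flush()
  // A comma is only counted before every element after the first
  if (chunks.length > 0) bytes += COMMA_BYTES
  bytes += docBytes
  chunks.push(doc)
}

add({ objectID: '1', title: 'Home', content: 'x'.repeat(4096) })
add({ objectID: '2', title: 'About', content: 'y'.repeat(4096) })
flush() // end of stream: send whatever remains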
@@ -2,6 +2,8 @@ const _ = require('lodash')
 const AWS = require('aws-sdk')
+const { pipeline, Transform } = require('stream')

 /* global WIKI */

 module.exports = {
   async activate() {
     // not used
@@ -110,12 +112,12 @@ module.exports = {
       rebuildIndex = true
     }

-    //-> Define suggester
+    // -> Define suggester
     const suggesters = await this.client.describeSuggesters({
       DomainName: this.config.domain,
       SuggesterNames: ['default_suggester']
     }).promise()
-    if(_.get(suggesters, 'Suggesters', []).length < 1) {
+    if (_.get(suggesters, 'Suggesters', []).length < 1) {
       WIKI.logger.info(`(SEARCH/AWS) Defining Suggester...`)
       await this.client.defineSuggester({
         DomainName: this.config.domain,
@@ -323,7 +325,7 @@ module.exports = {
     const flushBuffer = async () => {
       WIKI.logger.info(`(SEARCH/AWS) Sending batch of ${chunks.length}...`)
       try {
-        const resp = await this.clientDomain.uploadDocuments({
+        await this.clientDomain.uploadDocuments({
          contentType: 'application/json',
          documents: JSON.stringify(_.map(chunks, doc => ({
            type: 'add',
@@ -351,8 +353,8 @@ module.exports = {
       }).stream(),
       new Transform({
         objectMode: true,
-        transform: async (chunk, enc, cb) => await processDocument(cb, chunk),
-        flush: async (cb) => await processDocument(cb)
+        transform: async (chunk, enc, cb) => processDocument(cb, chunk),
+        flush: async (cb) => processDocument(cb)
       })
     )
@@ -364,4 +366,3 @@ module.exports = {
     WIKI.logger.info(`(SEARCH/AWS) Index rebuilt successfully.`)
   }
 }
-
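In the two Transform hunks above, the `await` in front of processDocument goes away: processDocument signals completion and errors exclusively through the stream callback cb, so the promise returned by the async arrow is unused and awaiting it added nothing. A self-contained sketch of that callback-driven pattern (all identifiers here are toy names, not from the commit):

const { pipeline, Readable, Transform, Writable } = require('stream')

// Mimics processDocument: async work whose outcome is reported via cb
const processItem = async (item, cb, push) => {
  try {
    push(String(item).toUpperCase())
    cb()        // success flows through the callback...
  } catch (err) {
    cb(err)     // ...and so do errors; the returned promise is ignored
  }
}

pipeline(
  Readable.from(['alpha', 'beta']),
  new Transform({
    objectMode: true,
    // No await needed: cb drives the stream, not the promise
    transform (chunk, enc, cb) { processItem(chunk, cb, this.push.bind(this)) }
  }),
  new Writable({
    objectMode: true,
    write (chunk, enc, cb) { console.log(chunk); cb() }
  }),
  err => console.log(err ? `failed: ${err.message}` : 'done')
)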
@@ -3,6 +3,8 @@ const { SearchService, QueryType } = require('azure-search-client')
 const request = require('request-promise')
+const { pipeline } = require('stream')

 /* global WIKI */

 module.exports = {
   async activate() {
     // not used
@@ -20,7 +22,7 @@ module.exports = {
     // -> Create Search Index
     const indexes = await this.client.indexes.list()
     if (!_.find(_.get(indexes, 'result.value', []), ['name', this.config.indexName])) {
-      WIKI.logger.info(`(SEARCH/AWS) Creating index...`)
+      WIKI.logger.info(`(SEARCH/AZURE) Creating index...`)
       await this.client.indexes.create({
         name: this.config.indexName,
         fields: [
@@ -74,7 +76,7 @@ module.exports = {
           searchMode: 'analyzingInfixMatching',
           sourceFields: ['title', 'description', 'content']
         }
-      ],
+      ]
       })
     }
     WIKI.logger.info(`(SEARCH/AZURE) Initialization completed.`)
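One detail worth noting in the context lines above: `_.find(_.get(indexes, 'result.value', []), ['name', this.config.indexName])` uses lodash's matchesProperty shorthand, where the array ['name', value] stands for the predicate item => item.name === value. A quick illustration with made-up data:

const _ = require('lodash')

const indexes = { result: { value: [{ name: 'wiki' }, { name: 'docs' }] } }

_.find(_.get(indexes, 'result.value', []), ['name', 'docs'])  // => { name: 'docs' }
_.find(_.get(indexes, 'result.value', []), ['name', 'nope'])  // => undefined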