feat: azure search module + rebuild index
@@ -38,7 +38,7 @@ module.exports = {
SearchMutation: {
async updateSearchEngines(obj, args, context) {
try {
- for (let searchEngine of args.searchEngines) {
+ for (let searchEngine of args.engines) {
await WIKI.models.searchEngines.query().patch({
isEnabled: searchEngine.isEnabled,
config: _.reduce(searchEngine.config, (result, value, key) => {
@@ -47,12 +47,23 @@ module.exports = {
}, {})
}).where('key', searchEngine.key)
}
await WIKI.models.searchEngines.initEngine({ activate: true })
return {
responseResult: graphHelper.generateSuccess('Search Engines updated successfully')
}
} catch (err) {
return graphHelper.generateError(err)
}
},
async rebuildIndex (obj, args, context) {
try {
await WIKI.data.searchEngine.rebuild()
return {
responseResult: graphHelper.generateSuccess('Index rebuilt successfully')
}
} catch (err) {
return graphHelper.generateError(err)
}
}
}
}
@@ -102,7 +102,7 @@ type PageSearchResponse {
}

type PageSearchResult {
- id: Int!
+ id: String!
title: String!
description: String!
path: String!
@@ -27,8 +27,10 @@ type SearchQuery {

type SearchMutation {
updateSearchEngines(
- searchEngines: [SearchEngineInput]
+ engines: [SearchEngineInput]
): DefaultResponse @auth(requires: ["manage:system"])

rebuildIndex: DefaultResponse @auth(requires: ["manage:system"])
}

# -----------------------------------------------
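For context, a minimal sketch of how a client might drive these two mutations over HTTP. It assumes the SearchMutation type is mounted under a `search` root mutation field and that DefaultResponse exposes a `responseResult { succeeded message }` payload, as elsewhere in the schema; the endpoint, token and engine values are placeholders, not part of this commit.

// Illustrative only: endpoint, token and engine values are placeholders.
const request = require('request-promise')

async function enableAzureSearch (token) {
  return request({
    method: 'post',
    uri: 'https://wiki.example.com/graphql',
    headers: { Authorization: `Bearer ${token}` },
    json: true,
    body: {
      query: `mutation ($engines: [SearchEngineInput]) {
        search {
          updateSearchEngines(engines: $engines) { responseResult { succeeded message } }
          rebuildIndex { responseResult { succeeded message } }
        }
      }`,
      variables: {
        engines: [
          { key: 'azure', isEnabled: true, config: [{ key: 'serviceName', value: 'my-wiki-search' }] }
        ]
      }
    }
  })
}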
@@ -42,6 +44,7 @@ type SearchEngine {
description: String
logo: String
website: String
isAvailable: Boolean
config: [KeyValuePair]
}
@@ -69,6 +69,10 @@ module.exports = {
message: 'Invalid locale or namespace.',
code: 1009
}),
SearchActivationFailed: CustomError('SearchActivationFailed', {
message: 'Search Engine activation failed.',
code: 1019
}),
UserCreationFailed: CustomError('UserCreationFailed', {
message: 'An unexpected error occured during user creation.',
code: 1010
@@ -210,6 +210,7 @@ module.exports = class Page extends Model {
isPrivate: opts.isPrivate
})
await WIKI.models.pages.renderPage(page)
await WIKI.data.searchEngine.created(page)
if (!opts.skipStorage) {
await WIKI.models.storage.pageEvent({
event: 'created',
@@ -245,6 +246,7 @@ module.exports = class Page extends Model {
isPrivate: ogPage.isPrivate
})
await WIKI.models.pages.renderPage(page)
await WIKI.data.searchEngine.updated(page)
if (!opts.skipStorage) {
await WIKI.models.storage.pageEvent({
event: 'updated',
@@ -273,6 +275,7 @@ module.exports = class Page extends Model {
})
await WIKI.models.pages.query().delete().where('id', page.id)
await WIKI.models.pages.deletePageFromCache(page)
await WIKI.data.searchEngine.deleted(page)
if (!opts.skipStorage) {
await WIKI.models.storage.pageEvent({
event: 'deleted',
@@ -95,11 +95,25 @@ module.exports = class SearchEngine extends Model {
}
}

- static async initEngine() {
+ static async initEngine({ activate = false } = {}) {
const searchEngine = await WIKI.models.searchEngines.query().findOne('isEnabled', true)
if (searchEngine) {
WIKI.data.searchEngine = require(`../modules/search/${searchEngine.key}/engine`)
WIKI.data.searchEngine.config = searchEngine.config
if (activate) {
try {
await WIKI.data.searchEngine.activate()
} catch (err) {
// -> Revert to basic engine
if (err instanceof WIKI.Error.SearchActivationFailed) {
await WIKI.models.searchEngines.query().patch({ isEnabled: false }).where('key', searchEngine.key)
await WIKI.models.searchEngines.query().patch({ isEnabled: true }).where('key', 'db')
await WIKI.models.searchEngines.initEngine()
}
throw err
}
}

try {
await WIKI.data.searchEngine.init()
} catch (err) {
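To make the new activation contract concrete: a hypothetical engine module could throw the new SearchActivationFailed error from activate() when its requirements are not met, and initEngine({ activate: true }) above would then disable it and fall back to the built-in db engine. A sketch under those assumptions, not part of this commit:

// Hypothetical engine module (modules/search/<key>/engine.js) illustrating the activation contract.
module.exports = {
  async activate () {
    // Throwing WIKI.Error.SearchActivationFailed triggers the revert-to-db fallback in initEngine().
    if (!this.config.apiKey) {
      throw new WIKI.Error.SearchActivationFailed('Missing API key!')
    }
  },
  async deactivate () {
    // not used
  },
  async init () {
    // Set up the remote client here once the engine is enabled.
  }
}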
@@ -107,19 +121,4 @@ module.exports = class SearchEngine extends Model {
}
}
}

- static async pageEvent({ event, page }) {
- const searchEngines = await WIKI.models.storage.query().where('isEnabled', true)
- if (searchEngines && searchEngines.length > 0) {
- _.forEach(searchEngines, logger => {
- WIKI.queue.job.syncStorage.add({
- event,
- logger,
- page
- }, {
- removeOnComplete: true
- })
- })
- }
- }
}
@@ -4,6 +4,7 @@ description: Algolia is a powerful search-as-a-service solution, made easy to us
author: requarks.io
logo: https://static.requarks.io/logo/algolia.svg
website: https://www.algolia.com/
isAvailable: false
props:
appId:
type: String
@@ -4,4 +4,5 @@ description: Amazon CloudSearch is a managed service in the AWS Cloud that makes
author: requarks.io
logo: https://static.requarks.io/logo/aws-cloudsearch.svg
website: https://aws.amazon.com/cloudsearch/
isAvailable: false
props: {}
@@ -4,4 +4,21 @@ description: AI-Powered cloud search service for web and mobile app development.
author: requarks.io
logo: https://static.requarks.io/logo/azure.svg
website: https://azure.microsoft.com/services/search/
- props: {}
isAvailable: true
props:
serviceName:
type: String
title: Service Name
hint: The name of the Azure Search Service. Found under Properties.
order: 1
adminKey:
type: String
title: Admin API Key
hint: Either the primary or secondary admin key. Found under Keys.
order: 2
indexName:
type: String
title: Index Name
hint: 'Name to use when creating the index. (default: wiki)'
default: wiki
order: 3
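The props declared above surface in the admin UI as key/value pairs; updateSearchEngines reduces them back to a flat object (the reduce body itself sits outside the hunk shown earlier), and the engine reads the result as this.config.*. A rough sketch of that shape, with placeholder values:

// Placeholder values; this reduction approximates what the resolver stores.
const _ = require('lodash')

const configPairs = [
  { key: 'serviceName', value: 'my-wiki-search' },
  { key: 'adminKey', value: 'xxxxxxxxxxxxxxxx' },
  { key: 'indexName', value: 'wiki' }
]

const config = _.reduce(configPairs, (result, pair) => {
  result[pair.key] = pair.value
  return result
}, {})

// config.serviceName, config.adminKey and config.indexName are what the Azure engine consumes.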
@@ -1,26 +1,213 @@
const _ = require('lodash')
const { SearchService, QueryType } = require('azure-search-client')
const request = require('request-promise')
const { pipeline } = require('stream')

module.exports = {
- activate() {

async activate() {
// not used
},
- deactivate() {

async deactivate() {
// not used
},
- query() {
/**
* INIT
*/
async init() {
this.client = new SearchService(this.config.serviceName, this.config.adminKey)

// -> Create Search Index
const indexes = await this.client.indexes.list()
if (!_.find(_.get(indexes, 'result.value', []), ['name', this.config.indexName])) {
await this.client.indexes.create({
name: this.config.indexName,
fields: [
{
name: 'id',
type: 'Edm.String',
key: true,
searchable: false
},
{
name: 'locale',
type: 'Edm.String',
searchable: false
},
{
name: 'path',
type: 'Edm.String',
searchable: false
},
{
name: 'title',
type: 'Edm.String',
searchable: true
},
{
name: 'description',
type: 'Edm.String',
searchable: true
},
{
name: 'content',
type: 'Edm.String',
searchable: true
}
],
scoringProfiles: [
{
name: 'fieldWeights',
text: {
weights: {
title: 4,
description: 3,
content: 1
}
}
}
],
suggesters: [
{
name: 'suggestions',
searchMode: 'analyzingInfixMatching',
sourceFields: ['title', 'description', 'content']
}
],
})
}
},
- created() {

/**
* QUERY
*
* @param {String} q Query
* @param {Object} opts Additional options
*/
async query(q, opts) {
try {
let suggestions = []
const results = await this.client.indexes.use(this.config.indexName).search({
count: true,
scoringProfile: 'fieldWeights',
search: q,
select: 'id, locale, path, title, description',
queryType: QueryType.simple,
top: 50
})
if (results.result.value.length < 5) {
// Using plain request, not yet available in library...
try {
const suggestResults = await request({
uri: `https://${this.config.serviceName}.search.windows.net/indexes/${this.config.indexName}/docs/autocomplete`,
method: 'post',
qs: {
'api-version': '2017-11-11-Preview'
},
headers: {
'api-key': this.config.adminKey,
'Content-Type': 'application/json'
},
json: true,
body: {
autocompleteMode: 'oneTermWithContext',
search: q,
suggesterName: 'suggestions'
}
})
suggestions = suggestResults.value.map(s => s.queryPlusText)
} catch (err) {
WIKI.logger.warn('Search Engine suggestion failure: ', err)
}
}
return {
results: results.result.value,
suggestions,
totalHits: results.result['@odata.count']
}
} catch (err) {
WIKI.logger.warn('Search Engine Error:')
WIKI.logger.warn(err)
}
},
- updated() {

/**
* CREATE
*
* @param {Object} page Page to create
*/
async created(page) {
await this.client.indexes.use(this.config.indexName).index([
{
id: page.hash,
locale: page.localeCode,
path: page.path,
title: page.title,
description: page.description,
content: page.content
}
])
},
- deleted() {

/**
* UPDATE
*
* @param {Object} page Page to update
*/
async updated(page) {
await this.client.indexes.use(this.config.indexName).index([
{
id: page.hash,
locale: page.localeCode,
path: page.path,
title: page.title,
description: page.description,
content: page.content
}
])
},
- renamed() {

/**
* DELETE
*
* @param {Object} page Page to delete
*/
async deleted(page) {
await this.client.indexes.use(this.config.indexName).index([
{
'@search.action': 'delete',
id: page.hash
}
])
},
- rebuild() {

/**
* RENAME
*
* @param {Object} page Page to rename
*/
async renamed(page) {
await this.client.indexes.use(this.config.indexName).index([
{
'@search.action': 'delete',
id: page.sourceHash
}
])
await this.client.indexes.use(this.config.indexName).index([
{
id: page.destinationHash,
locale: page.localeCode,
path: page.destinationPath,
title: page.title,
description: page.description,
content: page.content
}
])
},
/**
* REBUILD INDEX
*/
async rebuild() {
await pipeline(
WIKI.models.knex.column({ id: 'hash' }, 'path', { locale: 'localeCode' }, 'title', 'description', 'content').select().from('pages').where({
isPublished: true,
isPrivate: false
}).stream(),
this.client.indexes.use(this.config.indexName).createIndexingStream()
)
}
}
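A standalone sketch of exercising the module above outside of a running wiki, assuming valid Azure Search credentials; only methods defined in this file are used and all values are placeholders (the query() error path also expects a global WIKI.logger, so run it inside the Wiki.js process or stub that out):

// Placeholder credentials; assumes the azure-search-client package required above is installed.
const engine = require('./engine')

engine.config = {
  serviceName: 'my-wiki-search',
  adminKey: 'xxxxxxxxxxxxxxxx',
  indexName: 'wiki'
}

async function main () {
  await engine.init() // creates the index on first run
  await engine.created({
    hash: 'abc123',
    localeCode: 'en',
    path: 'home',
    title: 'Home',
    description: 'Landing page',
    content: 'Welcome to the wiki'
  })
  const res = await engine.query('welcome', {})
  console.log(res.totalHits, res.results.length, res.suggestions)
}

main().catch(err => { console.error(err) })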
@@ -4,4 +4,5 @@ description: Default basic database-based search engine.
author: requarks.io
logo: https://static.requarks.io/logo/database.svg
website: https://www.requarks.io/
isAvailable: true
props: {}
@@ -4,6 +4,7 @@ description: Elasticsearch is a distributed, RESTful search and analytics engine
author: requarks.io
logo: https://static.requarks.io/logo/elasticsearch.svg
website: https://www.elastic.co/products/elasticsearch
isAvailable: false
props:
apiVersion:
type: String
@@ -4,4 +4,5 @@ description: High performance full-text search engine with SQL and JSON support.
author: requarks.io
logo: https://static.requarks.io/logo/manticore.svg
website: https://manticoresearch.com/
isAvailable: false
props: {}
@@ -4,6 +4,7 @@ description: Advanced PostgreSQL-based search engine.
author: requarks.io
logo: https://static.requarks.io/logo/postgresql.svg
website: https://www.requarks.io/
isAvailable: true
props:
dictLanguage:
type: String
@@ -3,7 +3,9 @@ const tsquery = require('pg-tsquery')()

module.exports = {
async activate() {
- // not used
+ if (WIKI.config.db.type !== 'postgres') {
+ throw new WIKI.Error.SearchActivationFailed('Must use PostgreSQL database to activate this engine!')
+ }
},
async deactivate() {
// not used
@@ -75,7 +77,7 @@ module.exports = {
INSERT INTO "pagesVector" (path, locale, title, description, tokens) VALUES (
'?', '?', '?', '?', (setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'A') || setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'B') || setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'C'))
)
- `, [page.path, page.locale, page.title, page.description, page.title, page.description, page.content])
+ `, [page.path, page.localeCode, page.title, page.description, page.title, page.description, page.content])
},
/**
* UPDATE
@@ -85,13 +87,13 @@ module.exports = {
async updated(page) {
await WIKI.models.knex.raw(`
UPDATE "pagesVector" SET
- title = '?',
- description = '?',
- tokens = (setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'A') ||
- setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'B') ||
- setweight(to_tsvector('${this.config.dictLanguage}', '?'), 'C'))
- WHERE path = '?' AND locale = '?' LIMIT 1
- `, [page.title, page.description, page.title, page.description, page.content, page.path, page.locale])
+ title = ?,
+ description = ?,
+ tokens = (setweight(to_tsvector('${this.config.dictLanguage}', ?), 'A') ||
+ setweight(to_tsvector('${this.config.dictLanguage}', ?), 'B') ||
+ setweight(to_tsvector('${this.config.dictLanguage}', ?), 'C'))
+ WHERE path = ? AND locale = ?
+ `, [page.title, page.description, page.title, page.description, page.content, page.path, page.localeCode])
},
/**
* DELETE
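The quoting change here is the substance of the fix: knex.raw binds bare ? placeholders itself (escaping the values), so wrapping them in quotes as '?' interferes with that substitution and produces the wrong SQL; PostgreSQL also does not accept LIMIT on UPDATE, hence its removal. A small sketch, assuming a configured knex instance and placeholder values:

// Sketch only: assumes a configured knex/pg instance; table and values are placeholders.
const knex = require('knex')({ client: 'pg', connection: process.env.DATABASE_URL })

async function updateTitle () {
  // Correct: bare ? placeholders are bound and escaped by knex itself.
  await knex.raw('UPDATE "pagesVector" SET title = ? WHERE path = ? AND locale = ?', ['Home', 'home', 'en'])
  // Old form: quoting the placeholders ('?') breaks that binding and yields the wrong SQL,
  // which is why this commit drops the quotes (and the unsupported LIMIT clause).
}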
@@ -100,7 +102,7 @@ module.exports = {
*/
async deleted(page) {
await WIKI.models.knex('pagesVector').where({
- locale: page.locale,
+ locale: page.localeCode,
path: page.path
}).del().limit(1)
},
@@ -111,12 +113,12 @@ module.exports = {
*/
async renamed(page) {
await WIKI.models.knex('pagesVector').where({
- locale: page.locale,
+ locale: page.localeCode,
path: page.sourcePath
}).update({
- locale: page.locale,
+ locale: page.localeCode,
path: page.destinationPath
- }).limit(1)
+ })
},
/**
* REBUILD INDEX
@@ -4,6 +4,7 @@ description: Solr is the popular, blazing-fast, open source enterprise search pl
author: requarks.io
logo: https://static.requarks.io/logo/solr.svg
website: http://lucene.apache.org/solr/
isAvailable: false
props:
host:
type: String
@@ -4,4 +4,5 @@ description: Sphinx is an open source full text search server, designed from the
author: requarks.io
logo: https://static.requarks.io/logo/sphinx.svg
website: http://sphinxsearch.com/
isAvailable: false
props: {}