const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')

/* global WIKI */

/**
 * SearchEngine model
 */
module.exports = class SearchEngine extends Model {
  static get tableName() { return 'searchEngines' }
  static get idColumn() { return 'key' }

  static get jsonSchema () {
    return {
      type: 'object',
      required: ['key', 'isEnabled'],

      properties: {
        key: {type: 'string'},
        isEnabled: {type: 'boolean'},
        level: {type: 'string'},
        config: {type: 'object'}
      }
    }
  }

  static async getSearchEngines() {
    return WIKI.models.searchEngines.query()
  }
  static async refreshSearchEnginesFromDisk() {
    let trx
    try {
      const dbSearchEngines = await WIKI.models.searchEngines.query()

      // -> Fetch definitions from disk
      const searchEnginesDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/search'))
      let diskSearchEngines = []
      for (let dir of searchEnginesDirs) {
        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/search', dir, 'definition.yml'), 'utf8')
        diskSearchEngines.push(yaml.safeLoad(def))
      }
      WIKI.data.searchEngines = diskSearchEngines.map(searchEngine => ({
        ...searchEngine,
        props: commonHelper.parseModuleProps(searchEngine.props)
      }))

      // -> Insert new searchEngines
      let newSearchEngines = []
      for (let searchEngine of WIKI.data.searchEngines) {
        if (!_.some(dbSearchEngines, ['key', searchEngine.key])) {
          // Not in the database yet: seed its config with the default value of every prop
          newSearchEngines.push({
            key: searchEngine.key,
            isEnabled: false,
            config: _.transform(searchEngine.props, (result, value, key) => {
              _.set(result, key, value.default)
              return result
            }, {})
          })
        } else {
          // Already in the database: merge defaults for any props missing from the stored config
          const searchEngineConfig = _.get(_.find(dbSearchEngines, ['key', searchEngine.key]), 'config', {})
          await WIKI.models.searchEngines.query().patch({
            config: _.transform(searchEngine.props, (result, value, key) => {
              if (!_.has(result, key)) {
                _.set(result, key, value.default)
              }
              return result
            }, searchEngineConfig)
          }).where('key', searchEngine.key)
        }
      }
      if (newSearchEngines.length > 0) {
        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
        for (let searchEngine of newSearchEngines) {
          await WIKI.models.searchEngines.query(trx).insert(searchEngine)
        }
        await trx.commit()
        WIKI.logger.info(`Loaded ${newSearchEngines.length} new search engines: [ OK ]`)
      } else {
        WIKI.logger.info(`No new search engines found: [ SKIPPED ]`)
      }
    } catch (err) {
      WIKI.logger.error(`Failed to scan or load new search engines: [ FAILED ]`)
      WIKI.logger.error(err)
      if (trx) {
        trx.rollback()
      }
    }
  }
  static async pageEvent({ event, page }) {
    // Enqueue a background sync job for each enabled target when a page event occurs
    const searchEngines = await WIKI.models.storage.query().where('isEnabled', true)
    if (searchEngines && searchEngines.length > 0) {
      _.forEach(searchEngines, logger => {
        WIKI.queue.job.syncStorage.add({
          event,
          logger,
          page
        }, {
          removeOnComplete: true
        })
      })
    }
  }
}
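
The two `_.transform` calls in refreshSearchEnginesFromDisk are where default settings come from: a brand-new engine gets the `default` value of every prop, while an existing engine only receives defaults for props missing from its stored config. Below is a minimal sketch of that behavior using only lodash; the prop names (`apiKey`, `maxResults`) are made up for illustration and are not part of any actual module definition.

const _ = require('lodash')

// Hypothetical parsed props, as they might come out of a definition.yml
const props = {
  apiKey: { type: 'string', default: '' },
  maxResults: { type: 'number', default: 10 }
}

// New engine: start from an empty accumulator, so every prop receives its default
const freshConfig = _.transform(props, (result, value, key) => {
  _.set(result, key, value.default)
  return result
}, {})
// freshConfig -> { apiKey: '', maxResults: 10 }

// Existing engine: start from the stored config and only fill in missing keys
const storedConfig = { apiKey: 'abc123' }
const mergedConfig = _.transform(props, (result, value, key) => {
  if (!_.has(result, key)) {
    _.set(result, key, value.default)
  }
  return result
}, storedConfig)
// mergedConfig -> { apiKey: 'abc123', maxResults: 10 }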