
feat(admin): export tool for full migration / backup (#5294)

* feat: export content utility (wip)

* feat: export navigation + groups + users

* feat: export comments + navigation + pages + pages history + settings

* feat: export assets
pull/5304/head v2.5.282
Nicolas Giard 2 years ago
committed by GitHub
commit cd33ff0afb
9 changed files with 721 additions and 5 deletions
  1. client/components/admin/admin-utilities-export.vue (272)
  2. client/components/admin/admin-utilities.vue (7)
  3. client/static/svg/icon-big-parcel.svg (1)
  4. dev/index.js (2)
  5. server/core/kernel.js (6)
  6. server/core/scheduler.js (2)
  7. server/core/system.js (380)
  8. server/graph/resolvers/system.js (41)
  9. server/graph/schemas/system.graphql (15)

client/components/admin/admin-utilities-export.vue (272)

@@ -0,0 +1,272 @@
<template lang='pug'>
v-card
v-toolbar(flat, color='primary', dark, dense)
.subtitle-1 {{ $t('admin:utilities.exportTitle') }}
v-card-text
.text-center
img.animated.fadeInUp.wait-p1s(src='/_assets/svg/icon-big-parcel.svg')
.body-2 Export to tarball / file system
v-divider.my-4
.body-2 What do you want to export?
v-checkbox(
v-for='choice of entityChoices'
:key='choice.key'
:label='choice.label'
:value='choice.key'
color='deep-orange darken-2'
hide-details
v-model='entities'
)
template(v-slot:label)
div
strong.deep-orange--text.text--darken-2 {{choice.label}}
.text-caption {{choice.hint}}
v-text-field.mt-7(
outlined
label='Target Folder Path'
hint='Either an absolute path or relative to the Wiki.js installation folder, where exported content will be saved to. Note that the folder MUST be empty!'
persistent-hint
v-model='filePath'
)
v-alert.mt-3(color='deep-orange', outlined, icon='mdi-alert', prominent)
.body-2 Depending on your selection, the archive could contain sensitive data such as site configuration keys and hashed user passwords. Ensure the exported archive is treated accordingly.
.body-2 For example, you may want to encrypt the archive if stored for backup purposes.
v-card-chin
v-btn.px-3(depressed, color='deep-orange darken-2', :disabled='entities.length < 1', @click='startExport').ml-0
v-icon(left, color='white') mdi-database-export
span.white--text Start Export
v-dialog(
v-model='isLoading'
persistent
max-width='350'
)
v-card(color='deep-orange darken-2', dark)
v-card-text.pa-10.text-center
self-building-square-spinner.animated.fadeIn(
:animation-duration='4500'
:size='40'
color='#FFF'
style='margin: 0 auto;'
)
.mt-5.body-1.white--text Exporting...
.caption Please wait, this may take a while
v-progress-linear.mt-5(
color='white'
:value='progress'
stream
rounded
:buffer-value='0'
)
v-dialog(
v-model='isSuccess'
persistent
max-width='350'
)
v-card(color='green darken-2', dark)
v-card-text.pa-10.text-center
v-icon(size='60') mdi-check-circle-outline
.my-5.body-1.white--text Export completed
v-card-actions.green.darken-1
v-spacer
v-btn.px-5(
color='white'
outlined
@click='isSuccess = false'
) Close
v-spacer
v-dialog(
v-model='isFailed'
persistent
max-width='800'
)
v-card(color='red darken-2', dark)
v-toolbar(color='red darken-2', dense)
v-icon mdi-alert
.body-2.pl-3 Export failed
v-spacer
v-btn.px-5(
color='white'
text
@click='isFailed = false'
) Close
v-card-text.pa-5.red.darken-4.white--text
span {{errorMessage}}
</template>
<script>
import { SelfBuildingSquareSpinner } from 'epic-spinners'
import gql from 'graphql-tag'
import _get from 'lodash/get'
export default {
components: {
SelfBuildingSquareSpinner
},
data() {
return {
entities: [],
filePath: './data/export',
isLoading: false,
isSuccess: false,
isFailed: false,
errorMessage: '',
progress: 0
}
},
computed: {
entityChoices () {
return [
{
key: 'assets',
label: 'Assets',
hint: 'Media files such as images, documents, etc.'
},
{
key: 'comments',
label: 'Comments',
hint: 'Comments made using the default comment module only.'
},
{
key: 'navigation',
label: 'Navigation',
hint: 'Sidebar links when using Static or Custom Navigation.'
},
{
key: 'pages',
label: 'Pages',
hint: 'Page content, tags and related metadata.'
},
{
key: 'history',
label: 'Pages History',
hint: 'All previous versions of pages and their related metadata.'
},
{
key: 'settings',
label: 'Settings',
hint: 'Site configuration and modules settings.'
},
{
key: 'groups',
label: 'User Groups',
hint: 'Group permissions and page rules.'
},
{
key: 'users',
label: 'Users',
hint: 'Users metadata and their group memberships.'
}
]
}
},
methods: {
async checkProgress () {
try {
const respStatus = await this.$apollo.query({
query: gql`
{
system {
exportStatus {
status
progress
message
startedAt
}
}
}
`,
fetchPolicy: 'network-only'
})
const respStatusObj = _get(respStatus, 'data.system.exportStatus', {})
if (!respStatusObj) {
throw new Error('An unexpected error occured.')
} else {
switch (respStatusObj.status) {
case 'error': {
throw new Error(respStatusObj.message || 'An unexpected error occured.')
}
case 'running': {
this.progress = respStatusObj.progress || 0
window.requestAnimationFrame(() => {
setTimeout(() => {
this.checkProgress()
}, 5000)
})
break
}
case 'success': {
this.isLoading = false
this.isSuccess = true
break
}
default: {
throw new Error('Invalid export status.')
}
}
}
} catch (err) {
this.errorMessage = err.message
this.isLoading = false
this.isFailed = true
}
},
async startExport () {
this.isFailed = false
this.isSuccess = false
this.isLoading = true
this.progress = 0
setTimeout(async () => {
try {
// -> Initiate export
const respExport = await this.$apollo.mutate({
mutation: gql`
mutation (
$entities: [String]!
$path: String!
) {
system {
export (
entities: $entities
path: $path
) {
responseResult {
succeeded
message
}
}
}
}
`,
variables: {
entities: this.entities,
path: this.filePath
}
})
const respExportObj = _get(respExport, 'data.system.export', {})
if (!_get(respExportObj, 'responseResult.succeeded', false)) {
this.errorMessage = _get(respExportObj, 'responseResult.message', 'An unexpected error occurred')
this.isLoading = false
this.isFailed = true
return
}
// -> Check for progress
this.checkProgress()
} catch (err) {
this.$store.commit('pushGraphError', err)
this.isLoading = false
}
}, 1500)
}
}
}
</script>
<style lang='scss'>
</style>
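The alert in the component above warns that the export can contain sensitive data (configuration keys, hashed passwords) and suggests encrypting the archive when it is kept as a backup. As an illustrative sketch only, not part of this commit, one way to pack and encrypt the target folder afterwards, assuming tar and gpg are available on the host and using the component's default ./data/export path:

// Hypothetical post-export step: compress and encrypt the export folder.
// Paths and passphrase handling are illustrative, not part of Wiki.js.
const { execFileSync } = require('child_process')

const exportDir = './data/export'
const archive = './data/export.tar.gz'

// Pack the exported folder into a gzipped tarball
execFileSync('tar', ['-czf', archive, '-C', exportDir, '.'])

// Symmetrically encrypt the tarball (gpg prompts for a passphrase)
execFileSync('gpg', ['--symmetric', '--cipher-algo', 'AES256', archive], { stdio: 'inherit' })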

client/components/admin/admin-utilities.vue (7)

@@ -37,6 +37,7 @@ export default {
UtilityAuth: () => import(/* webpackChunkName: "admin" */ './admin-utilities-auth.vue'),
UtilityContent: () => import(/* webpackChunkName: "admin" */ './admin-utilities-content.vue'),
UtilityCache: () => import(/* webpackChunkName: "admin" */ './admin-utilities-cache.vue'),
UtilityExport: () => import(/* webpackChunkName: "admin" */ './admin-utilities-export.vue'),
UtilityImportv1: () => import(/* webpackChunkName: "admin" */ './admin-utilities-importv1.vue'),
UtilityTelemetry: () => import(/* webpackChunkName: "admin" */ './admin-utilities-telemetry.vue')
},
@@ -56,6 +57,12 @@ export default {
i18nKey: 'content',
isAvailable: true
},
{
key: 'UtilityExport',
icon: 'mdi-database-export',
i18nKey: 'export',
isAvailable: true
},
{
key: 'UtilityCache',
icon: 'mdi-database-refresh',

client/static/svg/icon-big-parcel.svg (1)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 48 48" width="144px" height="144px"><linearGradient id="rwH3R4FXAjAwf7QMo6soOa" x1="24.523" x2="39.672" y1="7.827" y2="22.933" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#c26715"/><stop offset=".508" stop-color="#b85515"/><stop offset="1" stop-color="#ad3f16"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOa)" d="M42,17H15V6h26c0.552,0,1,0.448,1,1V17z"/><linearGradient id="rwH3R4FXAjAwf7QMo6soOb" x1="7.292" x2="27.973" y1="1.98" y2="18.107" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#eba84b"/><stop offset="1" stop-color="#d97218"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOb)" d="M32,17H7c-0.552,0-1-0.448-1-1V7c0-0.552,0.448-1,1-1h25c0.552,0,1,0.448,1,1v9 C33,16.552,32.552,17,32,17z"/><path d="M42,14H6v2c0,0.552,0.448,1,1,1h8h17h10V14z" opacity=".05"/><path d="M42,14.5H6V16c0,0.552,0.448,1,1,1h8h17h10V14.5z" opacity=".07"/><linearGradient id="rwH3R4FXAjAwf7QMo6soOc" x1="27.534" x2="46.45" y1="492.536" y2="512.013" gradientTransform="translate(0 -474)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#eba600"/><stop offset="1" stop-color="#c28200"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOc)" d="M42,42H31V15h12c0.552,0,1,0.448,1,1v24C44,41.105,43.105,42,42,42z"/><linearGradient id="rwH3R4FXAjAwf7QMo6soOd" x1="5.418" x2="31.69" y1="488.435" y2="515.487" gradientTransform="translate(0 -474)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#ffd869"/><stop offset="1" stop-color="#fec52b"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOd)" d="M31,42H6c-1.105,0-2-0.895-2-2V16c0-0.552,0.448-1,1-1h28v25C33,41.105,32.105,42,31,42z"/><linearGradient id="rwH3R4FXAjAwf7QMo6soOe" x1="17.154" x2="17.154" y1="494.74" y2="463.029" gradientTransform="translate(0 -474)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#eba600"/><stop offset="1" stop-color="#c28200"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOe)" d="M33,15H4.618c-0.379,0-0.725,0.214-0.894,0.553l-2.362,4.724C1.196,20.609,1.437,21,1.809,21 h27.573c0.379,0,0.725-0.214,0.894-0.553L33,15z"/><linearGradient id="rwH3R4FXAjAwf7QMo6soOf" x1="39.846" x2="39.846" y1="494.729" y2="490.572" gradientTransform="translate(0 -474)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#ffd869"/><stop offset="1" stop-color="#fec52b"/></linearGradient><path fill="url(#rwH3R4FXAjAwf7QMo6soOf)" d="M33,15h10.382c0.379,0,0.725,0.214,0.894,0.553l2.362,4.724 C46.804,20.609,46.563,21,46.191,21h-9.573c-0.379,0-0.725-0.214-0.894-0.553L33,15z"/></svg>

dev/index.js (2)

@@ -60,7 +60,7 @@ const init = {
},
async reload() {
console.warn(chalk.yellow('--- Gracefully stopping server...'))
await global.WIKI.kernel.shutdown()
await global.WIKI.kernel.shutdown(true)
console.warn(chalk.yellow('--- Purging node modules cache...'))

server/core/kernel.js (6)

@@ -106,7 +106,7 @@ module.exports = {
/**
* Graceful shutdown
*/
async shutdown () {
async shutdown (devMode = false) {
if (WIKI.servers) {
await WIKI.servers.stopServers()
}
@@ -122,6 +122,8 @@
if (WIKI.asar) {
await WIKI.asar.unload()
}
process.exit(0)
if (!devMode) {
process.exit(0)
}
}
}

server/core/scheduler.js (2)

@@ -60,7 +60,7 @@ class Job {
cwd: WIKI.ROOTPATH,
stdio: ['inherit', 'inherit', 'pipe', 'ipc']
})
const stderr = [];
const stderr = []
proc.stderr.on('data', chunk => stderr.push(chunk))
this.finished = new Promise((resolve, reject) => {
proc.on('exit', (code, signal) => {

server/core/system.js (380)

@@ -3,6 +3,9 @@ const cfgHelper = require('../helpers/config')
const Promise = require('bluebird')
const fs = require('fs-extra')
const path = require('path')
const zlib = require('zlib')
const stream = require('stream')
const pipeline = Promise.promisify(stream.pipeline)
/* global WIKI */
@@ -14,6 +17,12 @@ module.exports = {
minimumVersionRequired: '2.0.0-beta.0',
minimumNodeRequired: '10.12.0'
},
exportStatus: {
status: 'notrunning',
progress: 0,
message: '',
updatedAt: null
},
init() {
// Clear content cache
fs.emptyDir(path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'cache'))
@@ -77,5 +86,376 @@
db.close()
})
})
},
/**
* Export Wiki to Disk
*/
async export (opts) {
this.exportStatus.status = 'running'
this.exportStatus.progress = 0
this.exportStatus.message = ''
this.exportStatus.startedAt = new Date()
WIKI.logger.info(`Export started to path ${opts.path}`)
WIKI.logger.info(`Entities to export: ${opts.entities.join(', ')}`)
const progressMultiplier = 1 / opts.entities.length
try {
for (const entity of opts.entities) {
switch (entity) {
// -----------------------------------------
// ASSETS
// -----------------------------------------
case 'assets': {
WIKI.logger.info('Exporting assets...')
const assetFolders = await WIKI.models.assetFolders.getAllPaths()
const assetsCountRaw = await WIKI.models.assets.query().count('* as total').first()
const assetsCount = parseInt(assetsCountRaw.total)
if (assetsCount < 1) {
WIKI.logger.warn('There are no assets to export! Skipping...')
break
}
const assetsProgressMultiplier = progressMultiplier / Math.ceil(assetsCount / 50)
WIKI.logger.info(`Found ${assetsCount} assets to export. Streaming to disk...`)
await pipeline(
WIKI.models.knex.select('filename', 'folderId', 'data').from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
new stream.Transform({
objectMode: true,
transform: async (asset, enc, cb) => {
const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
WIKI.logger.info(`Exporting asset ${filename}...`)
await fs.outputFile(path.join(opts.path, 'assets', filename), asset.data)
this.exportStatus.progress += assetsProgressMultiplier * 100
cb()
}
})
)
WIKI.logger.info('Export: assets saved to disk successfully.')
break
}
// -----------------------------------------
// COMMENTS
// -----------------------------------------
case 'comments': {
WIKI.logger.info('Exporting comments...')
const outputPath = path.join(opts.path, 'comments.json.gz')
const commentsCountRaw = await WIKI.models.comments.query().count('* as total').first()
const commentsCount = parseInt(commentsCountRaw.total)
if (commentsCount < 1) {
WIKI.logger.warn('There are no comments to export! Skipping...')
break
}
const commentsProgressMultiplier = progressMultiplier / Math.ceil(commentsCount / 50)
WIKI.logger.info(`Found ${commentsCount} comments to export. Streaming to file...`)
const rs = stream.Readable({ objectMode: true })
rs._read = () => {}
const fetchCommentsBatch = async (offset) => {
const comments = await WIKI.models.comments.query().offset(offset).limit(50).withGraphJoined({
author: true,
page: true
}).modifyGraph('author', builder => {
builder.select('users.id', 'users.name', 'users.email', 'users.providerKey')
}).modifyGraph('page', builder => {
builder.select('pages.id', 'pages.path', 'pages.localeCode', 'pages.title')
})
if (comments.length > 0) {
for (const cmt of comments) {
rs.push(cmt)
}
fetchCommentsBatch(offset + 50)
} else {
rs.push(null)
}
this.exportStatus.progress += commentsProgressMultiplier * 100
}
fetchCommentsBatch(0)
let marker = 0
await pipeline(
rs,
new stream.Transform({
objectMode: true,
transform (chunk, encoding, callback) {
marker++
let outputStr = marker === 1 ? '[\n' : ''
outputStr += JSON.stringify(chunk, null, 2)
if (marker < commentsCount) {
outputStr += ',\n'
}
callback(null, outputStr)
},
flush (callback) {
callback(null, '\n]')
}
}),
zlib.createGzip(),
fs.createWriteStream(outputPath)
)
WIKI.logger.info('Export: comments.json.gz created successfully.')
break
}
// -----------------------------------------
// GROUPS
// -----------------------------------------
case 'groups': {
WIKI.logger.info('Exporting groups...')
const outputPath = path.join(opts.path, 'groups.json')
const groups = await WIKI.models.groups.query()
await fs.outputJSON(outputPath, groups, { spaces: 2 })
WIKI.logger.info('Export: groups.json created successfully.')
this.exportStatus.progress += progressMultiplier * 100
break
}
// -----------------------------------------
// HISTORY
// -----------------------------------------
case 'history': {
WIKI.logger.info('Exporting pages history...')
const outputPath = path.join(opts.path, 'pages-history.json.gz')
const pagesCountRaw = await WIKI.models.pageHistory.query().count('* as total').first()
const pagesCount = parseInt(pagesCountRaw.total)
if (pagesCount < 1) {
WIKI.logger.warn('There are no pages history to export! Skipping...')
break
}
const pagesProgressMultiplier = progressMultiplier / Math.ceil(pagesCount / 10)
WIKI.logger.info(`Found ${pagesCount} pages history to export. Streaming to file...`)
const rs = stream.Readable({ objectMode: true })
rs._read = () => {}
const fetchPagesBatch = async (offset) => {
const pages = await WIKI.models.pageHistory.query().offset(offset).limit(10).withGraphJoined({
author: true,
page: true,
tags: true
}).modifyGraph('author', builder => {
builder.select('users.id', 'users.name', 'users.email', 'users.providerKey')
}).modifyGraph('page', builder => {
builder.select('pages.id', 'pages.title', 'pages.path', 'pages.localeCode')
}).modifyGraph('tags', builder => {
builder.select('tags.tag', 'tags.title')
})
if (pages.length > 0) {
for (const page of pages) {
rs.push(page)
}
fetchPagesBatch(offset + 10)
} else {
rs.push(null)
}
this.exportStatus.progress += pagesProgressMultiplier * 100
}
fetchPagesBatch(0)
let marker = 0
await pipeline(
rs,
new stream.Transform({
objectMode: true,
transform (chunk, encoding, callback) {
marker++
let outputStr = marker === 1 ? '[\n' : ''
outputStr += JSON.stringify(chunk, null, 2)
if (marker < pagesCount) {
outputStr += ',\n'
}
callback(null, outputStr)
},
flush (callback) {
callback(null, '\n]')
}
}),
zlib.createGzip(),
fs.createWriteStream(outputPath)
)
WIKI.logger.info('Export: pages-history.json.gz created successfully.')
break
}
// -----------------------------------------
// NAVIGATION
// -----------------------------------------
case 'navigation': {
WIKI.logger.info('Exporting navigation...')
const outputPath = path.join(opts.path, 'navigation.json')
const navigationRaw = await WIKI.models.navigation.query()
const navigation = navigationRaw.reduce((obj, cur) => {
obj[cur.key] = cur.config
return obj
}, {})
await fs.outputJSON(outputPath, navigation, { spaces: 2 })
WIKI.logger.info('Export: navigation.json created successfully.')
this.exportStatus.progress += progressMultiplier * 100
break
}
// -----------------------------------------
// PAGES
// -----------------------------------------
case 'pages': {
WIKI.logger.info('Exporting pages...')
const outputPath = path.join(opts.path, 'pages.json.gz')
const pagesCountRaw = await WIKI.models.pages.query().count('* as total').first()
const pagesCount = parseInt(pagesCountRaw.total)
if (pagesCount < 1) {
WIKI.logger.warn('There are no pages to export! Skipping...')
break
}
const pagesProgressMultiplier = progressMultiplier / Math.ceil(pagesCount / 10)
WIKI.logger.info(`Found ${pagesCount} pages to export. Streaming to file...`)
const rs = stream.Readable({ objectMode: true })
rs._read = () => {}
const fetchPagesBatch = async (offset) => {
const pages = await WIKI.models.pages.query().offset(offset).limit(10).withGraphJoined({
author: true,
creator: true,
tags: true
}).modifyGraph('author', builder => {
builder.select('users.id', 'users.name', 'users.email', 'users.providerKey')
}).modifyGraph('creator', builder => {
builder.select('users.id', 'users.name', 'users.email', 'users.providerKey')
}).modifyGraph('tags', builder => {
builder.select('tags.tag', 'tags.title')
})
if (pages.length > 0) {
for (const page of pages) {
rs.push(page)
}
fetchPagesBatch(offset + 10)
} else {
rs.push(null)
}
this.exportStatus.progress += pagesProgressMultiplier * 100
}
fetchPagesBatch(0)
let marker = 0
await pipeline(
rs,
new stream.Transform({
objectMode: true,
transform (chunk, encoding, callback) {
marker++
let outputStr = marker === 1 ? '[\n' : ''
outputStr += JSON.stringify(chunk, null, 2)
if (marker < pagesCount) {
outputStr += ',\n'
}
callback(null, outputStr)
},
flush (callback) {
callback(null, '\n]')
}
}),
zlib.createGzip(),
fs.createWriteStream(outputPath)
)
WIKI.logger.info('Export: pages.json.gz created successfully.')
break
}
// -----------------------------------------
// SETTINGS
// -----------------------------------------
case 'settings': {
WIKI.logger.info('Exporting settings...')
const outputPath = path.join(opts.path, 'settings.json')
const config = {
...WIKI.config,
modules: {
analytics: await WIKI.models.analytics.query(),
authentication: (await WIKI.models.authentication.query()).map(a => ({
...a,
domainWhitelist: _.get(a, 'domainWhitelist.v', []),
autoEnrollGroups: _.get(a, 'autoEnrollGroups.v', [])
})),
commentProviders: await WIKI.models.commentProviders.query(),
renderers: await WIKI.models.renderers.query(),
searchEngines: await WIKI.models.searchEngines.query(),
storage: await WIKI.models.storage.query()
},
apiKeys: await WIKI.models.apiKeys.query().where('isRevoked', false)
}
await fs.outputJSON(outputPath, config, { spaces: 2 })
WIKI.logger.info('Export: settings.json created successfully.')
this.exportStatus.progress += progressMultiplier * 100
break
}
// -----------------------------------------
// USERS
// -----------------------------------------
case 'users': {
WIKI.logger.info('Exporting users...')
const outputPath = path.join(opts.path, 'users.json.gz')
const usersCountRaw = await WIKI.models.users.query().count('* as total').first()
const usersCount = parseInt(usersCountRaw.total)
if (usersCount < 1) {
WIKI.logger.warn('There are no users to export! Skipping...')
break
}
const usersProgressMultiplier = progressMultiplier / Math.ceil(usersCount / 50)
WIKI.logger.info(`Found ${usersCount} users to export. Streaming to file...`)
const rs = stream.Readable({ objectMode: true })
rs._read = () => {}
const fetchUsersBatch = async (offset) => {
const users = await WIKI.models.users.query().offset(offset).limit(50).withGraphJoined({
groups: true,
provider: true
}).modifyGraph('groups', builder => {
builder.select('groups.id', 'groups.name')
}).modifyGraph('provider', builder => {
builder.select('authentication.key', 'authentication.strategyKey', 'authentication.displayName')
})
if (users.length > 0) {
for (const usr of users) {
rs.push(usr)
}
fetchUsersBatch(offset + 50)
} else {
rs.push(null)
}
this.exportStatus.progress += usersProgressMultiplier * 100
}
fetchUsersBatch(0)
let marker = 0
await pipeline(
rs,
new stream.Transform({
objectMode: true,
transform (chunk, encoding, callback) {
marker++
let outputStr = marker === 1 ? '[\n' : ''
outputStr += JSON.stringify(chunk, null, 2)
if (marker < usersCount) {
outputStr += ',\n'
}
callback(null, outputStr)
},
flush (callback) {
callback(null, '\n]')
}
}),
zlib.createGzip(),
fs.createWriteStream(outputPath)
)
WIKI.logger.info('Export: users.json.gz created successfully.')
break
}
}
}
this.exportStatus.status = 'success'
this.exportStatus.progress = 100
} catch (err) {
this.exportStatus.status = 'error'
this.exportStatus.message = err.message
}
}
}
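Taken together, the output paths used in the export routine above mean that a full export (all entities selected) produces the following layout under the chosen target folder:

assets/                  (media files, preserving their folder structure)
comments.json.gz
groups.json
navigation.json
pages.json.gz
pages-history.json.gz
settings.json
users.json.gz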

server/graph/resolvers/system.js (41)

@@ -41,6 +41,14 @@
ext.isCompatible = await WIKI.extensions.ext[ext.key].isCompatible()
}
return exts
},
async exportStatus () {
return {
status: WIKI.system.exportStatus.status,
progress: Math.ceil(WIKI.system.exportStatus.progress),
message: WIKI.system.exportStatus.message,
startedAt: WIKI.system.exportStatus.startedAt
}
}
},
SystemMutation: {
@@ -260,6 +268,39 @@
} catch (err) {
return graphHelper.generateError(err)
}
},
/**
* Export Wiki to Disk
*/
async export (obj, args, context) {
try {
const desiredPath = path.resolve(WIKI.ROOTPATH, args.path)
// -> Check if export process is already running
if (WIKI.system.exportStatus.status === 'running') {
throw new Error('Another export is already running.')
}
// -> Validate entities
if (args.entities.length < 1) {
throw new Error('Must specify at least 1 entity to export.')
}
// -> Check target path
await fs.ensureDir(desiredPath)
const existingFiles = await fs.readdir(desiredPath)
if (existingFiles.length) {
throw new Error('Target directory must be empty!')
}
// -> Start export
WIKI.system.export({
entities: args.entities,
path: desiredPath
})
return {
responseResult: graphHelper.generateSuccess('Export started successfully.')
}
} catch (err) {
return graphHelper.generateError(err)
}
}
},
SystemInfo: {

server/graph/schemas/system.graphql (15)

@@ -17,7 +17,8 @@ extend type Mutation {
type SystemQuery {
flags: [SystemFlag] @auth(requires: ["manage:system"])
info: SystemInfo
extensions: [SystemExtension]! @auth(requires: ["manage:system"])
extensions: [SystemExtension] @auth(requires: ["manage:system"])
exportStatus: SystemExportStatus @auth(requires: ["manage:system"])
}
# -----------------------------------------------
@@ -47,6 +48,11 @@
): DefaultResponse @auth(requires: ["manage:system"])
renewHTTPSCertificate: DefaultResponse @auth(requires: ["manage:system"])
export(
entities: [String]!
path: String!
): DefaultResponse @auth(requires: ["manage:system"])
}
# -----------------------------------------------
@@ -121,3 +127,10 @@
isInstalled: Boolean!
isCompatible: Boolean!
}
type SystemExportStatus {
status: String
progress: Int
message: String
startedAt: Date
}
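For reference, a sketch of how the new mutation and status query could be driven outside the admin UI, for example from a backup script. This is illustrative only: the /graphql endpoint, Bearer API-key auth, the WIKI_URL/API_KEY names and the chosen entities are assumptions rather than part of this commit, and Node 18+ is assumed for the global fetch.

// Hypothetical automation sketch: start an export, then poll exportStatus until it finishes.
const WIKI_URL = 'http://localhost:3000/graphql' // assumed GraphQL endpoint of the Wiki.js instance
const API_KEY = process.env.WIKI_API_KEY         // assumed API key with manage:system permission

async function gqlRequest (query, variables) {
  const res = await fetch(WIKI_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${API_KEY}`
    },
    body: JSON.stringify({ query, variables })
  })
  return res.json()
}

async function runExport () {
  // Start the export (arguments follow the schema added above)
  const start = await gqlRequest(`
    mutation ($entities: [String]!, $path: String!) {
      system {
        export (entities: $entities, path: $path) {
          responseResult { succeeded message }
        }
      }
    }
  `, { entities: ['pages', 'settings', 'users'], path: './data/export' })
  console.log(start.data.system.export.responseResult)

  // Poll until the status is no longer "running"
  let status = 'running'
  while (status === 'running') {
    await new Promise(resolve => setTimeout(resolve, 5000))
    const resp = await gqlRequest(`{ system { exportStatus { status progress message } } }`)
    const st = resp.data.system.exportStatus
    status = st.status
    console.log(`status=${st.status} progress=${st.progress}%`)
  }
}

runExport()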