refactor: fix models + storage modules

pull/5698/head
NGPixel 3 years ago
parent 027b1614ff
commit edb529378e
No known key found for this signature in database
GPG Key ID: 8FDA2F1757F60D63

@ -1,10 +1,12 @@
{ {
"folders": [ "folders": [
{ {
"path": "ux" "name": "server",
"path": "server"
}, },
{ {
"path": "server" "name": "ux",
"path": "ux"
} }
], ],
"settings": { "settings": {
@ -13,4 +15,4 @@
"src/i18n/locales" "src/i18n/locales"
] ]
} }
} }

@ -7,7 +7,8 @@
"dev": true, "dev": true,
"scripts": { "scripts": {
"start": "node server", "start": "node server",
"dev": "node dev", "dev": "nodemon server",
"dev-legacy": "node dev",
"test": "eslint --format codeframe --ext .js,.vue . && pug-lint server/views && jest", "test": "eslint --format codeframe --ext .js,.vue . && pug-lint server/views && jest",
"cypress:open": "cypress open" "cypress:open": "cypress open"
}, },
@ -119,12 +120,12 @@
"moment-timezone": "0.5.31", "moment-timezone": "0.5.31",
"ms": "2.1.3", "ms": "2.1.3",
"multer": "1.4.4", "multer": "1.4.4",
"nanoid": "3.2.0", "nanoid": "3.3.2",
"node-2fa": "1.1.2", "node-2fa": "1.1.2",
"node-cache": "5.1.2", "node-cache": "5.1.2",
"nodemailer": "6.7.3", "nodemailer": "6.7.3",
"objection": "3.0.1", "objection": "3.0.1",
"passport": "0.4.1", "passport": "0.5.2",
"passport-auth0": "1.4.2", "passport-auth0": "1.4.2",
"passport-azure-ad": "4.3.1", "passport-azure-ad": "4.3.1",
"passport-cas": "0.1.1", "passport-cas": "0.1.1",
@ -135,13 +136,13 @@
"passport-gitlab2": "5.0.0", "passport-gitlab2": "5.0.0",
"passport-google-oauth20": "2.0.0", "passport-google-oauth20": "2.0.0",
"passport-jwt": "4.0.0", "passport-jwt": "4.0.0",
"passport-ldapauth": "2.1.4", "passport-ldapauth": "3.0.1",
"passport-local": "1.0.0", "passport-local": "1.0.0",
"passport-microsoft": "0.1.0", "passport-microsoft": "0.1.0",
"passport-oauth2": "1.6.1", "passport-oauth2": "1.6.1",
"passport-okta-oauth": "0.0.1", "passport-okta-oauth": "0.0.1",
"passport-openidconnect": "0.0.2", "passport-openidconnect": "0.1.1",
"passport-saml": "1.3.5", "passport-saml": "3.2.1",
"passport-slack-oauth2": "1.1.1", "passport-slack-oauth2": "1.1.1",
"passport-twitch-oauth": "1.0.0", "passport-twitch-oauth": "1.0.0",
"pem-jwk": "2.0.0", "pem-jwk": "2.0.0",
@ -241,7 +242,7 @@
"filepond-plugin-file-validate-type": "1.2.6", "filepond-plugin-file-validate-type": "1.2.6",
"filesize.js": "2.0.0", "filesize.js": "2.0.0",
"graphql-persisted-document-loader": "2.0.0", "graphql-persisted-document-loader": "2.0.0",
"graphql-tag": "2.11.0", "graphql-tag": "2.12.6",
"hammerjs": "2.0.8", "hammerjs": "2.0.8",
"html-webpack-plugin": "4.5.0", "html-webpack-plugin": "4.5.0",
"html-webpack-pug-plugin": "2.0.0", "html-webpack-pug-plugin": "2.0.0",
@ -256,6 +257,7 @@
"mini-css-extract-plugin": "0.11.3", "mini-css-extract-plugin": "0.11.3",
"moment-duration-format": "2.3.2", "moment-duration-format": "2.3.2",
"moment-timezone-data-webpack-plugin": "1.3.0", "moment-timezone-data-webpack-plugin": "1.3.0",
"nodemon": "2.0.15",
"offline-plugin": "5.0.7", "offline-plugin": "5.0.7",
"optimize-css-assets-webpack-plugin": "5.0.4", "optimize-css-assets-webpack-plugin": "5.0.4",
"pako": "1.0.11", "pako": "1.0.11",
@ -265,7 +267,7 @@
"postcss-import": "12.0.1", "postcss-import": "12.0.1",
"postcss-loader": "3.0.0", "postcss-loader": "3.0.0",
"postcss-preset-env": "6.7.0", "postcss-preset-env": "6.7.0",
"postcss-selector-parser": "6.0.9", "postcss-selector-parser": "6.0.10",
"prismjs": "1.22.0", "prismjs": "1.22.0",
"pug-lint": "2.6.0", "pug-lint": "2.6.0",
"pug-loader": "2.4.0", "pug-loader": "2.4.0",
@ -371,5 +373,14 @@
"type": "opencollective", "type": "opencollective",
"url": "https://opencollective.com/wikijs", "url": "https://opencollective.com/wikijs",
"logo": "https://opencollective.com/opencollective/logo.txt" "logo": "https://opencollective.com/opencollective/logo.txt"
},
"nodemonConfig": {
"env": {
"NODE_ENV": "development"
},
"ext": "js,json,graphql,gql",
"watch": [
"server/"
]
} }
} }

@ -66,7 +66,7 @@ module.exports = {
// Load JWT // Load JWT
passport.use('jwt', new passportJWT.Strategy({ passport.use('jwt', new passportJWT.Strategy({
jwtFromRequest: securityHelper.extractJWT, jwtFromRequest: securityHelper.extractJWT,
secretOrKey: WIKI.config.certs.public, secretOrKey: WIKI.config.auth.certs.public,
audience: WIKI.config.auth.audience, audience: WIKI.config.auth.audience,
issuer: 'urn:wiki.js', issuer: 'urn:wiki.js',
algorithms: ['RS256'] algorithms: ['RS256']
@ -76,13 +76,13 @@ module.exports = {
// Load enabled strategies // Load enabled strategies
const enabledStrategies = await WIKI.models.authentication.getStrategies() const enabledStrategies = await WIKI.models.authentication.getStrategies()
for (let idx in enabledStrategies) { for (const idx in enabledStrategies) {
const stg = enabledStrategies[idx] const stg = enabledStrategies[idx]
try { try {
const strategy = require(`../modules/authentication/${stg.strategyKey}/authentication.js`) const strategy = require(`../modules/authentication/${stg.module}/authentication.js`)
stg.config.callbackURL = `${WIKI.config.host}/login/${stg.key}/callback` stg.config.callbackURL = `${WIKI.config.host}/login/${stg.id}/callback`
stg.config.key = stg.key; stg.config.key = stg.id
strategy.init(passport, stg.config) strategy.init(passport, stg.config)
strategy.config = stg.config strategy.config = stg.config
@ -92,7 +92,7 @@ module.exports = {
} }
WIKI.logger.info(`Authentication Strategy ${stg.displayName}: [ OK ]`) WIKI.logger.info(`Authentication Strategy ${stg.displayName}: [ OK ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Authentication Strategy ${stg.displayName} (${stg.key}): [ FAILED ]`) WIKI.logger.error(`Authentication Strategy ${stg.displayName} (${stg.id}): [ FAILED ]`)
WIKI.logger.error(err) WIKI.logger.error(err)
} }
} }

@ -67,7 +67,6 @@ module.exports = {
await WIKI.models.analytics.refreshProvidersFromDisk() await WIKI.models.analytics.refreshProvidersFromDisk()
await WIKI.models.authentication.refreshStrategiesFromDisk() await WIKI.models.authentication.refreshStrategiesFromDisk()
await WIKI.models.commentProviders.refreshProvidersFromDisk() await WIKI.models.commentProviders.refreshProvidersFromDisk()
await WIKI.models.editors.refreshEditorsFromDisk()
await WIKI.models.renderers.refreshRenderersFromDisk() await WIKI.models.renderers.refreshRenderersFromDisk()
await WIKI.models.storage.refreshTargetsFromDisk() await WIKI.models.storage.refreshTargetsFromDisk()
@ -76,7 +75,7 @@ module.exports = {
await WIKI.auth.activateStrategies() await WIKI.auth.activateStrategies()
await WIKI.models.commentProviders.initProvider() await WIKI.models.commentProviders.initProvider()
await WIKI.models.storage.initTargets() await WIKI.models.storage.initTargets()
WIKI.scheduler.start() // WIKI.scheduler.start()
await WIKI.models.subscribeToNotifications() await WIKI.models.subscribeToNotifications()
}, },

@ -4,6 +4,7 @@ const https = require('https')
const { ApolloServer } = require('apollo-server-express') const { ApolloServer } = require('apollo-server-express')
const Promise = require('bluebird') const Promise = require('bluebird')
const _ = require('lodash') const _ = require('lodash')
const { ApolloServerPluginLandingPageGraphQLPlayground, ApolloServerPluginLandingPageProductionDefault } = require('apollo-server-core')
/* global WIKI */ /* global WIKI */
@ -123,6 +124,11 @@ module.exports = {
uploads: false, uploads: false,
context: ({ req, res }) => ({ req, res }), context: ({ req, res }) => ({ req, res }),
plugins: [ plugins: [
process.env.NODE_ENV === 'development' ? ApolloServerPluginLandingPageGraphQLPlayground({
footer: false
}) : ApolloServerPluginLandingPageProductionDefault({
footer: false
})
// ApolloServerPluginDrainHttpServer({ httpServer: this.servers.http }) // ApolloServerPluginDrainHttpServer({ httpServer: this.servers.http })
// ...(this.servers.https && ApolloServerPluginDrainHttpServer({ httpServer: this.servers.https })) // ...(this.servers.https && ApolloServerPluginDrainHttpServer({ httpServer: this.servers.https }))
] ]

@ -71,6 +71,12 @@ exports.up = async knex => {
table.jsonb('autoEnrollGroups').notNullable().defaultTo('[]') table.jsonb('autoEnrollGroups').notNullable().defaultTo('[]')
table.jsonb('hideOnSites').notNullable().defaultTo('[]') table.jsonb('hideOnSites').notNullable().defaultTo('[]')
}) })
.createTable('commentProviders', table => {
table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
table.string('module').notNullable()
table.boolean('isEnabled').notNullable().defaultTo(false)
table.json('config').notNullable()
})
// COMMENTS ---------------------------- // COMMENTS ----------------------------
.createTable('comments', table => { .createTable('comments', table => {
table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()')) table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
@ -140,6 +146,7 @@ exports.up = async knex => {
table.timestamp('publishEndDate') table.timestamp('publishEndDate')
table.string('action').defaultTo('updated') table.string('action').defaultTo('updated')
table.text('content') table.text('content')
table.string('editor').notNullable()
table.string('contentType').notNullable() table.string('contentType').notNullable()
table.jsonb('extra').notNullable().defaultTo('{}') table.jsonb('extra').notNullable().defaultTo('{}')
table.jsonb('tags').defaultTo('[]') table.jsonb('tags').defaultTo('[]')
@ -166,6 +173,7 @@ exports.up = async knex => {
table.text('content') table.text('content')
table.text('render') table.text('render')
table.jsonb('toc') table.jsonb('toc')
table.string('editor').notNullable()
table.string('contentType').notNullable() table.string('contentType').notNullable()
table.jsonb('extra').notNullable().defaultTo('{}') table.jsonb('extra').notNullable().defaultTo('{}')
table.timestamp('createdAt').notNullable().defaultTo(knex.fn.now()) table.timestamp('createdAt').notNullable().defaultTo(knex.fn.now())
@ -279,6 +287,9 @@ exports.up = async knex => {
.table('assetFolders', table => { .table('assetFolders', table => {
table.uuid('parentId').references('id').inTable('assetFolders').index() table.uuid('parentId').references('id').inTable('assetFolders').index()
}) })
.table('commentProviders', table => {
table.uuid('siteId').notNullable().references('id').inTable('sites')
})
.table('comments', table => { .table('comments', table => {
table.uuid('pageId').notNullable().references('id').inTable('pages').index() table.uuid('pageId').notNullable().references('id').inTable('pages').index()
table.uuid('authorId').notNullable().references('id').inTable('users').index() table.uuid('authorId').notNullable().references('id').inTable('users').index()
@ -306,6 +317,9 @@ exports.up = async knex => {
table.uuid('pageId').notNullable().references('id').inTable('pages').onDelete('CASCADE') table.uuid('pageId').notNullable().references('id').inTable('pages').onDelete('CASCADE')
table.string('localeCode', 5).references('code').inTable('locales') table.string('localeCode', 5).references('code').inTable('locales')
}) })
.table('renderers', table => {
table.uuid('siteId').notNullable().references('id').inTable('sites')
})
.table('storage', table => { .table('storage', table => {
table.uuid('siteId').notNullable().references('id').inTable('sites') table.uuid('siteId').notNullable().references('id').inTable('sites')
}) })
@ -324,9 +338,50 @@ exports.up = async knex => {
// DEFAULT DATA // DEFAULT DATA
// ===================================== // =====================================
// -> GENERATE IDS
const groupAdminId = uuid()
const groupGuestId = '10000000-0000-4000-0000-000000000001'
const siteId = uuid()
const authModuleId = uuid()
const userAdminId = uuid()
const userGuestId = uuid()
// -> SYSTEM CONFIG // -> SYSTEM CONFIG
WIKI.logger.info('Generating certificates...')
const secret = crypto.randomBytes(32).toString('hex')
const certs = crypto.generateKeyPairSync('rsa', {
modulusLength: 2048,
publicKeyEncoding: {
type: 'pkcs1',
format: 'pem'
},
privateKeyEncoding: {
type: 'pkcs1',
format: 'pem',
cipher: 'aes-256-cbc',
passphrase: secret
}
})
await knex('settings').insert([ await knex('settings').insert([
{
key: 'auth',
value: {
audience: 'urn:wiki.js',
tokenExpiration: '30m',
tokenRenewal: '14d',
certs: {
jwk: pem2jwk(certs.publicKey),
public: certs.publicKey,
private: certs.privateKey
},
secret,
rootAdminUserId: userAdminId,
guestUserId: userGuestId
}
},
{ {
key: 'mail', key: 'mail',
value: { value: {
@ -367,12 +422,6 @@ exports.up = async knex => {
uploadScanSVG: true uploadScanSVG: true
} }
}, },
{
key: 'system',
value: {
sessionSecret: crypto.randomBytes(32).toString('hex')
}
},
{ {
key: 'update', key: 'update',
value: { value: {
@ -393,39 +442,11 @@ exports.up = async knex => {
// -> DEFAULT SITE // -> DEFAULT SITE
WIKI.logger.info('Generating certificates...')
const secret = crypto.randomBytes(32).toString('hex')
const certs = crypto.generateKeyPairSync('rsa', {
modulusLength: 2048,
publicKeyEncoding: {
type: 'pkcs1',
format: 'pem'
},
privateKeyEncoding: {
type: 'pkcs1',
format: 'pem',
cipher: 'aes-256-cbc',
passphrase: secret
}
})
const siteId = uuid()
await knex('sites').insert({ await knex('sites').insert({
id: siteId, id: siteId,
hostname: '*', hostname: '*',
isEnabled: true, isEnabled: true,
config: { config: {
auth: {
audience: 'urn:wiki.js',
tokenExpiration: '30m',
tokenRenewal: '14d',
certs: {
jwk: pem2jwk(certs.publicKey),
public: certs.publicKey,
private: certs.privateKey
},
secret
},
title: 'My Wiki Site', title: 'My Wiki Site',
description: '', description: '',
company: '', company: '',
@ -471,8 +492,6 @@ exports.up = async knex => {
// -> DEFAULT GROUPS // -> DEFAULT GROUPS
const groupAdminId = uuid()
const groupGuestId = '10000000-0000-4000-0000-000000000001'
await knex('groups').insert([ await knex('groups').insert([
{ {
id: groupAdminId, id: groupAdminId,
@ -503,7 +522,6 @@ exports.up = async knex => {
// -> AUTHENTICATION MODULE // -> AUTHENTICATION MODULE
const authModuleId = uuid()
await knex('authentication').insert({ await knex('authentication').insert({
id: authModuleId, id: authModuleId,
module: 'local', module: 'local',
@ -513,8 +531,6 @@ exports.up = async knex => {
// -> USERS // -> USERS
const userAdminId = uuid()
const userGuestId = uuid()
await knex('users').insert([ await knex('users').insert([
{ {
id: userAdminId, id: userAdminId,

@ -22,7 +22,11 @@ let WIKI = {
Error: require('./helpers/error'), Error: require('./helpers/error'),
configSvc: require('./core/config'), configSvc: require('./core/config'),
kernel: require('./core/kernel'), kernel: require('./core/kernel'),
startedAt: DateTime.utc() startedAt: DateTime.utc(),
storage: {
defs: [],
modules: []
}
} }
global.WIKI = WIKI global.WIKI = WIKI

@ -77,7 +77,7 @@ module.exports = async () => {
app.use(cookieParser()) app.use(cookieParser())
app.use(session({ app.use(session({
secret: WIKI.config.system.sessionSecret, secret: WIKI.config.auth.secret,
resave: false, resave: false,
saveUninitialized: false, saveUninitialized: false,
store: new KnexSessionStore({ store: new KnexSessionStore({

@ -12,7 +12,6 @@ const commonHelper = require('../helpers/common')
*/ */
module.exports = class Analytics extends Model { module.exports = class Analytics extends Model {
static get tableName() { return 'analytics' } static get tableName() { return 'analytics' }
static get idColumn() { return 'key' }
static get jsonSchema () { static get jsonSchema () {
return { return {
@ -52,7 +51,7 @@ module.exports = class Analytics extends Model {
WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} analytics module definitions: [ OK ]`) WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} analytics module definitions: [ OK ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Failed to scan or load new analytics providers: [ FAILED ]`) WIKI.logger.error(`Failed to scan or load analytics providers: [ FAILED ]`)
WIKI.logger.error(err) WIKI.logger.error(err)
} }
} }

@ -12,15 +12,15 @@ const commonHelper = require('../helpers/common')
*/ */
module.exports = class Authentication extends Model { module.exports = class Authentication extends Model {
static get tableName() { return 'authentication' } static get tableName() { return 'authentication' }
static get idColumn() { return 'key' }
static get jsonSchema () { static get jsonSchema () {
return { return {
type: 'object', type: 'object',
required: ['key'], required: ['module'],
properties: { properties: {
key: {type: 'string'}, id: { type: 'string' },
module: { type: 'string' },
selfRegistration: {type: 'boolean'} selfRegistration: {type: 'boolean'}
} }
} }
@ -43,79 +43,23 @@ module.exports = class Authentication extends Model {
})) }))
} }
static async getStrategiesForLegacyClient() {
const strategies = await WIKI.models.authentication.query().select('key', 'selfRegistration')
let formStrategies = []
let socialStrategies = []
for (let stg of strategies) {
const stgInfo = _.find(WIKI.data.authentication, ['key', stg.key]) || {}
if (stgInfo.useForm) {
formStrategies.push({
key: stg.key,
title: stgInfo.title
})
} else {
socialStrategies.push({
...stgInfo,
...stg,
icon: await fs.readFile(path.join(WIKI.ROOTPATH, `assets/svg/auth-icon-${stg.key}.svg`), 'utf8').catch(err => {
if (err.code === 'ENOENT') {
return null
}
throw err
})
})
}
}
return {
formStrategies,
socialStrategies
}
}
static async refreshStrategiesFromDisk() { static async refreshStrategiesFromDisk() {
try { try {
const dbStrategies = await WIKI.models.authentication.query()
// -> Fetch definitions from disk // -> Fetch definitions from disk
const authDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/authentication')) const authenticationDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/authentication'))
WIKI.data.authentication = [] WIKI.data.authentication = []
for (let dir of authDirs) { for (const dir of authenticationDirs) {
const defRaw = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/authentication', dir, 'definition.yml'), 'utf8') const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/authentication', dir, 'definition.yml'), 'utf8')
const def = yaml.safeLoad(defRaw) const defParsed = yaml.load(def)
WIKI.data.authentication.push({ defParsed.key = dir
...def, defParsed.props = commonHelper.parseModuleProps(defParsed.props)
props: commonHelper.parseModuleProps(def.props) WIKI.data.analytics.push(defParsed)
}) WIKI.logger.debug(`Loaded authentication module definition ${dir}: [ OK ]`)
}
for (const strategy of dbStrategies) {
const strategyDef = _.find(WIKI.data.authentication, ['key', strategy.strategyKey])
if (!strategyDef) {
await WIKI.models.authentication.query().delete().where('key', strategy.key)
WIKI.logger.info(`Authentication strategy ${strategy.strategyKey} was removed from disk: [ REMOVED ]`)
continue
}
strategy.config = _.transform(strategyDef.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, strategy.config)
// Fix pre-2.5 strategies displayName
if (!strategy.displayName) {
await WIKI.models.authentication.query().patch({
displayName: strategyDef.title
}).where('key', strategy.key)
}
} }
WIKI.logger.info(`Loaded ${WIKI.data.authentication.length} authentication strategies: [ OK ]`) WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} authentication module definitions: [ OK ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Failed to scan or load new authentication providers: [ FAILED ]`) WIKI.logger.error(`Failed to scan or load authentication providers: [ FAILED ]`)
WIKI.logger.error(err) WIKI.logger.error(err)
} }
} }

@ -36,65 +36,27 @@ module.exports = class CommentProvider extends Model {
static async getProviders(isEnabled) { static async getProviders(isEnabled) {
const providers = await WIKI.models.commentProviders.query().where(_.isBoolean(isEnabled) ? { isEnabled } : {}) const providers = await WIKI.models.commentProviders.query().where(_.isBoolean(isEnabled) ? { isEnabled } : {})
return _.sortBy(providers, ['key']) return _.sortBy(providers, ['module'])
} }
static async refreshProvidersFromDisk() { static async refreshProvidersFromDisk() {
let trx
try { try {
const dbProviders = await WIKI.models.commentProviders.query()
// -> Fetch definitions from disk // -> Fetch definitions from disk
const commentDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/comments')) const commentsDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/comments'))
let diskProviders = [] WIKI.data.commentProviders = []
for (let dir of commentDirs) { for (const dir of commentsDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/comments', dir, 'definition.yml'), 'utf8') const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/comments', dir, 'definition.yml'), 'utf8')
diskProviders.push(yaml.safeLoad(def)) const defParsed = yaml.load(def)
defParsed.key = dir
defParsed.props = commonHelper.parseModuleProps(defParsed.props)
WIKI.data.commentProviders.push(defParsed)
WIKI.logger.debug(`Loaded comments provider module definition ${dir}: [ OK ]`)
} }
WIKI.data.commentProviders = diskProviders.map(provider => ({
...provider,
props: commonHelper.parseModuleProps(provider.props)
}))
let newProviders = [] WIKI.logger.info(`Loaded ${WIKI.data.commentProviders.length} comments providers module definitions: [ OK ]`)
for (let provider of WIKI.data.commentProviders) {
if (!_.some(dbProviders, ['key', provider.key])) {
newProviders.push({
key: provider.key,
isEnabled: provider.key === 'default',
config: _.transform(provider.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
const providerConfig = _.get(_.find(dbProviders, ['key', provider.key]), 'config', {})
await WIKI.models.commentProviders.query().patch({
config: _.transform(provider.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, providerConfig)
}).where('key', provider.key)
}
}
if (newProviders.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let provider of newProviders) {
await WIKI.models.commentProviders.query(trx).insert(provider)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newProviders.length} new comment providers: [ OK ]`)
} else {
WIKI.logger.info(`No new comment providers found: [ SKIPPED ]`)
}
} catch (err) { } catch (err) {
WIKI.logger.error(`Failed to scan or load new comment providers: [ FAILED ]`) WIKI.logger.error(`Failed to scan or load comments providers: [ FAILED ]`)
WIKI.logger.error(err) WIKI.logger.error(err)
if (trx) {
trx.rollback()
}
} }
} }
@ -102,7 +64,7 @@ module.exports = class CommentProvider extends Model {
const commentProvider = await WIKI.models.commentProviders.query().findOne('isEnabled', true) const commentProvider = await WIKI.models.commentProviders.query().findOne('isEnabled', true)
if (commentProvider) { if (commentProvider) {
WIKI.data.commentProvider = { WIKI.data.commentProvider = {
..._.find(WIKI.data.commentProviders, ['key', commentProvider.key]), ..._.find(WIKI.data.commentProviders, ['key', commentProvider.module]),
head: '', head: '',
bodyStart: '', bodyStart: '',
bodyEnd: '', bodyEnd: '',

@ -1,9 +1,4 @@
const Model = require('objection').Model const Model = require('objection').Model
const fs = require('fs-extra')
const path = require('path')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')
/* global WIKI */ /* global WIKI */
@ -34,66 +29,6 @@ module.exports = class Editor extends Model {
return WIKI.models.editors.query() return WIKI.models.editors.query()
} }
static async refreshEditorsFromDisk() {
let trx
try {
const dbEditors = await WIKI.models.editors.query()
// -> Fetch definitions from disk
const editorDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/editor'))
let diskEditors = []
for (let dir of editorDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/editor', dir, 'definition.yml'), 'utf8')
diskEditors.push(yaml.safeLoad(def))
}
WIKI.data.editors = diskEditors.map(editor => ({
...editor,
props: commonHelper.parseModuleProps(editor.props)
}))
// -> Insert new editors
let newEditors = []
for (let editor of WIKI.data.editors) {
if (!_.some(dbEditors, ['key', editor.key])) {
newEditors.push({
key: editor.key,
isEnabled: false,
config: _.transform(editor.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
const editorConfig = _.get(_.find(dbEditors, ['key', editor.key]), 'config', {})
await WIKI.models.editors.query().patch({
config: _.transform(editor.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, editorConfig)
}).where('key', editor.key)
}
}
if (newEditors.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let editor of newEditors) {
await WIKI.models.editors.query(trx).insert(editor)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newEditors.length} new editors: [ OK ]`)
} else {
WIKI.logger.info(`No new editors found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new editors: [ FAILED ]`)
WIKI.logger.error(err)
if (trx) {
trx.rollback()
}
}
}
static async getDefaultEditor(contentType) { static async getDefaultEditor(contentType) {
// TODO - hardcoded for now // TODO - hardcoded for now
switch (contentType) { switch (contentType) {

@ -41,7 +41,7 @@ module.exports = class Page extends Model {
hash: {type: 'string'}, hash: {type: 'string'},
title: {type: 'string'}, title: {type: 'string'},
description: {type: 'string'}, description: {type: 'string'},
isPublished: {type: 'boolean'}, publishState: {type: 'string'},
privateNS: {type: 'string'}, privateNS: {type: 'string'},
publishStartDate: {type: 'string'}, publishStartDate: {type: 'string'},
publishEndDate: {type: 'string'}, publishEndDate: {type: 'string'},
@ -96,14 +96,6 @@ module.exports = class Page extends Model {
to: 'users.id' to: 'users.id'
} }
}, },
editor: {
relation: Model.BelongsToOneRelation,
modelClass: require('./editors'),
join: {
from: 'pages.editorKey',
to: 'editors.key'
}
},
locale: { locale: {
relation: Model.BelongsToOneRelation, relation: Model.BelongsToOneRelation,
modelClass: require('./locales'), modelClass: require('./locales'),
@ -143,16 +135,14 @@ module.exports = class Page extends Model {
creatorId: 'uint', creatorId: 'uint',
creatorName: 'string', creatorName: 'string',
description: 'string', description: 'string',
editorKey: 'string', editor: 'string',
isPrivate: 'boolean', publishState: 'string',
isPublished: 'boolean',
publishEndDate: 'string', publishEndDate: 'string',
publishStartDate: 'string', publishStartDate: 'string',
render: 'string', render: 'string',
tags: [ tags: [
{ {
tag: 'string', tag: 'string'
title: 'string'
} }
], ],
extra: { extra: {
@ -301,10 +291,9 @@ module.exports = class Page extends Model {
creatorId: opts.user.id, creatorId: opts.user.id,
contentType: _.get(_.find(WIKI.data.editors, ['key', opts.editor]), `contentType`, 'text'), contentType: _.get(_.find(WIKI.data.editors, ['key', opts.editor]), `contentType`, 'text'),
description: opts.description, description: opts.description,
editorKey: opts.editor, editor: opts.editor,
hash: pageHelper.generateHash({ path: opts.path, locale: opts.locale, privateNS: opts.isPrivate ? 'TODO' : '' }), hash: pageHelper.generateHash({ path: opts.path, locale: opts.locale }),
isPrivate: opts.isPrivate, publishState: opts.publishState,
isPublished: opts.isPublished,
localeCode: opts.locale, localeCode: opts.locale,
path: opts.path, path: opts.path,
publishEndDate: opts.publishEndDate || '', publishEndDate: opts.publishEndDate || '',
@ -319,8 +308,7 @@ module.exports = class Page extends Model {
const page = await WIKI.models.pages.getPageFromDb({ const page = await WIKI.models.pages.getPageFromDb({
path: opts.path, path: opts.path,
locale: opts.locale, locale: opts.locale,
userId: opts.user.id, userId: opts.user.id
isPrivate: opts.isPrivate
}) })
// -> Save Tags // -> Save Tags
@ -389,7 +377,6 @@ module.exports = class Page extends Model {
// -> Create version snapshot // -> Create version snapshot
await WIKI.models.pageHistory.addVersion({ await WIKI.models.pageHistory.addVersion({
...ogPage, ...ogPage,
isPublished: ogPage.isPublished === true || ogPage.isPublished === 1,
action: opts.action ? opts.action : 'updated', action: opts.action ? opts.action : 'updated',
versionDate: ogPage.updatedAt versionDate: ogPage.updatedAt
}) })
@ -426,7 +413,7 @@ module.exports = class Page extends Model {
authorId: opts.user.id, authorId: opts.user.id,
content: opts.content, content: opts.content,
description: opts.description, description: opts.description,
isPublished: opts.isPublished === true || opts.isPublished === 1, publishState: opts.publishState,
publishEndDate: opts.publishEndDate || '', publishEndDate: opts.publishEndDate || '',
publishStartDate: opts.publishStartDate || '', publishStartDate: opts.publishStartDate || '',
title: opts.title, title: opts.title,
@ -500,7 +487,7 @@ module.exports = class Page extends Model {
throw new Error('Invalid Page Id') throw new Error('Invalid Page Id')
} }
if (ogPage.editorKey === opts.editor) { if (ogPage.editor === opts.editor) {
throw new Error('Page is already using this editor. Nothing to convert.') throw new Error('Page is already using this editor. Nothing to convert.')
} }
@ -631,7 +618,6 @@ module.exports = class Page extends Model {
if (shouldConvert) { if (shouldConvert) {
await WIKI.models.pageHistory.addVersion({ await WIKI.models.pageHistory.addVersion({
...ogPage, ...ogPage,
isPublished: ogPage.isPublished === true || ogPage.isPublished === 1,
action: 'updated', action: 'updated',
versionDate: ogPage.updatedAt versionDate: ogPage.updatedAt
}) })
@ -640,7 +626,7 @@ module.exports = class Page extends Model {
// -> Update page // -> Update page
await WIKI.models.pages.query().patch({ await WIKI.models.pages.query().patch({
contentType: targetContentType, contentType: targetContentType,
editorKey: opts.editor, editor: opts.editor,
...(convertedContent ? { content: convertedContent } : {}) ...(convertedContent ? { content: convertedContent } : {})
}).where('id', ogPage.id) }).where('id', ogPage.id)
const page = await WIKI.models.pages.getPageFromDb(ogPage.id) const page = await WIKI.models.pages.getPageFromDb(ogPage.id)
@ -721,7 +707,7 @@ module.exports = class Page extends Model {
versionDate: page.updatedAt versionDate: page.updatedAt
}) })
const destinationHash = pageHelper.generateHash({ path: opts.destinationPath, locale: opts.destinationLocale, privateNS: opts.isPrivate ? 'TODO' : '' }) const destinationHash = pageHelper.generateHash({ path: opts.destinationPath, locale: opts.destinationLocale })
// -> Move page // -> Move page
const destinationTitle = (page.title === page.path ? opts.destinationPath : page.title) const destinationTitle = (page.title === page.path ? opts.destinationPath : page.title)
@ -991,9 +977,7 @@ module.exports = class Page extends Model {
'pages.hash', 'pages.hash',
'pages.title', 'pages.title',
'pages.description', 'pages.description',
'pages.isPrivate', 'pages.publishState',
'pages.isPublished',
'pages.privateNS',
'pages.publishStartDate', 'pages.publishStartDate',
'pages.publishEndDate', 'pages.publishEndDate',
'pages.content', 'pages.content',
@ -1002,7 +986,7 @@ module.exports = class Page extends Model {
'pages.contentType', 'pages.contentType',
'pages.createdAt', 'pages.createdAt',
'pages.updatedAt', 'pages.updatedAt',
'pages.editorKey', 'pages.editor',
'pages.localeCode', 'pages.localeCode',
'pages.authorId', 'pages.authorId',
'pages.creatorId', 'pages.creatorId',
@ -1018,7 +1002,7 @@ module.exports = class Page extends Model {
.joinRelated('creator') .joinRelated('creator')
.withGraphJoined('tags') .withGraphJoined('tags')
.modifyGraph('tags', builder => { .modifyGraph('tags', builder => {
builder.select('tag', 'title') builder.select('tag')
}) })
.where(queryModeID ? { .where(queryModeID ? {
'pages.id': opts 'pages.id': opts
@ -1066,17 +1050,16 @@ module.exports = class Page extends Model {
creatorId: page.creatorId, creatorId: page.creatorId,
creatorName: page.creatorName, creatorName: page.creatorName,
description: page.description, description: page.description,
editorKey: page.editorKey, editor: page.editor,
extra: { extra: {
css: _.get(page, 'extra.css', ''), css: _.get(page, 'extra.css', ''),
js: _.get(page, 'extra.js', '') js: _.get(page, 'extra.js', '')
}, },
isPrivate: page.isPrivate === 1 || page.isPrivate === true, publishState: page.publishState,
isPublished: page.isPublished === 1 || page.isPublished === true,
publishEndDate: page.publishEndDate, publishEndDate: page.publishEndDate,
publishStartDate: page.publishStartDate, publishStartDate: page.publishStartDate,
render: page.render, render: page.render,
tags: page.tags.map(t => _.pick(t, ['tag', 'title'])), tags: page.tags.map(t => _.pick(t, ['tag'])),
title: page.title, title: page.title,
toc: _.isString(page.toc) ? page.toc : JSON.stringify(page.toc), toc: _.isString(page.toc) ? page.toc : JSON.stringify(page.toc),
updatedAt: page.updatedAt updatedAt: page.updatedAt
@ -1090,7 +1073,7 @@ module.exports = class Page extends Model {
* @returns {Promise} Promise of the Page Model Instance * @returns {Promise} Promise of the Page Model Instance
*/ */
static async getPageFromCache(opts) { static async getPageFromCache(opts) {
const pageHash = pageHelper.generateHash({ path: opts.path, locale: opts.locale, privateNS: opts.isPrivate ? 'TODO' : '' }) const pageHash = pageHelper.generateHash({ path: opts.path, locale: opts.locale })
const cachePath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${pageHash}.bin`) const cachePath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${pageHash}.bin`)
try { try {

@ -13,15 +13,15 @@ const commonHelper = require('../helpers/common')
*/ */
module.exports = class Renderer extends Model { module.exports = class Renderer extends Model {
static get tableName() { return 'renderers' } static get tableName() { return 'renderers' }
static get idColumn() { return 'key' }
static get jsonSchema () { static get jsonSchema () {
return { return {
type: 'object', type: 'object',
required: ['key', 'isEnabled'], required: ['module', 'isEnabled'],
properties: { properties: {
key: {type: 'string'}, id: {type: 'string'},
module: {type: 'string'},
isEnabled: {type: 'boolean'} isEnabled: {type: 'boolean'}
} }
} }
@ -36,77 +36,35 @@ module.exports = class Renderer extends Model {
} }
static async fetchDefinitions() { static async fetchDefinitions() {
const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
let diskRenderers = []
for (let dir of rendererDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
diskRenderers.push(yaml.safeLoad(def))
}
WIKI.data.renderers = diskRenderers.map(renderer => ({
...renderer,
props: commonHelper.parseModuleProps(renderer.props)
}))
}
static async refreshRenderersFromDisk() {
let trx
try { try {
const dbRenderers = await WIKI.models.renderers.query()
// -> Fetch definitions from disk // -> Fetch definitions from disk
await WIKI.models.renderers.fetchDefinitions() const renderersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
WIKI.data.renderers = []
// -> Insert new Renderers for (const dir of renderersDirs) {
let newRenderers = [] const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
for (let renderer of WIKI.data.renderers) { const defParsed = yaml.load(def)
if (!_.some(dbRenderers, ['key', renderer.key])) { defParsed.key = dir
newRenderers.push({ defParsed.props = commonHelper.parseModuleProps(defParsed.props)
key: renderer.key, WIKI.data.renderers.push(defParsed)
isEnabled: _.get(renderer, 'enabledDefault', true), WIKI.logger.debug(`Loaded renderers module definition ${dir}: [ OK ]`)
config: _.transform(renderer.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
const rendererConfig = _.get(_.find(dbRenderers, ['key', renderer.key]), 'config', {})
await WIKI.models.renderers.query().patch({
config: _.transform(renderer.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, rendererConfig)
}).where('key', renderer.key)
}
}
if (newRenderers.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let renderer of newRenderers) {
await WIKI.models.renderers.query(trx).insert(renderer)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newRenderers.length} new renderers: [ OK ]`)
} else {
WIKI.logger.info(`No new renderers found: [ SKIPPED ]`)
} }
// -> Delete removed Renderers WIKI.logger.info(`Loaded ${WIKI.data.renderers.length} renderers module definitions: [ OK ]`)
for (const renderer of dbRenderers) {
if (!_.some(WIKI.data.renderers, ['key', renderer.key])) {
await WIKI.models.renderers.query().where('key', renderer.key).del()
WIKI.logger.info(`Removed renderer ${renderer.key} because it is no longer present in the modules folder: [ OK ]`)
}
}
} catch (err) { } catch (err) {
WIKI.logger.error(`Failed to scan or load new renderers: [ FAILED ]`) WIKI.logger.error(`Failed to scan or load renderers providers: [ FAILED ]`)
WIKI.logger.error(err) WIKI.logger.error(err)
if (trx) {
trx.rollback()
}
} }
} }
static async refreshRenderersFromDisk() {
// const dbRenderers = await WIKI.models.renderers.query()
// -> Fetch definitions from disk
await WIKI.models.renderers.fetchDefinitions()
// TODO: Merge existing configs with updated modules
}
static async getRenderingPipeline(contentType) { static async getRenderingPipeline(contentType) {
const renderersDb = await WIKI.models.renderers.query().where('isEnabled', true) const renderersDb = await WIKI.models.renderers.query().where('isEnabled', true)
if (renderersDb && renderersDb.length > 0) { if (renderersDb && renderersDb.length > 0) {

@ -0,0 +1,108 @@
const Model = require('objection').Model
const crypto = require('crypto')
const pem2jwk = require('pem-jwk').pem2jwk
const _ = require('lodash')
/* global WIKI */
/**
* Site model
*/
module.exports = class Site extends Model {
static get tableName () { return 'sites' }
static get jsonSchema () {
return {
type: 'object',
required: ['hostname'],
properties: {
id: { type: 'string' },
hostname: { type: 'string' },
isEnabled: { type: 'boolean', default: false }
}
}
}
static get jsonAttributes () {
return ['config']
}
static async createSite (hostname, config) {
const newSite = await WIKI.models.sites.query().insertAndFetch({
hostname,
isEnabled: true,
config: _.defaultsDeep(config, {
title: 'My Wiki Site',
description: '',
company: '',
contentLicense: '',
defaults: {
timezone: 'America/New_York',
dateFormat: 'YYYY-MM-DD',
timeFormat: '12h'
},
features: {
ratings: false,
ratingsMode: 'off',
comments: false,
contributions: false,
profile: true,
search: true
},
logoUrl: '',
logoText: true,
robots: {
index: true,
follow: true
},
locale: 'en',
localeNamespacing: false,
localeNamespaces: [],
theme: {
dark: false,
colorPrimary: '#1976d2',
colorSecondary: '#02c39a',
colorAccent: '#f03a47',
colorHeader: '#000000',
colorSidebar: '#1976d2',
injectCSS: '',
injectHead: '',
injectBody: '',
sidebarPosition: 'left',
tocPosition: 'right',
showSharingMenu: true,
showPrintBtn: true
}
})
})
await WIKI.models.storage.query().insert({
module: 'db',
siteId: newSite.id,
isEnabled: true,
contentTypes: {
activeTypes: ['pages', 'images', 'documents', 'others', 'large'],
largeThreshold: '5MB'
},
assetDelivery: {
streaming: true,
directAccess: false
},
state: {
current: 'ok'
}
})
return newSite
}
static async updateSite (id, patch) {
return WIKI.models.sites.query().findById(id).patch(patch)
}
static async deleteSite (id) {
await WIKI.models.storage.query().delete().where('siteId', id)
return WIKI.models.sites.query().deleteById(id)
}
}

@ -17,93 +17,45 @@ module.exports = class Storage extends Model {
static get jsonSchema () { static get jsonSchema () {
return { return {
type: 'object', type: 'object',
required: ['key', 'isEnabled'], required: ['module', 'isEnabled', 'siteId'],
properties: { properties: {
key: {type: 'string'}, module: {type: 'string'},
isEnabled: {type: 'boolean'}, isEnabled: {type: 'boolean'},
mode: {type: 'string'} SVGAnimatedInteger: {type: 'string'}
} }
} }
} }
static get jsonAttributes() { static get jsonAttributes() {
return ['config', 'state'] return ['contentTypes', 'assetDelivery', 'versioning', 'schedule', 'config', 'state']
} }
static async getTargets() { static async getTargets ({ siteId }) {
return WIKI.models.storage.query() return WIKI.models.storage.query().where(builder => {
if (siteId) {
builder.where('siteId', siteId)
}
})
} }
static async refreshTargetsFromDisk() { static async refreshTargetsFromDisk () {
let trx let trx
try { try {
const dbTargets = await WIKI.models.storage.query()
// -> Fetch definitions from disk // -> Fetch definitions from disk
const storageDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/storage')) const storageDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/storage'))
let diskTargets = [] WIKI.storage.defs = []
for (let dir of storageDirs) { for (const dir of storageDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/storage', dir, 'definition.yml'), 'utf8') const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/storage', dir, 'definition.yml'), 'utf8')
diskTargets.push(yaml.safeLoad(def)) const defParsed = yaml.load(def)
} defParsed.key = dir
WIKI.data.storage = diskTargets.map(target => ({ defParsed.isLoaded = false
...target, WIKI.storage.defs.push(defParsed)
isAvailable: _.get(target, 'isAvailable', false), WIKI.logger.debug(`Loaded storage module definition ${dir}: [ OK ]`)
props: commonHelper.parseModuleProps(target.props)
}))
// -> Insert new targets
let newTargets = []
for (let target of WIKI.data.storage) {
if (!_.some(dbTargets, ['key', target.key])) {
newTargets.push({
key: target.key,
isEnabled: false,
mode: target.defaultMode || 'push',
syncInterval: target.schedule || 'P0D',
config: _.transform(target.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {}),
state: {
status: 'pending',
message: '',
lastAttempt: null
}
})
} else {
const targetConfig = _.get(_.find(dbTargets, ['key', target.key]), 'config', {})
await WIKI.models.storage.query().patch({
config: _.transform(target.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, targetConfig)
}).where('key', target.key)
}
}
if (newTargets.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let target of newTargets) {
await WIKI.models.storage.query(trx).insert(target)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newTargets.length} new storage targets: [ OK ]`)
} else {
WIKI.logger.info(`No new storage targets found: [ SKIPPED ]`)
}
// -> Delete removed targets
for (const target of dbTargets) {
if (!_.some(WIKI.data.storage, ['key', target.key])) {
await WIKI.models.storage.query().where('key', target.key).del()
WIKI.logger.info(`Removed target ${target.key} because it is no longer present in the modules folder: [ OK ]`)
}
} }
WIKI.logger.info(`Loaded ${WIKI.storage.defs.length} storage module definitions: [ OK ]`)
} catch (err) { } catch (err) {
WIKI.logger.error(`Failed to scan or load new storage providers: [ FAILED ]`) WIKI.logger.error('Failed to scan or load new storage providers: [ FAILED ]')
WIKI.logger.error(err) WIKI.logger.error(err)
if (trx) { if (trx) {
trx.rollback() trx.rollback()
@ -111,66 +63,91 @@ module.exports = class Storage extends Model {
} }
} }
/**
* Ensure a storage module is loaded
*/
static async ensureModule (moduleName) {
if (!_.has(WIKI.storage.modules, moduleName)) {
try {
WIKI.storage.modules[moduleName] = require(`../modules/storage/${moduleName}/storage`)
WIKI.logger.debug(`Activated storage module ${moduleName}: [ OK ]`)
return true
} catch (err) {
WIKI.logger.warn(`Failed to load storage module ${moduleName}: [ FAILED ]`)
WIKI.logger.warn(err)
return false
}
} else {
return true
}
}
/** /**
* Initialize active storage targets * Initialize active storage targets
*/ */
static async initTargets() { static async initTargets () {
this.targets = await WIKI.models.storage.query().where('isEnabled', true).orderBy('key') const dbTargets = await WIKI.models.storage.query().where('isEnabled', true)
const activeModules = _.uniq(dbTargets.map(t => t.module))
try { try {
// -> Stop and delete existing jobs // -> Stop and delete existing jobs
const prevjobs = _.remove(WIKI.scheduler.jobs, job => job.name === `sync-storage`) // const prevjobs = _.remove(WIKI.scheduler.jobs, job => job.name === 'sync-storage')
if (prevjobs.length > 0) { // if (prevjobs.length > 0) {
prevjobs.forEach(job => job.stop()) // prevjobs.forEach(job => job.stop())
// }
// -> Load active modules
for (const md of activeModules) {
this.ensureModule(md)
} }
// -> Initialize targets // -> Initialize targets
for (let target of this.targets) { // for (const target of this.targets) {
const targetDef = _.find(WIKI.data.storage, ['key', target.key]) // const targetDef = _.find(WIKI.data.storage, ['key', target.key])
target.fn = require(`../modules/storage/${target.key}/storage`) // target.fn = require(`../modules/storage/${target.key}/storage`)
target.fn.config = target.config // target.fn.config = target.config
target.fn.mode = target.mode // target.fn.mode = target.mode
try { // try {
await target.fn.init() // await target.fn.init()
// -> Save succeeded init state // // -> Save succeeded init state
await WIKI.models.storage.query().patch({ // await WIKI.models.storage.query().patch({
state: { // state: {
status: 'operational', // status: 'operational',
message: '', // message: '',
lastAttempt: new Date().toISOString() // lastAttempt: new Date().toISOString()
} // }
}).where('key', target.key) // }).where('key', target.key)
// -> Set recurring sync job // // -> Set recurring sync job
if (targetDef.schedule && target.syncInterval !== `P0D`) { // if (targetDef.schedule && target.syncInterval !== 'P0D') {
WIKI.scheduler.registerJob({ // WIKI.scheduler.registerJob({
name: `sync-storage`, // name: 'sync-storage',
immediate: false, // immediate: false,
schedule: target.syncInterval, // schedule: target.syncInterval,
repeat: true // repeat: true
}, target.key) // }, target.key)
} // }
// -> Set internal recurring sync job // // -> Set internal recurring sync job
if (targetDef.internalSchedule && targetDef.internalSchedule !== `P0D`) { // if (targetDef.internalSchedule && targetDef.internalSchedule !== 'P0D') {
WIKI.scheduler.registerJob({ // WIKI.scheduler.registerJob({
name: `sync-storage`, // name: 'sync-storage',
immediate: false, // immediate: false,
schedule: target.internalSchedule, // schedule: target.internalSchedule,
repeat: true // repeat: true
}, target.key) // }, target.key)
} // }
} catch (err) { // } catch (err) {
// -> Save initialization error // // -> Save initialization error
await WIKI.models.storage.query().patch({ // await WIKI.models.storage.query().patch({
state: { // state: {
status: 'error', // status: 'error',
message: err.message, // message: err.message,
lastAttempt: new Date().toISOString() // lastAttempt: new Date().toISOString()
} // }
}).where('key', target.key) // }).where('key', target.key)
} // }
} // }
} catch (err) { } catch (err) {
WIKI.logger.warn(err) WIKI.logger.warn(err)
throw err throw err

@ -17,7 +17,6 @@ module.exports = class Tag extends Model {
properties: { properties: {
id: {type: 'integer'}, id: {type: 'integer'},
tag: {type: 'string'}, tag: {type: 'string'},
title: {type: 'string'},
createdAt: {type: 'string'}, createdAt: {type: 'string'},
updatedAt: {type: 'string'} updatedAt: {type: 'string'}
@ -59,10 +58,7 @@ module.exports = class Tag extends Model {
// Create missing tags // Create missing tags
const newTags = _.filter(tags, t => !_.some(existingTags, ['tag', t])).map(t => ({ const newTags = _.filter(tags, t => !_.some(existingTags, ['tag', t])).map(t => ({ tag: t }))
tag: t,
title: t
}))
if (newTags.length > 0) { if (newTags.length > 0) {
if (WIKI.config.db.type === 'postgres') { if (WIKI.config.db.type === 'postgres') {
const createdTags = await WIKI.models.tags.query().insert(newTags) const createdTags = await WIKI.models.tags.query().insert(newTags)

@ -861,7 +861,7 @@ module.exports = class User extends Model {
* Logout the current user * Logout the current user
*/ */
static async logout (context) { static async logout (context) {
if (!context.req.user || context.req.user.id === 2) { if (!context.req.user || context.req.user.id === WIKI.config.auth.guestUserId) {
return '/' return '/'
} }
const usr = await WIKI.models.users.query().findById(context.req.user.id).select('providerKey') const usr = await WIKI.models.users.query().findById(context.req.user.id).select('providerKey')
@ -870,7 +870,7 @@ module.exports = class User extends Model {
} }
static async getGuestUser () { static async getGuestUser () {
const user = await WIKI.models.users.query().findById(2).withGraphJoined('groups').modifyGraph('groups', builder => { const user = await WIKI.models.users.query().findById(WIKI.config.auth.guestUserId).withGraphJoined('groups').modifyGraph('groups', builder => {
builder.select('groups.id', 'permissions') builder.select('groups.id', 'permissions')
}) })
if (!user) { if (!user) {
@ -882,7 +882,7 @@ module.exports = class User extends Model {
} }
static async getRootUser () { static async getRootUser () {
let user = await WIKI.models.users.query().findById(1) let user = await WIKI.models.users.query().findById(WIKI.config.auth.rootAdminUserId)
if (!user) { if (!user) {
WIKI.logger.error('CRITICAL ERROR: Root Administrator user is missing!') WIKI.logger.error('CRITICAL ERROR: Root Administrator user is missing!')
process.exit(1) process.exit(1)

@ -1,6 +0,0 @@
key: api
title: API Docs
description: REST / GraphQL Editor
contentType: yml
author: requarks.io
props: {}

@ -1,6 +0,0 @@
key: ckeditor
title: Visual Editor
description: Rich-text WYSIWYG Editor
contentType: html
author: requarks.io
props: {}

@ -1,6 +0,0 @@
key: code
title: Code
description: Raw HTML editor
contentType: html
author: requarks.io
props: {}

@ -1,6 +0,0 @@
key: markdown
title: Markdown
description: Basic Markdown editor
contentType: markdown
author: requarks.io
props: {}

@ -1,6 +0,0 @@
key: redirect
title: Redirection
description: Redirect the user
contentType: redirect
author: requarks.io
props: {}

@ -1,6 +0,0 @@
key: wysiwyg
title: WYSIWYG
description: Advanced Visual HTML Builder
contentType: html
author: requarks.io
props: {}

@ -1,44 +1,56 @@
key: azure
title: Azure Blob Storage title: Azure Blob Storage
description: Azure Blob Storage by Microsoft provides massively scalable object storage for unstructured data. icon: '/_assets/icons/ultraviolet-azure.svg'
author: requarks.io banner: '/_assets/storage/azure.jpg'
logo: https://static.requarks.io/logo/azure.svg description: Azure Blob Storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data.
website: https://azure.microsoft.com/services/storage/blobs/ vendor: Microsoft Corporation
isAvailable: true website: 'https://azure.microsoft.com'
supportedModes: assetDelivery:
- push isStreamingSupported: true
defaultMode: push isDirectAccessSupported: true
schedule: false defaultStreamingEnabled: true
defaultDirectAccessEnabled: true
contentTypes:
defaultTypesEnabled: ['images', 'documents', 'others', 'large']
defaultLargeThreshold: '5MB'
versioning:
isSupported: false
defaultEnabled: false
sync: false
props: props:
accountName: accountName:
type: String type: String
title: Account Name title: Account Name
default: '' default: ''
hint: Your unique account name. hint: Your unique account name.
icon: 3d-touch
order: 1 order: 1
accountKey: accountKey:
type: String type: String
title: Account Access Key title: Account Access Key
default: '' default: ''
hint: Either key 1 or key 2. hint: Either key 1 or key 2.
icon: key
sensitive: true sensitive: true
order: 2 order: 2
containerName: containerName:
type: String type: String
title: Container Name title: Container Name
default: 'wiki' default: wiki
hint: Will automatically be created if it doesn't exist yet. hint: Will automatically be created if it doesn't exist yet.
icon: shipping-container
order: 3 order: 3
storageTier: storageTier:
type: String type: String
title: Storage Tier title: Storage Tier
hint: Represents the access tier on a blob. Use Cool for lower storage costs but at higher retrieval costs. hint: Represents the access tier on a blob. Use Cool for lower storage costs but at higher retrieval costs.
icon: scan-stock
order: 4 order: 4
default: 'Cool' default: cool
enum: enum:
- 'Hot' - hot|Hot
- 'Cool' - cool|Cool
actions: actions:
- handler: exportAll - handler: exportAll
label: Export All label: Export All DB Assets to Azure
hint: Output all content from the DB to Azure Blog Storage, overwriting any existing data. If you enabled Azure Blog Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content. hint: Output all content from the DB to Azure Blog Storage, overwriting any existing data. If you enabled Azure Blog Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
icon: this-way-up

@ -1,10 +0,0 @@
key: box
title: Box
description: Box is a cloud content management and file sharing service for businesses.
author: requarks.io
logo: https://static.requarks.io/logo/box.svg
website: https://www.box.com/platform
props:
clientId: String
clientSecret: String
rootFolder: String

@ -1,26 +0,0 @@
module.exports = {
async activated() {
},
async deactivated() {
},
async init() {
},
async created() {
},
async updated() {
},
async deleted() {
},
async renamed() {
},
async getLocalLocation () {
}
}

@ -0,0 +1,25 @@
title: 'Database'
icon: '/_assets/icons/ultraviolet-database.svg'
banner: '/_assets/storage/database.jpg'
description: 'The local PostgreSQL database can store any assets. It is however not recommended to store large files directly in the database as this can cause performance issues.'
vendor: 'Wiki.js'
website: 'https://js.wiki'
assetDelivery:
isStreamingSupported: true
isDirectAccessSupported: false
defaultStreamingEnabled: true
defaultDirectAccessEnabled: false
contentTypes:
defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
defaultLargeThreshold: '5MB'
versioning:
isSupported: true
defaultEnabled: false
sync: false
props: {}
actions:
- handler: purge
label: Purge All Assets
hint: Delete all asset data from the database (not the metadata). Useful if you moved assets to another storage target and want to reduce the size of the database.
warn: This is a destructive action! Make sure all asset files are properly stored on another storage module! This action cannot be undone!
icon: explosion

@ -0,0 +1,14 @@
module.exports = {
async activated () { },
async deactivated () { },
async init () { },
async created (page) { },
async updated (page) { },
async deleted (page) { },
async renamed (page) { },
async assetUploaded (asset) { },
async assetDeleted (asset) { },
async assetRenamed (asset) { },
async getLocalLocation () { },
async exportAll () { }
}

@ -1,45 +0,0 @@
key: digitalocean
title: DigitalOcean Spaces
description: DigitalOcean provides developers and businesses a reliable, easy-to-use cloud computing platform of virtual servers (Droplets), object storage (Spaces) and more.
author: andrewsim
logo: https://static.requarks.io/logo/digitalocean.svg
website: https://www.digitalocean.com/products/spaces/
isAvailable: true
supportedModes:
- push
defaultMode: push
schedule: false
props:
endpoint:
type: String
title: Endpoint
hint: The DigitalOcean spaces endpoint that has the form ${REGION}.digitaloceanspaces.com
default: nyc3.digitaloceanspaces.com
enum:
- ams3.digitaloceanspaces.com
- fra1.digitaloceanspaces.com
- nyc3.digitaloceanspaces.com
- sfo2.digitaloceanspaces.com
- sgp1.digitaloceanspaces.com
order: 1
bucket:
type: String
title: Space Unique Name
hint: The unique space name to create (e.g. wiki-johndoe)
order: 2
accessKeyId:
type: String
title: Access Key ID
hint: The Access Key (Generated in API > Tokens/Keys > Spaces access keys).
order: 3
secretAccessKey :
type: String
title: Access Key Secret
hint: The Access Key Secret for the Access Key ID you created above.
sensitive: true
order: 4
actions:
- handler: exportAll
label: Export All
hint: Output all content from the DB to DigitalOcean Spaces, overwriting any existing data. If you enabled DigitalOcean Spaces after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.

@ -1,3 +0,0 @@
const S3CompatibleStorage = require('../s3/common')
module.exports = new S3CompatibleStorage('Digitalocean')

@ -1,34 +1,46 @@
-key: disk
 title: Local File System
-description: Local storage on disk or network shares.
-author: requarks.io
-logo: https://static.requarks.io/logo/local-fs.svg
-website: https://wiki.js.org
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+icon: '/_assets/icons/ultraviolet-hdd.svg'
+banner: '/_assets/storage/disk.jpg'
+description: Store files on the local file system or over network attached storage. Note that you must use replicated storage if using high-availability instances.
+vendor: Wiki.js
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 internalSchedule: P1D
 props:
   path:
     type: String
     title: Path
     hint: Absolute path without a trailing slash (e.g. /home/wiki/backup, C:\wiki\backup)
+    icon: symlink-directory
     order: 1
   createDailyBackups:
     type: Boolean
     default: false
     title: Create Daily Backups
     hint: A tar.gz archive containing all content will be created daily in a subfolder named _daily. Archives are kept for a month.
+    icon: archive-folder
     order: 2
 actions:
   - handler: dump
     label: Dump all content to disk
     hint: Output all content from the DB to the local disk. If you enabled this module after content was created or you temporarily disabled this module, you'll want to execute this action to add the missing files.
+    icon: downloads
   - handler: backup
     label: Create Backup
     hint: Will create a manual backup archive at this point in time, in a subfolder named _manual, from the contents currently on disk.
+    icon: archive-folder
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local disk folder.
+    icon: database-daily-import

@ -127,15 +127,12 @@ module.exports = {
     // -> Pages
     await pipeline(
-      WIKI.models.knex.column('id', 'path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt', 'editorKey').select().from('pages').where({
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
         isPrivate: false
       }).stream(),
       new stream.Transform({
         objectMode: true,
         transform: async (page, enc, cb) => {
-          const pageObject = await WIKI.models.pages.query().findById(page.id)
-          page.tags = await pageObject.$relatedQuery('tags')
           let fileName = `${page.path}.${pageHelper.getFileExtension(page.contentType)}`
           if (WIKI.config.lang.code !== page.localeCode) {
             fileName = `${page.localeCode}/${fileName}`
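
The two lines removed in this hunk were an N+1 pattern: one findById plus one $relatedQuery per streamed row, so the dump no longer embeds tags. If tags were ever needed in the export again, a hedged alternative would be a single eager-loaded query via Objection.js, for example:

// Hypothetical alternative, not part of this changeset:
// const pages = await WIKI.models.pages.query()
//   .withGraphFetched('tags')
//   .where('isPrivate', false)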

@ -1,9 +0,0 @@
key: dropbox
title: Dropbox
description: Dropbox is a file hosting service that offers cloud storage, file synchronization, personal cloud, and client software.
author: requarks.io
logo: https://static.requarks.io/logo/dropbox.svg
website: https://dropbox.com
props:
appKey: String
appSecret: String

@ -1,26 +0,0 @@
module.exports = {
async activated() {
},
async deactivated() {
},
async init() {
},
async created() {
},
async updated() {
},
async deleted() {
},
async renamed() {
},
async getLocalLocation () {
}
}

@ -0,0 +1,65 @@
title: Google Cloud Storage
icon: '/_assets/icons/ultraviolet-google.svg'
banner: '/_assets/storage/gcs.jpg'
description: Google Cloud Storage is an online file storage web service for storing and accessing data on Google Cloud Platform infrastructure.
vendor: Alphabet Inc.
website: 'https://cloud.google.com'
assetDelivery:
isStreamingSupported: true
isDirectAccessSupported: true
defaultStreamingEnabled: true
defaultDirectAccessEnabled: true
contentTypes:
defaultTypesEnabled: ['images', 'documents', 'others', 'large']
defaultLargeThreshold: '5MB'
versioning:
isSupported: false
defaultEnabled: false
sync: false
props:
accountName:
type: String
title: Project ID
hint: The project ID from the Google Developer's Console (e.g. grape-spaceship-123).
icon: 3d-touch
default: ''
order: 1
credentialsJSON:
type: String
title: JSON Credentials
hint: Contents of the JSON credentials file for the service account having Cloud Storage permissions.
icon: key
default: ''
multiline: true
sensitive: true
order: 2
bucket:
type: String
title: Unique bucket name
hint: The unique bucket name to create (e.g. wiki-johndoe).
icon: open-box
order: 3
storageTier:
type: String
title: Storage Tier
hint: Select the storage class to use when uploading new assets.
icon: scan-stock
order: 4
default: STANDARD
enum:
- STANDARD|Standard
- NEARLINE|Nearline
- COLDLINE|Coldline
- ARCHIVE|Archive
apiEndpoint:
type: String
title: API Endpoint
hint: The API endpoint of the service used to make requests.
icon: api
default: storage.google.com
order: 5
actions:
- handler: exportAll
label: Export All DB Assets to GCS
hint: Output all content from the DB to Google Cloud Storage, overwriting any existing data. If you enabled Google Cloud Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
icon: this-way-up
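
The enum entries above introduce a value|Label convention: the stored value before the pipe, the display label after it. A trivial sketch of how a client could split them (the convention is inferred from the entries themselves):

// e.g. 'NEARLINE|Nearline' -> { value: 'NEARLINE', label: 'Nearline' }
const parseEnumEntry = (entry) => {
  const [value, label] = entry.split('|')
  return { value, label: label ?? value }
}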

@ -0,0 +1,164 @@
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const pageHelper = require('../../../helpers/page.js')
const _ = require('lodash')
/* global WIKI */
const getFilePath = (page, pathKey) => {
const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
}
module.exports = {
async activated() {
},
async deactivated() {
},
async init() {
WIKI.logger.info(`(STORAGE/AZURE) Initializing...`)
const { accountName, accountKey, containerName } = this.config
this.client = new BlobServiceClient(
`https://${accountName}.blob.core.windows.net`,
new StorageSharedKeyCredential(accountName, accountKey)
)
this.container = this.client.getContainerClient(containerName)
try {
await this.container.create()
} catch (err) {
if (err.statusCode !== 409) {
WIKI.logger.warn(err)
throw err
}
}
WIKI.logger.info(`(STORAGE/AZURE) Initialization completed.`)
},
async created (page) {
WIKI.logger.info(`(STORAGE/AZURE) Creating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
const pageContent = page.injectMetadata()
const blockBlobClient = this.container.getBlockBlobClient(filePath)
await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
},
async updated (page) {
WIKI.logger.info(`(STORAGE/AZURE) Updating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
const pageContent = page.injectMetadata()
const blockBlobClient = this.container.getBlockBlobClient(filePath)
await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
},
async deleted (page) {
WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${page.path}...`)
const filePath = getFilePath(page, 'path')
const blockBlobClient = this.container.getBlockBlobClient(filePath)
await blockBlobClient.delete({
deleteSnapshots: 'include'
})
},
async renamed(page) {
WIKI.logger.info(`(STORAGE/AZURE) Renaming file ${page.path} to ${page.destinationPath}...`)
let sourceFilePath = getFilePath(page, 'path')
let destinationFilePath = getFilePath(page, 'destinationPath')
if (WIKI.config.lang.namespacing) {
if (WIKI.config.lang.code !== page.localeCode) {
sourceFilePath = `${page.localeCode}/${sourceFilePath}`
}
if (WIKI.config.lang.code !== page.destinationLocaleCode) {
destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
}
}
const sourceBlockBlobClient = this.container.getBlockBlobClient(sourceFilePath)
const destBlockBlobClient = this.container.getBlockBlobClient(destinationFilePath)
await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
await sourceBlockBlobClient.delete({
deleteSnapshots: 'include'
})
},
/**
* ASSET UPLOAD
*
* @param {Object} asset Asset to upload
*/
async assetUploaded (asset) {
WIKI.logger.info(`(STORAGE/AZURE) Creating new file ${asset.path}...`)
const blockBlobClient = this.container.getBlockBlobClient(asset.path)
await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
},
/**
* ASSET DELETE
*
* @param {Object} asset Asset to delete
*/
async assetDeleted (asset) {
WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${asset.path}...`)
const blockBlobClient = this.container.getBlockBlobClient(asset.path)
await blockBlobClient.delete({
deleteSnapshots: 'include'
})
},
/**
* ASSET RENAME
*
* @param {Object} asset Asset to rename
*/
async assetRenamed (asset) {
WIKI.logger.info(`(STORAGE/AZURE) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
const sourceBlockBlobClient = this.container.getBlockBlobClient(asset.path)
const destBlockBlobClient = this.container.getBlockBlobClient(asset.destinationPath)
await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
await sourceBlockBlobClient.delete({
deleteSnapshots: 'include'
})
},
async getLocalLocation () {
},
/**
* HANDLERS
*/
async exportAll() {
WIKI.logger.info(`(STORAGE/AZURE) Exporting all content to Azure Blob Storage...`)
// -> Pages
await pipeline(
WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
isPrivate: false
}).stream(),
new stream.Transform({
objectMode: true,
transform: async (page, enc, cb) => {
const filePath = getFilePath(page, 'path')
WIKI.logger.info(`(STORAGE/AZURE) Adding page ${filePath}...`)
const pageContent = pageHelper.injectPageMetadata(page)
const blockBlobClient = this.container.getBlockBlobClient(filePath)
await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
cb()
}
})
)
// -> Assets
const assetFolders = await WIKI.models.assetFolders.getAllPaths()
await pipeline(
WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
new stream.Transform({
objectMode: true,
transform: async (asset, enc, cb) => {
const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
WIKI.logger.info(`(STORAGE/AZURE) Adding asset ${filename}...`)
const blockBlobClient = this.container.getBlockBlobClient(filename)
await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
cb()
}
})
)
WIKI.logger.info('(STORAGE/AZURE) All content has been pushed to Azure Blob Storage.')
}
}
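
For clarity, the getFilePath helper above only prefixes the locale code when namespacing is enabled and the page locale differs from the default. A worked example with assumed values, and assuming pageHelper.getFileExtension resolves the markdown content type to md:

// With WIKI.config.lang = { namespacing: true, code: 'en' }:
//   getFilePath({ path: 'home', localeCode: 'en', contentType: 'markdown' }, 'path')    // -> 'home.md'
//   getFilePath({ path: 'accueil', localeCode: 'fr', contentType: 'markdown' }, 'path') // -> 'fr/accueil.md'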

@ -1,9 +0,0 @@
key: gdrive
title: Google Drive
description: Google Drive is a file storage and synchronization service developed by Google.
author: requarks.io
logo: https://static.requarks.io/logo/google-drive.svg
website: https://www.google.com/drive/
props:
clientId: String
clientSecret: String

@ -1,26 +0,0 @@
module.exports = {
async activated() {
},
async deactivated() {
},
async init() {
},
async created() {
},
async updated() {
},
async deleted() {
},
async renamed() {
},
async getLocalLocation () {
}
}

@ -1,108 +1,151 @@
-key: git
 title: Git
-description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people.
-author: requarks.io
-logo: https://static.requarks.io/logo/git-alt.svg
-website: https://git-scm.com/
-isAvailable: true
-supportedModes:
-  - sync
-  - push
-  - pull
-defaultMode: sync
-schedule: PT5M
+icon: '/_assets/icons/ultraviolet-git.svg'
+banner: '/_assets/storage/git.jpg'
+description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people. If using GitHub, use the GitHub module instead!
+vendor: Software Freedom Conservancy, Inc.
+website: 'https://git-scm.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: true
+  defaultEnabled: true
+  isForceEnabled: true
+sync:
+  supportedModes:
+    - sync
+    - push
+    - pull
+  defaultMode: sync
+  schedule: PT5M
 props:
   authType:
     type: String
     default: 'ssh'
     title: Authentication Type
     hint: Use SSH for maximum security.
+    icon: security-configuration
     enum:
-      - 'basic'
-      - 'ssh'
+      - basic|Basic
+      - ssh|SSH
+    enumDisplay: buttons
     order: 1
   repoUrl:
     type: String
     title: Repository URI
-    hint: Git-compliant URI (e.g. git@github.com:org/repo.git for ssh, https://github.com/org/repo.git for basic)
+    hint: Git-compliant URI (e.g. git@server.com:org/repo.git for ssh, https://server.com/org/repo.git for basic)
+    icon: dns
     order: 2
   branch:
     type: String
-    default: 'master'
+    default: 'main'
     title: Branch
     hint: The branch to use during pull / push
+    icon: code-fork
     order: 3
   sshPrivateKeyMode:
     type: String
     title: SSH Private Key Mode
-    hint: SSH Authentication Only - The mode to use to load the private key. Fill in the corresponding field below.
+    hint: The mode to use to load the private key. Fill in the corresponding field below.
+    icon: grand-master-key
     order: 11
-    default: 'path'
+    default: inline
     enum:
-      - 'path'
-      - 'contents'
+      - path|File Path
+      - inline|Inline Contents
+    enumDisplay: buttons
+    if:
+      - { key: 'authType', eq: 'ssh' }
   sshPrivateKeyPath:
     type: String
-    title: A - SSH Private Key Path
-    hint: SSH Authentication Only - Absolute path to the key. The key must NOT be passphrase-protected. Mode must be set to path to use this option.
+    title: SSH Private Key Path
+    hint: Absolute path to the key. The key must NOT be passphrase-protected.
+    icon: key
     order: 12
+    if:
+      - { key: 'authType', eq: 'ssh' }
+      - { key: 'sshPrivateKeyMode', eq: 'path' }
   sshPrivateKeyContent:
     type: String
-    title: B - SSH Private Key Contents
-    hint: SSH Authentication Only - Paste the contents of the private key. The key must NOT be passphrase-protected. Mode must be set to contents to use this option.
+    title: SSH Private Key Contents
+    hint: Paste the contents of the private key. The key must NOT be passphrase-protected.
+    icon: key
     multiline: true
     sensitive: true
     order: 13
+    if:
+      - { key: 'sshPrivateKeyMode', eq: 'inline' }
   verifySSL:
     type: Boolean
     default: true
     title: Verify SSL Certificate
     hint: Some hosts require SSL certificate checking to be disabled. Leave enabled for proper security.
+    icon: security-ssl
     order: 14
   basicUsername:
     type: String
     title: Username
     hint: Basic Authentication Only
+    icon: test-account
     order: 20
+    if:
+      - { key: 'authType', eq: 'basic' }
   basicPassword:
     type: String
     title: Password / PAT
     hint: Basic Authentication Only
+    icon: password
     sensitive: true
     order: 21
+    if:
+      - { key: 'authType', eq: 'basic' }
   defaultEmail:
     type: String
     title: Default Author Email
     default: 'name@company.com'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 22
+    icon: email
+    order: 30
   defaultName:
     type: String
     title: Default Author Name
     default: 'John Smith'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 23
+    icon: customer
+    order: 31
   localRepoPath:
     type: String
     title: Local Repository Path
     default: './data/repo'
     hint: 'Path where the local git repository will be created.'
-    order: 30
+    icon: symlink-directory
+    order: 32
   gitBinaryPath:
     type: String
     title: Git Binary Path
     default: ''
     hint: Optional - Absolute path to the Git binary, when not available in PATH. Leave empty to use the default PATH location (recommended).
+    icon: run-command
     order: 50
 actions:
   - handler: syncUntracked
     label: Add Untracked Changes
     hint: Output all content from the DB to the local Git repository to ensure all untracked content is saved. If you enabled Git after content was created or you temporarily disabled Git, you'll want to execute this action to add the missing untracked changes.
+    icon: database-daily-export
   - handler: sync
     label: Force Sync
     hint: Will trigger an immediate sync operation, regardless of the current sync schedule. The sync direction is respected.
+    icon: synchronize
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local Git repository, regardless of the latest commit state. Useful for importing content from the remote repository created before git was enabled.
+    icon: database-daily-import
   - handler: purge
     label: Purge Local Repository
     hint: If you have unrelated merge histories, clearing the local repository can resolve this issue. This will not affect the remote repository or perform any commit.
+    icon: trash
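
The new if blocks gate a field's visibility on the value of another prop, replacing the old "SSH Authentication Only" hint prose. A minimal sketch of how such { key, eq } rules could be evaluated on the client (the shape is inferred from the YAML above; the real evaluator is not in this diff):

// All listed conditions must hold for the field to be displayed.
function isFieldVisible (field, values) {
  if (!Array.isArray(field.if)) { return true }
  return field.if.every(cond => values[cond.key] === cond.eq)
}
// isFieldVisible({ if: [{ key: 'authType', eq: 'ssh' }] }, { authType: 'basic' }) // -> false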

@ -73,7 +73,7 @@ module.exports = {
         mode: 0o600
       })
     } catch (err) {
-      WIKI.logger.error(err)
+      console.error(err)
       throw err
     }
   }
@ -142,9 +142,7 @@ module.exports = {
     if (_.get(diff, 'files', []).length > 0) {
       let filesToProcess = []
       for (const f of diff.files) {
-        const fMoved = f.file.split(' => ')
-        const fName = fMoved.length === 2 ? fMoved[1] : fMoved[0]
-        const fPath = path.join(this.repoPath, fName)
+        const fPath = path.join(this.repoPath, f.file)
         let fStats = { size: 0 }
         try {
           fStats = await fs.stat(fPath)
@ -161,8 +159,7 @@ module.exports = {
           path: fPath,
           stats: fStats
         },
-        oldPath: fMoved[0],
-        relPath: fName
+        relPath: f.file
       })
     }
     await this.processFiles(filesToProcess, rootUser)
@ -177,25 +174,11 @@ module.exports = {
   async processFiles(files, user) {
     for (const item of files) {
       const contentType = pageHelper.getContentType(item.relPath)
-      const fileExists = await fs.pathExists(item.file.path)
+      const fileExists = await fs.pathExists(item.file)
       if (!item.binary && contentType) {
         // -> Page
-        if (fileExists && !item.importAll && item.relPath !== item.oldPath) {
-          // Page was renamed by git, so rename in DB
-          WIKI.logger.info(`(STORAGE/GIT) Page marked as renamed: from ${item.oldPath} to ${item.relPath}`)
-          const contentPath = pageHelper.getPagePath(item.oldPath)
-          const contentDestinationPath = pageHelper.getPagePath(item.relPath)
-          await WIKI.models.pages.movePage({
-            user: user,
-            path: contentPath.path,
-            destinationPath: contentDestinationPath.path,
-            locale: contentPath.locale,
-            destinationLocale: contentPath.locale,
-            skipStorage: true
-          })
-        } else if (!fileExists && !item.importAll && item.deletions > 0 && item.insertions === 0) {
+        if (!fileExists && item.deletions > 0 && item.insertions === 0) {
           // Page was deleted by git, can safely mark as deleted in DB
           WIKI.logger.info(`(STORAGE/GIT) Page marked as deleted: ${item.relPath}`)
@ -224,23 +207,7 @@ module.exports = {
       } else {
         // -> Asset
-        if (fileExists && !item.importAll && ((item.before === item.after) || (item.deletions === 0 && item.insertions === 0))) {
-          // Asset was renamed by git, so rename in DB
-          WIKI.logger.info(`(STORAGE/GIT) Asset marked as renamed: from ${item.oldPath} to ${item.relPath}`)
-          const fileHash = assetHelper.generateHash(item.relPath)
-          const assetToRename = await WIKI.models.assets.query().findOne({ hash: fileHash })
-          if (assetToRename) {
-            await WIKI.models.assets.query().patch({
-              filename: item.relPath,
-              hash: fileHash
-            }).findById(assetToRename.id)
-            await assetToRename.deleteAssetCache()
-          } else {
-            WIKI.logger.info(`(STORAGE/GIT) Asset was not found in the DB, nothing to rename: ${item.relPath}`)
-          }
-          continue
-        } else if (!fileExists && !item.importAll && ((item.before > 0 && item.after === 0) || (item.deletions > 0 && item.insertions === 0))) {
+        if (!fileExists && ((item.before > 0 && item.after === 0) || (item.deletions > 0 && item.insertions === 0))) {
           // Asset was deleted by git, can safely mark as deleted in DB
           WIKI.logger.info(`(STORAGE/GIT) Asset marked as deleted: ${item.relPath}`)
@ -427,8 +394,7 @@ module.exports = {
           relPath,
           file,
           deletions: 0,
-          insertions: 0,
-          importAll: true
+          insertions: 0
         }], rootUser)
       }
       cb()
@ -445,15 +411,12 @@ module.exports = {
     // -> Pages
     await pipeline(
-      WIKI.models.knex.column('id', 'path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt', 'editorKey').select().from('pages').where({
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
         isPrivate: false
       }).stream(),
       new stream.Transform({
         objectMode: true,
         transform: async (page, enc, cb) => {
-          const pageObject = await WIKI.models.pages.query().findById(page.id)
-          page.tags = await pageObject.$relatedQuery('tags')
           let fileName = `${page.path}.${pageHelper.getFileExtension(page.contentType)}`
           if (WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode) {
             fileName = `${page.localeCode}/${fileName}`

@ -0,0 +1,49 @@
title: GitHub
icon: '/_assets/icons/ultraviolet-github.svg'
banner: '/_assets/storage/github.jpg'
description: Millions of developers and companies build, ship, and maintain their software on GitHub - the largest and most advanced development platform in the world.
vendor: GitHub, Inc.
website: 'https://github.com'
assetDelivery:
isStreamingSupported: false
isDirectAccessSupported: false
defaultStreamingEnabled: false
defaultDirectAccessEnabled: false
contentTypes:
defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
defaultLargeThreshold: '5MB'
versioning:
isSupported: true
defaultEnabled: true
isForceEnabled: true
sync: false
setup:
handler: github
defaultValues:
accountType: org
org: ''
publicUrl: https://
props:
appName:
readOnly: true
type: String
title: App Name
hint: Name of the generated app in GitHub.
icon: 3d-touch
repoFullName:
readOnly: true
type: String
title: GitHub Repository
hint: The GitHub repository used for content synchronization.
icon: github
repoDefaultBranch:
readOnly: true
type: String
title: Default Branch
hint: The repository default branch.
icon: code-fork
actions:
- handler: exportAll
label: Export All DB Assets to GitHub
hint: Output all content from the DB to GitHub, overwriting any existing data. If you enabled GitHub after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
icon: this-way-up

@ -0,0 +1,211 @@
const { Octokit, App } = require('octokit')
/* global WIKI */
module.exports = {
async activated () { },
async deactivated () { },
async init () { },
/**
* SETUP FUNCTIONS
*/
async setup (id, state) {
try {
switch (state.step) {
// --------------------------------------------
// -> VALIDATE CALLBACK CODE AFTER APP CREATION
// --------------------------------------------
case 'connect': {
const gh = new Octokit({
userAgent: 'wikijs'
})
const resp = await gh.request('POST /app-manifests/{code}/conversions', {
code: state.code
})
if (resp.status > 200 && resp.status < 300) {
await WIKI.models.storage.query().patch({
config: {
appId: resp.data.id,
appName: resp.data.name,
appSlug: resp.data.slug,
appClientId: resp.data.client_id,
appClientSecret: resp.data.client_secret,
appWebhookSecret: resp.data.webhook_secret,
appPem: resp.data.pem,
appPermissions: resp.data.permissions,
appEvents: resp.data.events,
ownerLogin: resp.data.owner?.login,
ownerId: resp.data.owner?.id
},
state: {
current: 'ok',
setup: 'pendinginstall'
}
}).where('id', id)
return {
nextStep: 'installApp',
url: `https://github.com/apps/${resp.data.slug}/installations/new/permissions?target_id=${resp.data.owner?.id}`
}
} else {
throw new Error('GitHub refused the code or could not be reached.')
}
}
// -----------------------
// VERIFY APP INSTALLATION
// -----------------------
case 'verify': {
const tgt = await WIKI.models.storage.query().findById(id)
if (!tgt) {
throw new Error('Invalid Target ID')
}
const ghApp = new App({
appId: tgt.config.appId,
privateKey: tgt.config.appPem,
Octokit: Octokit.defaults({
userAgent: 'wikijs'
}),
oauth: {
clientId: tgt.config.appClientId,
clientSecret: tgt.config.appClientSecret
},
webhooks: {
secret: tgt.config.appWebhookSecret
}
})
// -> Find Installation ID
let installId = null
let installTotal = 0
for await (const { installation } of ghApp.eachInstallation.iterator()) {
if (installTotal < 1) {
installId = installation.id
WIKI.logger.debug(`Using GitHub App installation ID ${installId}`)
}
installTotal++
}
if (installTotal < 1) {
throw new Error('App is not installed on any GitHub account!')
} else if (installTotal > 1) {
WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 account. Only the first one ${installId} will be used.`)
}
// -> Fetch Repository Info
let repo = null
let repoTotal = 0
for await (const { repository } of ghApp.eachRepository.iterator({ installationId: installId })) {
if (repository.archived || repository.disabled) {
WIKI.logger.debug(`Skipping GitHub Repository ${repository.id} because it is archived or disabled.`)
continue
}
if (repoTotal < 1) {
repo = repository
WIKI.logger.debug(`Using GitHub Repository ${repo.id}`)
}
repoTotal++
}
if (repoTotal < 1) {
throw new Error('App is not installed on any GitHub repository!')
} else if (repoTotal > 1) {
WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 repository. Only the first one (${repo.full_name}) will be used.`)
}
// -> Save install/repo info
await WIKI.models.storage.query().patch({
isEnabled: true,
config: {
...tgt.config,
installId,
repoId: repo.id,
repoName: repo.name,
repoOwner: repo.owner?.login,
repoDefaultBranch: repo.default_branch,
repoFullName: repo.full_name
},
state: {
current: 'ok',
setup: 'configured'
}
}).where('id', id)
return {
nextStep: 'completed'
}
}
default: {
throw new Error('Invalid Setup Step')
}
}
} catch (err) {
WIKI.logger.warn('GitHub Storage Module Setup Failed:')
WIKI.logger.warn(err)
throw err
}
},
async setupDestroy (id) {
try {
const tgt = await WIKI.models.storage.query().findById(id)
if (!tgt) {
throw new Error('Invalid Target ID')
}
WIKI.logger.info('Resetting GitHub storage configuration...')
const ghApp = new App({
appId: tgt.config.appId,
privateKey: tgt.config.appPem,
Octokit: Octokit.defaults({
userAgent: 'wikijs'
}),
oauth: {
clientId: tgt.config.appClientId,
clientSecret: tgt.config.appClientSecret
},
webhooks: {
secret: tgt.config.appWebhookSecret
}
})
// -> Reset storage module config
await WIKI.models.storage.query().patch({
isEnabled: false,
config: {},
state: {
current: 'ok',
setup: 'notconfigured'
}
}).where('id', id)
// -> Try to delete installation on GitHub
if (tgt.config.installId) {
try {
await ghApp.octokit.request('DELETE /app/installations/{installation_id}', {
installation_id: tgt.config.installId
})
WIKI.logger.info('Deleted GitHub installation successfully.')
} catch (err) {
WIKI.logger.warn('Could not delete GitHub installation automatically. Please remove the installation on GitHub.')
}
}
} catch (err) {
WIKI.logger.warn('GitHub Storage Module Destroy Failed:')
WIKI.logger.warn(err)
throw err
}
},
async created (page) { },
async updated (page) { },
async deleted (page) { },
async renamed (page) { },
async assetUploaded (asset) { },
async assetDeleted (asset) { },
async assetRenamed (asset) { },
async getLocalLocation () { },
async exportAll () { }
}
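
The connect step completes the GitHub App Manifest flow: GitHub redirects back with a one-time code that POST /app-manifests/{code}/conversions exchanges for the app's credentials. A hedged sketch of how an admin controller might drive the module (route and variable names are assumptions; only the setup/setupDestroy signatures come from the code above):

// Step 1 - GitHub redirects back with ?code=... after app creation:
// const { nextStep, url } = await storageModule.setup(targetId, { step: 'connect', code: req.query.code })
// res.redirect(url) // send the admin to install the app on a repository
// Step 2 - once installed, confirm and persist the installation:
// await storageModule.setup(targetId, { step: 'verify' })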

@ -1,9 +0,0 @@
key: onedrive
title: OneDrive
description: OneDrive is a file hosting service operated by Microsoft as part of its suite of Office Online services.
author: requarks.io
logo: https://static.requarks.io/logo/onedrive.svg
website: https://onedrive.live.com/about/
props:
clientId: String
clientSecret: String

@ -1,26 +0,0 @@
module.exports = {
async activated() {
},
async deactivated() {
},
async init() {
},
async created() {
},
async updated() {
},
async deleted() {
},
async renamed() {
},
async getLocalLocation () {
}
}

@ -1,168 +0,0 @@
const S3 = require('aws-sdk/clients/s3')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const _ = require('lodash')
const pageHelper = require('../../../helpers/page.js')
/* global WIKI */
/**
* Deduce the file path given the `page` object and the object's key to the page's path.
*/
const getFilePath = (page, pathKey) => {
const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
}
/**
* Can be used with S3 compatible storage.
*/
module.exports = class S3CompatibleStorage {
constructor(storageName) {
this.storageName = storageName
this.bucketName = ""
}
async activated() {
// not used
}
async deactivated() {
// not used
}
async init() {
WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
const { accessKeyId, secretAccessKey, bucket } = this.config
const s3Config = {
accessKeyId,
secretAccessKey,
params: { Bucket: bucket },
apiVersions: '2006-03-01'
}
if (!_.isNil(this.config.region)) {
s3Config.region = this.config.region
}
if (!_.isNil(this.config.endpoint)) {
s3Config.endpoint = this.config.endpoint
}
if (!_.isNil(this.config.sslEnabled)) {
s3Config.sslEnabled = this.config.sslEnabled
}
if (!_.isNil(this.config.s3ForcePathStyle)) {
s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
}
if (!_.isNil(this.config.s3BucketEndpoint)) {
s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
}
this.s3 = new S3(s3Config)
this.bucketName = bucket
// determine if a bucket exists and you have permission to access it
await this.s3.headBucket().promise()
WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
}
async created(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
}
async updated(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
}
async deleted(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.deleteObject({ Key: filePath }).promise()
}
async renamed(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
let sourceFilePath = getFilePath(page, 'path')
let destinationFilePath = getFilePath(page, 'destinationPath')
if (WIKI.config.lang.namespacing) {
if (WIKI.config.lang.code !== page.localeCode) {
sourceFilePath = `${page.localeCode}/${sourceFilePath}`
}
if (WIKI.config.lang.code !== page.destinationLocaleCode) {
destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
}
}
await this.s3.copyObject({ CopySource: `${this.bucketName}/${sourceFilePath}`, Key: destinationFilePath }).promise()
await this.s3.deleteObject({ Key: sourceFilePath }).promise()
}
/**
* ASSET UPLOAD
*
* @param {Object} asset Asset to upload
*/
async assetUploaded (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
}
/**
* ASSET DELETE
*
* @param {Object} asset Asset to delete
*/
async assetDeleted (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
await this.s3.deleteObject({ Key: asset.path }).promise()
}
/**
* ASSET RENAME
*
* @param {Object} asset Asset to rename
*/
async assetRenamed (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
await this.s3.copyObject({ CopySource: `${this.bucketName}/${asset.path}`, Key: asset.destinationPath }).promise()
await this.s3.deleteObject({ Key: asset.path }).promise()
}
async getLocalLocation () {
}
/**
* HANDLERS
*/
async exportAll() {
WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)
// -> Pages
await pipeline(
WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
isPrivate: false
}).stream(),
new stream.Transform({
objectMode: true,
transform: async (page, enc, cb) => {
const filePath = getFilePath(page, 'path')
WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
cb()
}
})
)
// -> Assets
const assetFolders = await WIKI.models.assetFolders.getAllPaths()
await pipeline(
WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
new stream.Transform({
objectMode: true,
transform: async (asset, enc, cb) => {
const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
cb()
}
})
)
WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
}
}

@ -1,37 +1,159 @@
-key: s3
-title: Amazon S3
-description: Amazon S3 is a cloud computing web service offered by Amazon Web Services which provides object storage.
-author: andrewsim
-logo: https://static.requarks.io/logo/aws-s3.svg
-website: https://aws.amazon.com/s3/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: AWS S3 / DigitalOcean Spaces
+icon: '/_assets/icons/ultraviolet-amazon-web-services.svg'
+banner: '/_assets/storage/s3.jpg'
+description: Amazon Simple Storage Service (Amazon S3) is an object storage service offering industry-leading scalability, data availability, security, and performance.
+vendor: Amazon.com, Inc.
+website: 'https://aws.amazon.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
-  region:
+  mode:
+    type: String
+    title: Mode
+    hint: Select a preset configuration mode or define a custom one.
+    icon: tune
+    default: aws
+    order: 1
+    enum:
+      - aws|AWS S3
+      - do|DigitalOcean Spaces
+      - custom|Custom
+  awsRegion:
     type: String
     title: Region
     hint: The AWS datacenter region where the bucket will be created.
-    order: 1
+    icon: geography
+    default: us-east-1
+    enum:
+      - af-south-1|af-south-1 - Africa (Cape Town)
+      - ap-east-1|ap-east-1 - Asia Pacific (Hong Kong)
+      - ap-southeast-3|ap-southeast-3 - Asia Pacific (Jakarta)
+      - ap-south-1|ap-south-1 - Asia Pacific (Mumbai)
+      - ap-northeast-3|ap-northeast-3 - Asia Pacific (Osaka)
+      - ap-northeast-2|ap-northeast-2 - Asia Pacific (Seoul)
+      - ap-southeast-1|ap-southeast-1 - Asia Pacific (Singapore)
+      - ap-southeast-2|ap-southeast-2 - Asia Pacific (Sydney)
+      - ap-northeast-1|ap-northeast-1 - Asia Pacific (Tokyo)
+      - ca-central-1|ca-central-1 - Canada (Central)
+      - cn-north-1|cn-north-1 - China (Beijing)
+      - cn-northwest-1|cn-northwest-1 - China (Ningxia)
+      - eu-central-1|eu-central-1 - Europe (Frankfurt)
+      - eu-west-1|eu-west-1 - Europe (Ireland)
+      - eu-west-2|eu-west-2 - Europe (London)
+      - eu-south-1|eu-south-1 - Europe (Milan)
+      - eu-west-3|eu-west-3 - Europe (Paris)
+      - eu-north-1|eu-north-1 - Europe (Stockholm)
+      - me-south-1|me-south-1 - Middle East (Bahrain)
+      - sa-east-1|sa-east-1 - South America (São Paulo)
+      - us-east-1|us-east-1 - US East (N. Virginia)
+      - us-east-2|us-east-2 - US East (Ohio)
+      - us-west-1|us-west-1 - US West (N. California)
+      - us-west-2|us-west-2 - US West (Oregon)
+    order: 2
+    if:
+      - { key: 'mode', eq: 'aws' }
+  doRegion:
+    type: String
+    title: Region
+    hint: The DigitalOcean Spaces region
+    icon: geography
+    default: nyc3
+    enum:
+      - ams3|Amsterdam
+      - fra1|Frankfurt
+      - nyc3|New York
+      - sfo2|San Francisco 2
+      - sfo3|San Francisco 3
+      - sgp1|Singapore
+    order: 2
+    if:
+      - { key: 'mode', eq: 'do' }
+  endpoint:
+    type: String
+    title: Endpoint URI
+    hint: The full S3-compliant endpoint URI.
+    icon: dns
+    default: https://service.region.example.com
+    order: 2
+    if:
+      - { key: 'mode', eq: 'custom' }
   bucket:
     type: String
     title: Unique bucket name
     hint: The unique bucket name to create (e.g. wiki-johndoe).
-    order: 2
+    icon: open-box
+    order: 3
   accessKeyId:
     type: String
     title: Access Key ID
     hint: The Access Key.
-    order: 3
+    icon: 3d-touch
+    order: 4
   secretAccessKey:
     type: String
     title: Secret Access Key
     hint: The Secret Access Key for the Access Key ID you created above.
+    icon: key
     sensitive: true
-    order: 4
+    order: 5
+  storageTier:
+    type: String
+    title: Storage Tier
+    hint: The storage tier to use when adding files.
+    icon: scan-stock
+    order: 6
+    default: STANDARD
+    enum:
+      - STANDARD|Standard
+      - STANDARD_IA|Standard Infrequent Access
+      - INTELLIGENT_TIERING|Intelligent Tiering
+      - ONEZONE_IA|One Zone Infrequent Access
+      - REDUCED_REDUNDANCY|Reduced Redundancy
+      - GLACIER_IR|Glacier Instant Retrieval
+      - GLACIER|Glacier Flexible Retrieval
+      - DEEP_ARCHIVE|Glacier Deep Archive
+      - OUTPOSTS|Outposts
+    if:
+      - { key: 'mode', eq: 'aws' }
+  sslEnabled:
+    type: Boolean
+    title: Use SSL
+    hint: Whether to enable SSL for requests
+    icon: secure
+    default: true
+    order: 10
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3ForcePathStyle:
+    type: Boolean
+    title: Force Path Style for S3 objects
+    hint: Whether to force path style URLs for S3 objects.
+    icon: filtration
+    default: false
+    order: 11
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3BucketEndpoint:
+    type: Boolean
+    title: Single Bucket Endpoint
+    hint: Whether the provided endpoint addresses an individual bucket.
+    icon: swipe-right
+    default: false
+    order: 12
+    if:
+      - { key: 'mode', eq: 'custom' }
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to S3
     hint: Output all content from the DB to S3, overwriting any existing data. If you enabled S3 after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up
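
With the standalone DigitalOcean and S3 Generic modules removed above, the preset mode presumably selects how the client endpoint is derived. A sketch of that resolution under the fields defined here; the DigitalOcean endpoint pattern ${region}.digitaloceanspaces.com comes from the removed module's own enum, and the function name is an assumption:

// Hypothetical resolution of the merged module's config, for illustration only.
function resolveS3ClientConfig (config) {
  switch (config.mode) {
    case 'aws':
      return { region: config.awsRegion }
    case 'do':
      // Same endpoint pattern the standalone DigitalOcean module used.
      return { endpoint: `${config.doRegion}.digitaloceanspaces.com` }
    default: // 'custom'
      return {
        endpoint: config.endpoint,
        sslEnabled: config.sslEnabled,
        s3ForcePathStyle: config.s3ForcePathStyle,
        s3BucketEndpoint: config.s3BucketEndpoint
      }
  }
}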

@ -1,3 +1,166 @@
-const S3CompatibleStorage = require('./common')
+const S3 = require('aws-sdk/clients/s3')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const _ = require('lodash')
const pageHelper = require('../../../helpers/page.js')
-module.exports = new S3CompatibleStorage('S3')
+/* global WIKI */
/**
* Deduce the file path given the `page` object and the object's key to the page's path.
*/
const getFilePath = (page, pathKey) => {
const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
}
/**
* Can be used with S3 compatible storage.
*/
module.exports = class S3CompatibleStorage {
constructor(storageName) {
this.storageName = storageName
}
async activated() {
// not used
}
async deactivated() {
// not used
}
async init() {
WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
const { accessKeyId, secretAccessKey, bucket } = this.config
const s3Config = {
accessKeyId,
secretAccessKey,
params: { Bucket: bucket },
apiVersions: '2006-03-01'
}
if (!_.isNil(this.config.region)) {
s3Config.region = this.config.region
}
if (!_.isNil(this.config.endpoint)) {
s3Config.endpoint = this.config.endpoint
}
if (!_.isNil(this.config.sslEnabled)) {
s3Config.sslEnabled = this.config.sslEnabled
}
if (!_.isNil(this.config.s3ForcePathStyle)) {
s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
}
if (!_.isNil(this.config.s3BucketEndpoint)) {
s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
}
this.s3 = new S3(s3Config)
// determine if a bucket exists and you have permission to access it
await this.s3.headBucket().promise()
WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
}
async created(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
}
async updated(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
}
async deleted(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
const filePath = getFilePath(page, 'path')
await this.s3.deleteObject({ Key: filePath }).promise()
}
async renamed(page) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
let sourceFilePath = getFilePath(page, 'path')
let destinationFilePath = getFilePath(page, 'destinationPath')
if (WIKI.config.lang.namespacing) {
if (WIKI.config.lang.code !== page.localeCode) {
sourceFilePath = `${page.localeCode}/${sourceFilePath}`
}
if (WIKI.config.lang.code !== page.destinationLocaleCode) {
destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
}
}
await this.s3.copyObject({ CopySource: sourceFilePath, Key: destinationFilePath }).promise()
await this.s3.deleteObject({ Key: sourceFilePath }).promise()
}
/**
* ASSET UPLOAD
*
* @param {Object} asset Asset to upload
*/
async assetUploaded (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
}
/**
* ASSET DELETE
*
* @param {Object} asset Asset to delete
*/
async assetDeleted (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
await this.s3.deleteObject({ Key: asset.path }).promise()
}
/**
* ASSET RENAME
*
* @param {Object} asset Asset to rename
*/
async assetRenamed (asset) {
WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
await this.s3.copyObject({ CopySource: asset.path, Key: asset.destinationPath }).promise()
await this.s3.deleteObject({ Key: asset.path }).promise()
}
async getLocalLocation () {
}
/**
* HANDLERS
*/
async exportAll() {
WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)
// -> Pages
await pipeline(
WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
isPrivate: false
}).stream(),
new stream.Transform({
objectMode: true,
transform: async (page, enc, cb) => {
const filePath = getFilePath(page, 'path')
WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
cb()
}
})
)
// -> Assets
const assetFolders = await WIKI.models.assetFolders.getAllPaths()
await pipeline(
WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
new stream.Transform({
objectMode: true,
transform: async (asset, enc, cb) => {
const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
cb()
}
})
)
WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
}
}
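
Note the export changed from a ready-made instance to the class itself, so a consumer now instantiates it with a display name, matching the pattern of the removed one-line wrappers (which did require('../s3/common')). A sketch, assuming the class is now consumed from this index file:

// e.g. reusing the S3-compatible implementation for another provider:
const S3CompatibleStorage = require('./index')
module.exports = new S3CompatibleStorage('S3')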

@ -1,57 +0,0 @@
key: s3generic
title: S3 Generic
description: Generic storage module for S3-compatible services.
author: requarks.io
logo: https://static.requarks.io/logo/aws-s3-alt.svg
website: https://wiki.js.org
isAvailable: true
supportedModes:
- push
defaultMode: push
schedule: false
props:
endpoint:
type: String
title: Endpoint URI
hint: The full S3-compliant endpoint URI.
default: https://service.region.example.com
order: 1
bucket:
type: String
title: Unique bucket name
hint: The unique bucket name to create (e.g. wiki-johndoe)
order: 2
accessKeyId:
type: String
title: Access Key ID
hint: The Access Key ID.
order: 3
secretAccessKey:
type: String
title: Access Key Secret
hint: The Access Key Secret for the Access Key ID above.
sensitive: true
order: 4
sslEnabled:
type: Boolean
title: Use SSL
hint: Whether to enable SSL for requests
default: true
order: 5
s3ForcePathStyle:
type: Boolean
title: Force Path Style for S3 objects
hint: Whether to force path style URLs for S3 objects.
default: false
order: 6
s3BucketEndpoint:
type: Boolean
title: Single Bucket Endpoint
hint: Whether the provided endpoint addresses an individual bucket.
default: false
order: 7
actions:
- handler: exportAll
label: Export All
hint: Output all content from the DB to the external service, overwriting any existing data. If you enabled this module after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.

@ -1,3 +0,0 @@
const S3CompatibleStorage = require('../s3/common')
module.exports = new S3CompatibleStorage('S3Generic')

@ -1,71 +1,94 @@
-key: sftp
-title: SFTP
-description: SFTP (SSH File Transfer Protocol) is a secure file transfer protocol. It runs over the SSH protocol. It supports the full security and authentication functionality of SSH.
-author: requarks.io
-logo: https://static.requarks.io/logo/ssh.svg
-website: https://www.ssh.com/ssh/sftp
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: 'SFTP'
+icon: '/_assets/icons/ultraviolet-nas.svg'
+banner: '/_assets/storage/ssh.jpg'
+description: 'Store files over a remote connection using the SSH File Transfer Protocol.'
+vendor: 'Wiki.js'
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: false
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: false
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
   host:
     type: String
     title: Host
     default: ''
     hint: Hostname or IP of the remote SSH server.
+    icon: dns
     order: 1
   port:
     type: Number
     title: Port
     default: 22
     hint: SSH port of the remote server.
+    icon: ethernet-off
     order: 2
   authMode:
     type: String
     title: Authentication Method
     default: 'privateKey'
     hint: Whether to use Private Key or Password-based authentication. A private key is highly recommended for best security.
+    icon: grand-master-key
     enum:
-      - privateKey
-      - password
+      - privateKey|Private Key
+      - password|Password
+    enumDisplay: buttons
     order: 3
   username:
     type: String
     title: Username
     default: ''
     hint: Username for authentication.
+    icon: test-account
     order: 4
   privateKey:
     type: String
     title: Private Key Contents
     default: ''
-    hint: (Private Key Authentication Only) - Contents of the private key
+    hint: Contents of the private key
+    icon: key
     multiline: true
     sensitive: true
     order: 5
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   passphrase:
     type: String
     title: Private Key Passphrase
     default: ''
-    hint: (Private Key Authentication Only) - Passphrase if the private key is encrypted, leave empty otherwise
+    hint: Passphrase if the private key is encrypted, leave empty otherwise
+    icon: password
     sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   password:
     type: String
     title: Password
     default: ''
-    hint: (Password-based Authentication Only) - Password for authentication
+    hint: Password for authentication
+    icon: password
     sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'password' }
   basePath:
     type: String
     title: Base Directory Path
     default: '/root/wiki'
     hint: Base directory where files will be transferred to. The path must already exist and be writable by the user.
+    icon: symlink-directory
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to Remote
     hint: Output all content from the DB to the remote SSH server, overwriting any existing data. If you enabled SFTP after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up

@ -155,12 +155,7 @@ module.exports = {
       const folderPaths = _.dropRight(filePath.split('/'))
       for (let i = 1; i <= folderPaths.length; i++) {
         const folderSection = _.take(folderPaths, i).join('/')
-        const folderDir = path.posix.join(this.config.basePath, folderSection)
-        try {
-          await this.sftp.readdir(folderDir)
-        } catch (err) {
-          await this.sftp.mkdir(folderDir)
-        }
+        await this.sftp.mkdir(path.posix.join(this.config.basePath, folderSection))
       }
     } catch (err) {}
   }
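
The probe-then-create dance is gone: mkdir is now simply attempted for each path segment. As reassembled below, the enclosing try/catch swallows the first rejection and exits the loop, so an already-existing intermediate segment appears to end directory creation early; presumably acceptable for the common case where only the leaf folder is missing.

// The loop as it reads after this change (context reassembled from the hunk above):
try {
  const folderPaths = _.dropRight(filePath.split('/'))
  for (let i = 1; i <= folderPaths.length; i++) {
    const folderSection = _.take(folderPaths, i).join('/')
    // mkdir now runs unconditionally; any rejection lands in the catch below.
    await this.sftp.mkdir(path.posix.join(this.config.basePath, folderSection))
  }
} catch (err) {}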

File diff suppressed because it is too large.