Mirror of https://github.com/requarks/wiki
Commit edb529378e (parent 027b1614ff)
@@ -0,0 +1,108 @@
const Model = require('objection').Model
const crypto = require('crypto')
const pem2jwk = require('pem-jwk').pem2jwk
const _ = require('lodash')

/* global WIKI */

/**
 * Site model
 */
module.exports = class Site extends Model {
  static get tableName () { return 'sites' }

  static get jsonSchema () {
    return {
      type: 'object',
      required: ['hostname'],

      properties: {
        id: { type: 'string' },
        hostname: { type: 'string' },
        isEnabled: { type: 'boolean', default: false }
      }
    }
  }

  static get jsonAttributes () {
    return ['config']
  }

  static async createSite (hostname, config) {
    const newSite = await WIKI.models.sites.query().insertAndFetch({
      hostname,
      isEnabled: true,
      config: _.defaultsDeep(config, {
        title: 'My Wiki Site',
        description: '',
        company: '',
        contentLicense: '',
        defaults: {
          timezone: 'America/New_York',
          dateFormat: 'YYYY-MM-DD',
          timeFormat: '12h'
        },
        features: {
          ratings: false,
          ratingsMode: 'off',
          comments: false,
          contributions: false,
          profile: true,
          search: true
        },
        logoUrl: '',
        logoText: true,
        robots: {
          index: true,
          follow: true
        },
        locale: 'en',
        localeNamespacing: false,
        localeNamespaces: [],
        theme: {
          dark: false,
          colorPrimary: '#1976d2',
          colorSecondary: '#02c39a',
          colorAccent: '#f03a47',
          colorHeader: '#000000',
          colorSidebar: '#1976d2',
          injectCSS: '',
          injectHead: '',
          injectBody: '',
          sidebarPosition: 'left',
          tocPosition: 'right',
          showSharingMenu: true,
          showPrintBtn: true
        }
      })
    })

    await WIKI.models.storage.query().insert({
      module: 'db',
      siteId: newSite.id,
      isEnabled: true,
      contentTypes: {
        activeTypes: ['pages', 'images', 'documents', 'others', 'large'],
        largeThreshold: '5MB'
      },
      assetDelivery: {
        streaming: true,
        directAccess: false
      },
      state: {
        current: 'ok'
      }
    })

    return newSite
  }

  static async updateSite (id, patch) {
    return WIKI.models.sites.query().findById(id).patch(patch)
  }

  static async deleteSite (id) {
    await WIKI.models.storage.query().delete().where('siteId', id)
    return WIKI.models.sites.query().deleteById(id)
  }
}
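For context, a minimal usage sketch of the Site model above, assuming it is registered as WIKI.models.sites (the hostname and config overrides below are illustrative values only, not taken from the commit):

// Hypothetical bootstrap helper — illustrative only.
async function bootstrapDefaultSite () {
  // createSite merges the partial config over the defaults via _.defaultsDeep
  // and provisions a companion 'db' storage target for the new site.
  const site = await WIKI.models.sites.createSite('wiki.example.com', {
    title: 'Documentation',
    defaults: { timezone: 'Europe/Paris' }
  })

  // Partial updates go through updateSite, which patches the row by id.
  await WIKI.models.sites.updateSite(site.id, { isEnabled: false })

  // deleteSite removes the site's storage targets first, then the site itself.
  await WIKI.models.sites.deleteSite(site.id)
}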
@@ -1,6 +0,0 @@
key: api
title: API Docs
description: REST / GraphQL Editor
contentType: yml
author: requarks.io
props: {}
@@ -1,6 +0,0 @@
key: ckeditor
title: Visual Editor
description: Rich-text WYSIWYG Editor
contentType: html
author: requarks.io
props: {}
@@ -1,6 +0,0 @@
key: code
title: Code
description: Raw HTML editor
contentType: html
author: requarks.io
props: {}
@@ -1,6 +0,0 @@
key: markdown
title: Markdown
description: Basic Markdown editor
contentType: markdown
author: requarks.io
props: {}
@@ -1,6 +0,0 @@
key: redirect
title: Redirection
description: Redirect the user
contentType: redirect
author: requarks.io
props: {}
@@ -1,6 +0,0 @@
key: wysiwyg
title: WYSIWYG
description: Advanced Visual HTML Builder
contentType: html
author: requarks.io
props: {}
@@ -1,44 +1,56 @@
-key: azure
 title: Azure Blob Storage
-description: Azure Blob Storage by Microsoft provides massively scalable object storage for unstructured data.
-author: requarks.io
-logo: https://static.requarks.io/logo/azure.svg
-website: https://azure.microsoft.com/services/storage/blobs/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+icon: '/_assets/icons/ultraviolet-azure.svg'
+banner: '/_assets/storage/azure.jpg'
+description: Azure Blob Storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data.
+vendor: Microsoft Corporation
+website: 'https://azure.microsoft.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
   accountName:
     type: String
     title: Account Name
     default: ''
     hint: Your unique account name.
+    icon: 3d-touch
     order: 1
   accountKey:
     type: String
     title: Account Access Key
     default: ''
     hint: Either key 1 or key 2.
+    icon: key
     sensitive: true
     order: 2
   containerName:
     type: String
     title: Container Name
-    default: 'wiki'
+    default: wiki
     hint: Will automatically be created if it doesn't exist yet.
+    icon: shipping-container
     order: 3
   storageTier:
     type: String
     title: Storage Tier
     hint: Represents the access tier on a blob. Use Cool for lower storage costs but at higher retrieval costs.
+    icon: scan-stock
     order: 4
-    default: 'Cool'
+    default: cool
     enum:
-      - 'Hot'
-      - 'Cool'
+      - hot|Hot
+      - cool|Cool
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to Azure
     hint: Output all content from the DB to Azure Blob Storage, overwriting any existing data. If you enabled Azure Blob Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up
@@ -1,10 +0,0 @@
key: box
title: Box
description: Box is a cloud content management and file sharing service for businesses.
author: requarks.io
logo: https://static.requarks.io/logo/box.svg
website: https://www.box.com/platform
props:
  clientId: String
  clientSecret: String
  rootFolder: String
@@ -1,26 +0,0 @@
module.exports = {
  async activated() {

  },
  async deactivated() {

  },
  async init() {

  },
  async created() {

  },
  async updated() {

  },
  async deleted() {

  },
  async renamed() {

  },
  async getLocalLocation () {

  }
}
@@ -0,0 +1,25 @@
title: 'Database'
icon: '/_assets/icons/ultraviolet-database.svg'
banner: '/_assets/storage/database.jpg'
description: 'The local PostgreSQL database can store any assets. It is however not recommended to store large files directly in the database as this can cause performance issues.'
vendor: 'Wiki.js'
website: 'https://js.wiki'
assetDelivery:
  isStreamingSupported: true
  isDirectAccessSupported: false
  defaultStreamingEnabled: true
  defaultDirectAccessEnabled: false
contentTypes:
  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
  defaultLargeThreshold: '5MB'
versioning:
  isSupported: true
  defaultEnabled: false
sync: false
props: {}
actions:
  - handler: purge
    label: Purge All Assets
    hint: Delete all asset data from the database (not the metadata). Useful if you moved assets to another storage target and want to reduce the size of the database.
    warn: This is a destructive action! Make sure all asset files are properly stored on another storage module! This action cannot be undone!
    icon: explosion
@@ -0,0 +1,14 @@
module.exports = {
  async activated () { },
  async deactivated () { },
  async init () { },
  async created (page) { },
  async updated (page) { },
  async deleted (page) { },
  async renamed (page) { },
  async assetUploaded (asset) { },
  async assetDeleted (asset) { },
  async assetRenamed (asset) { },
  async getLocalLocation () { },
  async exportAll () { }
}
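The stub above defines the full event interface a storage module can implement. As a hedged illustration only (the module key and log messages below are made up, not part of this commit), a custom module would fill in whichever hooks it needs, in the same style as the other storage modules in this changeset:

// Hypothetical example of implementing two of the hooks above.
// Assumes WIKI.logger is available, as in the other storage modules.
module.exports = {
  async created (page) {
    // Called whenever a page is created; `page` carries path, content, contentType, etc.
    WIKI.logger.info(`(STORAGE/EXAMPLE) Page created: ${page.path}`)
  },
  async assetUploaded (asset) {
    // Called when an asset is uploaded; `asset.data` is the raw file buffer.
    WIKI.logger.info(`(STORAGE/EXAMPLE) Asset uploaded: ${asset.path} (${asset.data.length} bytes)`)
  }
}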
@@ -1,45 +0,0 @@
key: digitalocean
title: DigitalOcean Spaces
description: DigitalOcean provides developers and businesses a reliable, easy-to-use cloud computing platform of virtual servers (Droplets), object storage (Spaces) and more.
author: andrewsim
logo: https://static.requarks.io/logo/digitalocean.svg
website: https://www.digitalocean.com/products/spaces/
isAvailable: true
supportedModes:
  - push
defaultMode: push
schedule: false
props:
  endpoint:
    type: String
    title: Endpoint
    hint: The DigitalOcean spaces endpoint that has the form ${REGION}.digitaloceanspaces.com
    default: nyc3.digitaloceanspaces.com
    enum:
      - ams3.digitaloceanspaces.com
      - fra1.digitaloceanspaces.com
      - nyc3.digitaloceanspaces.com
      - sfo2.digitaloceanspaces.com
      - sgp1.digitaloceanspaces.com
    order: 1
  bucket:
    type: String
    title: Space Unique Name
    hint: The unique space name to create (e.g. wiki-johndoe)
    order: 2
  accessKeyId:
    type: String
    title: Access Key ID
    hint: The Access Key (Generated in API > Tokens/Keys > Spaces access keys).
    order: 3
  secretAccessKey:
    type: String
    title: Access Key Secret
    hint: The Access Key Secret for the Access Key ID you created above.
    sensitive: true
    order: 4
actions:
  - handler: exportAll
    label: Export All
    hint: Output all content from the DB to DigitalOcean Spaces, overwriting any existing data. If you enabled DigitalOcean Spaces after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
@@ -1,3 +0,0 @@
const S3CompatibleStorage = require('../s3/common')

module.exports = new S3CompatibleStorage('Digitalocean')
@@ -1,34 +1,46 @@
-key: disk
 title: Local File System
-description: Local storage on disk or network shares.
-author: requarks.io
-logo: https://static.requarks.io/logo/local-fs.svg
-website: https://wiki.js.org
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+icon: '/_assets/icons/ultraviolet-hdd.svg'
+banner: '/_assets/storage/disk.jpg'
+description: Store files on the local file system or over network attached storage. Note that you must use replicated storage if using high-availability instances.
+vendor: Wiki.js
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 internalSchedule: P1D
 props:
   path:
     type: String
     title: Path
     hint: Absolute path without a trailing slash (e.g. /home/wiki/backup, C:\wiki\backup)
+    icon: symlink-directory
     order: 1
   createDailyBackups:
     type: Boolean
     default: false
     title: Create Daily Backups
     hint: A tar.gz archive containing all content will be created daily in a subfolder named _daily. Archives are kept for a month.
+    icon: archive-folder
     order: 2
 actions:
   - handler: dump
     label: Dump all content to disk
     hint: Output all content from the DB to the local disk. If you enabled this module after content was created or you temporarily disabled this module, you'll want to execute this action to add the missing files.
+    icon: downloads
   - handler: backup
     label: Create Backup
     hint: Will create a manual backup archive at this point in time, in a subfolder named _manual, from the contents currently on disk.
+    icon: archive-folder
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local disk folder.
+    icon: database-daily-import
@@ -1,9 +0,0 @@
key: dropbox
title: Dropbox
description: Dropbox is a file hosting service that offers cloud storage, file synchronization, personal cloud, and client software.
author: requarks.io
logo: https://static.requarks.io/logo/dropbox.svg
website: https://dropbox.com
props:
  appKey: String
  appSecret: String
@@ -1,26 +0,0 @@
module.exports = {
  async activated() {

  },
  async deactivated() {

  },
  async init() {

  },
  async created() {

  },
  async updated() {

  },
  async deleted() {

  },
  async renamed() {

  },
  async getLocalLocation () {

  }
}
@@ -0,0 +1,65 @@
title: Google Cloud Storage
icon: '/_assets/icons/ultraviolet-google.svg'
banner: '/_assets/storage/gcs.jpg'
description: Google Cloud Storage is an online file storage web service for storing and accessing data on Google Cloud Platform infrastructure.
vendor: Alphabet Inc.
website: 'https://cloud.google.com'
assetDelivery:
  isStreamingSupported: true
  isDirectAccessSupported: true
  defaultStreamingEnabled: true
  defaultDirectAccessEnabled: true
contentTypes:
  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
  defaultLargeThreshold: '5MB'
versioning:
  isSupported: false
  defaultEnabled: false
sync: false
props:
  accountName:
    type: String
    title: Project ID
    hint: The project ID from the Google Developer's Console (e.g. grape-spaceship-123).
    icon: 3d-touch
    default: ''
    order: 1
  credentialsJSON:
    type: String
    title: JSON Credentials
    hint: Contents of the JSON credentials file for the service account having Cloud Storage permissions.
    icon: key
    default: ''
    multiline: true
    sensitive: true
    order: 2
  bucket:
    type: String
    title: Unique bucket name
    hint: The unique bucket name to create (e.g. wiki-johndoe).
    icon: open-box
    order: 3
  storageTier:
    type: String
    title: Storage Tier
    hint: Select the storage class to use when uploading new assets.
    icon: scan-stock
    order: 4
    default: STANDARD
    enum:
      - STANDARD|Standard
      - NEARLINE|Nearline
      - COLDLINE|Coldline
      - ARCHIVE|Archive
  apiEndpoint:
    type: String
    title: API Endpoint
    hint: The API endpoint of the service used to make requests.
    icon: api
    default: storage.google.com
    order: 5
actions:
  - handler: exportAll
    label: Export All DB Assets to GCS
    hint: Output all content from the DB to Google Cloud Storage, overwriting any existing data. If you enabled Google Cloud Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
    icon: this-way-up
@@ -0,0 +1,164 @@
const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const pageHelper = require('../../../helpers/page.js')
const _ = require('lodash')

/* global WIKI */

const getFilePath = (page, pathKey) => {
  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
}

module.exports = {
  async activated() {

  },
  async deactivated() {

  },
  async init() {
    WIKI.logger.info(`(STORAGE/AZURE) Initializing...`)
    const { accountName, accountKey, containerName } = this.config
    this.client = new BlobServiceClient(
      `https://${accountName}.blob.core.windows.net`,
      new StorageSharedKeyCredential(accountName, accountKey)
    )
    this.container = this.client.getContainerClient(containerName)
    try {
      await this.container.create()
    } catch (err) {
      if (err.statusCode !== 409) {
        WIKI.logger.warn(err)
        throw err
      }
    }
    WIKI.logger.info(`(STORAGE/AZURE) Initialization completed.`)
  },
  async created (page) {
    WIKI.logger.info(`(STORAGE/AZURE) Creating file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    const pageContent = page.injectMetadata()
    const blockBlobClient = this.container.getBlockBlobClient(filePath)
    await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
  },
  async updated (page) {
    WIKI.logger.info(`(STORAGE/AZURE) Updating file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    const pageContent = page.injectMetadata()
    const blockBlobClient = this.container.getBlockBlobClient(filePath)
    await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
  },
  async deleted (page) {
    WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    const blockBlobClient = this.container.getBlockBlobClient(filePath)
    await blockBlobClient.delete({
      deleteSnapshots: 'include'
    })
  },
  async renamed (page) {
    WIKI.logger.info(`(STORAGE/AZURE) Renaming file ${page.path} to ${page.destinationPath}...`)
    let sourceFilePath = getFilePath(page, 'path')
    let destinationFilePath = getFilePath(page, 'destinationPath')
    if (WIKI.config.lang.namespacing) {
      if (WIKI.config.lang.code !== page.localeCode) {
        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
      }
      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
      }
    }
    const sourceBlockBlobClient = this.container.getBlockBlobClient(sourceFilePath)
    const destBlockBlobClient = this.container.getBlockBlobClient(destinationFilePath)
    await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
    await sourceBlockBlobClient.delete({
      deleteSnapshots: 'include'
    })
  },
  /**
   * ASSET UPLOAD
   *
   * @param {Object} asset Asset to upload
   */
  async assetUploaded (asset) {
    WIKI.logger.info(`(STORAGE/AZURE) Creating new file ${asset.path}...`)
    const blockBlobClient = this.container.getBlockBlobClient(asset.path)
    await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
  },
  /**
   * ASSET DELETE
   *
   * @param {Object} asset Asset to delete
   */
  async assetDeleted (asset) {
    WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${asset.path}...`)
    const blockBlobClient = this.container.getBlockBlobClient(asset.path)
    await blockBlobClient.delete({
      deleteSnapshots: 'include'
    })
  },
  /**
   * ASSET RENAME
   *
   * @param {Object} asset Asset to rename
   */
  async assetRenamed (asset) {
    WIKI.logger.info(`(STORAGE/AZURE) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
    const sourceBlockBlobClient = this.container.getBlockBlobClient(asset.path)
    const destBlockBlobClient = this.container.getBlockBlobClient(asset.destinationPath)
    await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
    await sourceBlockBlobClient.delete({
      deleteSnapshots: 'include'
    })
  },
  async getLocalLocation () {

  },
  /**
   * HANDLERS
   */
  async exportAll() {
    WIKI.logger.info(`(STORAGE/AZURE) Exporting all content to Azure Blob Storage...`)

    // -> Pages
    await pipeline(
      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
        isPrivate: false
      }).stream(),
      new stream.Transform({
        objectMode: true,
        transform: async (page, enc, cb) => {
          const filePath = getFilePath(page, 'path')
          WIKI.logger.info(`(STORAGE/AZURE) Adding page ${filePath}...`)
          const pageContent = pageHelper.injectPageMetadata(page)
          const blockBlobClient = this.container.getBlockBlobClient(filePath)
          await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
          cb()
        }
      })
    )

    // -> Assets
    const assetFolders = await WIKI.models.assetFolders.getAllPaths()

    await pipeline(
      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
      new stream.Transform({
        objectMode: true,
        transform: async (asset, enc, cb) => {
          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
          WIKI.logger.info(`(STORAGE/AZURE) Adding asset ${filename}...`)
          const blockBlobClient = this.container.getBlockBlobClient(filename)
          await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
          cb()
        }
      })
    )

    WIKI.logger.info('(STORAGE/AZURE) All content has been pushed to Azure Blob Storage.')
  }
}
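As a quick illustration of the getFilePath helper in the module above (the page objects are made-up values, and it is assumed that pageHelper.getFileExtension('markdown') resolves to 'md'), the locale code is only prefixed when namespacing is enabled and the page locale differs from the default:

// Illustrative only — mirrors the logic of getFilePath above,
// assuming WIKI.config.lang = { namespacing: true, code: 'en' }.
getFilePath({ path: 'home', localeCode: 'en', contentType: 'markdown' }, 'path')     // -> 'home.md'
getFilePath({ path: 'accueil', localeCode: 'fr', contentType: 'markdown' }, 'path')  // -> 'fr/accueil.md'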
@@ -1,9 +0,0 @@
key: gdrive
title: Google Drive
description: Google Drive is a file storage and synchronization service developed by Google.
author: requarks.io
logo: https://static.requarks.io/logo/google-drive.svg
website: https://www.google.com/drive/
props:
  clientId: String
  clientSecret: String
@@ -1,26 +0,0 @@
module.exports = {
  async activated() {

  },
  async deactivated() {

  },
  async init() {

  },
  async created() {

  },
  async updated() {

  },
  async deleted() {

  },
  async renamed() {

  },
  async getLocalLocation () {

  }
}
@@ -1,108 +1,151 @@
-key: git
 title: Git
-description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people.
-author: requarks.io
-logo: https://static.requarks.io/logo/git-alt.svg
-website: https://git-scm.com/
-isAvailable: true
-supportedModes:
-  - sync
-  - push
-  - pull
-defaultMode: sync
-schedule: PT5M
+icon: '/_assets/icons/ultraviolet-git.svg'
+banner: '/_assets/storage/git.jpg'
+description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people. If using GitHub, use the GitHub module instead!
+vendor: Software Freedom Conservancy, Inc.
+website: 'https://git-scm.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: true
+  defaultEnabled: true
+  isForceEnabled: true
+sync:
+  supportedModes:
+    - sync
+    - push
+    - pull
+  defaultMode: sync
+  schedule: PT5M
 props:
   authType:
     type: String
     default: 'ssh'
     title: Authentication Type
     hint: Use SSH for maximum security.
+    icon: security-configuration
     enum:
-      - 'basic'
-      - 'ssh'
+      - basic|Basic
+      - ssh|SSH
+    enumDisplay: buttons
     order: 1
   repoUrl:
     type: String
     title: Repository URI
-    hint: Git-compliant URI (e.g. git@github.com:org/repo.git for ssh, https://github.com/org/repo.git for basic)
+    hint: Git-compliant URI (e.g. git@server.com:org/repo.git for ssh, https://server.com/org/repo.git for basic)
+    icon: dns
     order: 2
   branch:
     type: String
-    default: 'master'
+    default: 'main'
+    title: Branch
     hint: The branch to use during pull / push
+    icon: code-fork
     order: 3
   sshPrivateKeyMode:
     type: String
     title: SSH Private Key Mode
-    hint: SSH Authentication Only - The mode to use to load the private key. Fill in the corresponding field below.
+    hint: The mode to use to load the private key. Fill in the corresponding field below.
+    icon: grand-master-key
     order: 11
-    default: 'path'
+    default: inline
     enum:
-      - 'path'
-      - 'contents'
+      - path|File Path
+      - inline|Inline Contents
+    enumDisplay: buttons
+    if:
+      - { key: 'authType', eq: 'ssh' }
   sshPrivateKeyPath:
     type: String
-    title: A - SSH Private Key Path
-    hint: SSH Authentication Only - Absolute path to the key. The key must NOT be passphrase-protected. Mode must be set to path to use this option.
+    title: SSH Private Key Path
+    hint: Absolute path to the key. The key must NOT be passphrase-protected.
+    icon: key
     order: 12
+    if:
+      - { key: 'authType', eq: 'ssh' }
+      - { key: 'sshPrivateKeyMode', eq: 'path' }
   sshPrivateKeyContent:
     type: String
-    title: B - SSH Private Key Contents
-    hint: SSH Authentication Only - Paste the contents of the private key. The key must NOT be passphrase-protected. Mode must be set to contents to use this option.
+    title: SSH Private Key Contents
+    hint: Paste the contents of the private key. The key must NOT be passphrase-protected.
+    icon: key
     multiline: true
     sensitive: true
     order: 13
+    if:
+      - { key: 'sshPrivateKeyMode', eq: 'inline' }
   verifySSL:
     type: Boolean
     default: true
     title: Verify SSL Certificate
     hint: Some hosts require SSL certificate checking to be disabled. Leave enabled for proper security.
+    icon: security-ssl
     order: 14
   basicUsername:
     type: String
     title: Username
     hint: Basic Authentication Only
+    icon: test-account
     order: 20
+    if:
+      - { key: 'authType', eq: 'basic' }
   basicPassword:
     type: String
     title: Password / PAT
     hint: Basic Authentication Only
+    icon: password
     sensitive: true
     order: 21
+    if:
+      - { key: 'authType', eq: 'basic' }
   defaultEmail:
     type: String
     title: Default Author Email
     default: 'name@company.com'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 22
+    icon: email
+    order: 30
   defaultName:
     type: String
     title: Default Author Name
     default: 'John Smith'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 23
+    icon: customer
+    order: 31
   localRepoPath:
     type: String
     title: Local Repository Path
     default: './data/repo'
     hint: 'Path where the local git repository will be created.'
-    order: 30
+    icon: symlink-directory
+    order: 32
   gitBinaryPath:
     type: String
     title: Git Binary Path
     default: ''
     hint: Optional - Absolute path to the Git binary, when not available in PATH. Leave empty to use the default PATH location (recommended).
+    icon: run-command
     order: 50
 actions:
   - handler: syncUntracked
     label: Add Untracked Changes
     hint: Output all content from the DB to the local Git repository to ensure all untracked content is saved. If you enabled Git after content was created or you temporarily disabled Git, you'll want to execute this action to add the missing untracked changes.
+    icon: database-daily-export
   - handler: sync
     label: Force Sync
     hint: Will trigger an immediate sync operation, regardless of the current sync schedule. The sync direction is respected.
+    icon: synchronize
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local Git repository, regardless of the latest commit state. Useful for importing content from the remote repository created before git was enabled.
+    icon: database-daily-import
   - handler: purge
     label: Purge Local Repository
     hint: If you have unrelated merge histories, clearing the local repository can resolve this issue. This will not affect the remote repository or perform any commit.
+    icon: trash
@@ -0,0 +1,49 @@
title: GitHub
icon: '/_assets/icons/ultraviolet-github.svg'
banner: '/_assets/storage/github.jpg'
description: Millions of developers and companies build, ship, and maintain their software on GitHub - the largest and most advanced development platform in the world.
vendor: GitHub, Inc.
website: 'https://github.com'
assetDelivery:
  isStreamingSupported: false
  isDirectAccessSupported: false
  defaultStreamingEnabled: false
  defaultDirectAccessEnabled: false
contentTypes:
  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
  defaultLargeThreshold: '5MB'
versioning:
  isSupported: true
  defaultEnabled: true
  isForceEnabled: true
sync: false
setup:
  handler: github
  defaultValues:
    accountType: org
    org: ''
    publicUrl: https://
props:
  appName:
    readOnly: true
    type: String
    title: App Name
    hint: Name of the generated app in GitHub.
    icon: 3d-touch
  repoFullName:
    readOnly: true
    type: String
    title: GitHub Repository
    hint: The GitHub repository used for content synchronization.
    icon: github
  repoDefaultBranch:
    readOnly: true
    type: String
    title: Default Branch
    hint: The repository default branch.
    icon: code-fork
actions:
  - handler: exportAll
    label: Export All DB Assets to GitHub
    hint: Output all content from the DB to GitHub, overwriting any existing data. If you enabled GitHub after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
    icon: this-way-up
@@ -0,0 +1,211 @@
const { Octokit, App } = require('octokit')

/* global WIKI */

module.exports = {
  async activated () { },
  async deactivated () { },
  async init () { },

  /**
   * SETUP FUNCTIONS
   */
  async setup (id, state) {
    try {
      switch (state.step) {
        // --------------------------------------------
        // -> VALIDATE CALLBACK CODE AFTER APP CREATION
        // --------------------------------------------
        case 'connect': {
          const gh = new Octokit({
            userAgent: 'wikijs'
          })
          const resp = await gh.request('POST /app-manifests/{code}/conversions', {
            code: state.code
          })
          if (resp.status > 200 && resp.status < 300) {
            await WIKI.models.storage.query().patch({
              config: {
                appId: resp.data.id,
                appName: resp.data.name,
                appSlug: resp.data.slug,
                appClientId: resp.data.client_id,
                appClientSecret: resp.data.client_secret,
                appWebhookSecret: resp.data.webhook_secret,
                appPem: resp.data.pem,
                appPermissions: resp.data.permissions,
                appEvents: resp.data.events,
                ownerLogin: resp.data.owner?.login,
                ownerId: resp.data.owner?.id
              },
              state: {
                current: 'ok',
                setup: 'pendinginstall'
              }
            }).where('id', id)
            return {
              nextStep: 'installApp',
              url: `https://github.com/apps/${resp.data.slug}/installations/new/permissions?target_id=${resp.data.owner?.id}`
            }
          } else {
            throw new Error('GitHub refused the code or could not be reached.')
          }
        }
        // -----------------------
        // VERIFY APP INSTALLATION
        // -----------------------
        case 'verify': {
          const tgt = await WIKI.models.storage.query().findById(id)
          if (!tgt) {
            throw new Error('Invalid Target ID')
          }

          const ghApp = new App({
            appId: tgt.config.appId,
            privateKey: tgt.config.appPem,
            Octokit: Octokit.defaults({
              userAgent: 'wikijs'
            }),
            oauth: {
              clientId: tgt.config.appClientId,
              clientSecret: tgt.config.appClientSecret
            },
            webhooks: {
              secret: tgt.config.appWebhookSecret
            }
          })

          // -> Find Installation ID

          let installId = null
          let installTotal = 0
          for await (const { installation } of ghApp.eachInstallation.iterator()) {
            if (installTotal < 1) {
              installId = installation.id
              WIKI.logger.debug(`Using GitHub App installation ID ${installId}`)
            }
            installTotal++
          }
          if (installTotal < 1) {
            throw new Error('App is not installed on any GitHub account!')
          } else if (installTotal > 1) {
            WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 account. Only the first one ${installId} will be used.`)
          }

          // -> Fetch Repository Info

          let repo = null
          let repoTotal = 0
          for await (const { repository } of ghApp.eachRepository.iterator({ installationId: installId })) {
            if (repository.archived || repository.disabled) {
              WIKI.logger.debug(`Skipping GitHub Repository ${repository.id} because it is archived or disabled.`)
              continue
            }
            if (repoTotal < 1) {
              repo = repository
              WIKI.logger.debug(`Using GitHub Repository ${repo.id}`)
            }
            repoTotal++
          }
          if (repoTotal < 1) {
            throw new Error('App is not installed on any GitHub repository!')
          } else if (repoTotal > 1) {
            WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 repository. Only the first one (${repo.full_name}) will be used.`)
          }

          // -> Save install/repo info

          await WIKI.models.storage.query().patch({
            isEnabled: true,
            config: {
              ...tgt.config,
              installId,
              repoId: repo.id,
              repoName: repo.name,
              repoOwner: repo.owner?.login,
              repoDefaultBranch: repo.default_branch,
              repoFullName: repo.full_name
            },
            state: {
              current: 'ok',
              setup: 'configured'
            }
          }).where('id', id)

          return {
            nextStep: 'completed'
          }
        }
        default: {
          throw new Error('Invalid Setup Step')
        }
      }
    } catch (err) {
      WIKI.logger.warn('GitHub Storage Module Setup Failed:')
      WIKI.logger.warn(err)
      throw err
    }
  },
  async setupDestroy (id) {
    try {
      const tgt = await WIKI.models.storage.query().findById(id)
      if (!tgt) {
        throw new Error('Invalid Target ID')
      }

      WIKI.logger.info('Resetting GitHub storage configuration...')

      const ghApp = new App({
        appId: tgt.config.appId,
        privateKey: tgt.config.appPem,
        Octokit: Octokit.defaults({
          userAgent: 'wikijs'
        }),
        oauth: {
          clientId: tgt.config.appClientId,
          clientSecret: tgt.config.appClientSecret
        },
        webhooks: {
          secret: tgt.config.appWebhookSecret
        }
      })

      // -> Reset storage module config

      await WIKI.models.storage.query().patch({
        isEnabled: false,
        config: {},
        state: {
          current: 'ok',
          setup: 'notconfigured'
        }
      }).where('id', id)

      // -> Try to delete installation on GitHub

      if (tgt.config.installId) {
        try {
          await ghApp.octokit.request('DELETE /app/installations/{installation_id}', {
            installation_id: tgt.config.installId
          })
          WIKI.logger.info('Deleted GitHub installation successfully.')
        } catch (err) {
          WIKI.logger.warn('Could not delete GitHub installation automatically. Please remove the installation on GitHub.')
        }
      }
    } catch (err) {
      WIKI.logger.warn('GitHub Storage Module Destroy Failed:')
      WIKI.logger.warn(err)
      throw err
    }
  },
  async created (page) { },
  async updated (page) { },
  async deleted (page) { },
  async renamed (page) { },
  async assetUploaded (asset) { },
  async assetDeleted (asset) { },
  async assetRenamed (asset) { },
  async getLocalLocation () { },
  async exportAll () { }
}
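The setup handler above is driven by a two-step state machine ('connect', then 'verify'). A hedged sketch of how a caller might drive it is shown below; the function name and flow are illustrative only, since the admin UI/GraphQL layer that actually calls setup() is not part of this diff:

// Hypothetical driver for the setup() state machine above — illustrative only.
const github = require('./storage') // path assumed for illustration

async function configureGithubTarget (targetId, manifestCode) {
  // Step 1: exchange the app-manifest callback code for GitHub App credentials.
  const connect = await github.setup(targetId, { step: 'connect', code: manifestCode })
  // connect.url points at the GitHub "install app" page; the user must complete the install there.

  // Step 2: once the app is installed on exactly one account and repository,
  // 'verify' records the installation and repository and enables the target.
  const verify = await github.setup(targetId, { step: 'verify' })
  return verify.nextStep === 'completed'
}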
@@ -1,9 +0,0 @@
key: onedrive
title: OneDrive
description: OneDrive is a file hosting service operated by Microsoft as part of its suite of Office Online services.
author: requarks.io
logo: https://static.requarks.io/logo/onedrive.svg
website: https://onedrive.live.com/about/
props:
  clientId: String
  clientSecret: String
@@ -1,26 +0,0 @@
module.exports = {
  async activated() {

  },
  async deactivated() {

  },
  async init() {

  },
  async created() {

  },
  async updated() {

  },
  async deleted() {

  },
  async renamed() {

  },
  async getLocalLocation () {

  }
}
@@ -1,168 +0,0 @@
const S3 = require('aws-sdk/clients/s3')
const stream = require('stream')
const Promise = require('bluebird')
const pipeline = Promise.promisify(stream.pipeline)
const _ = require('lodash')
const pageHelper = require('../../../helpers/page.js')

/* global WIKI */

/**
 * Deduce the file path given the `page` object and the object's key to the page's path.
 */
const getFilePath = (page, pathKey) => {
  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
}

/**
 * Can be used with S3 compatible storage.
 */
module.exports = class S3CompatibleStorage {
  constructor(storageName) {
    this.storageName = storageName
    this.bucketName = ""
  }
  async activated() {
    // not used
  }
  async deactivated() {
    // not used
  }
  async init() {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
    const { accessKeyId, secretAccessKey, bucket } = this.config
    const s3Config = {
      accessKeyId,
      secretAccessKey,
      params: { Bucket: bucket },
      apiVersions: '2006-03-01'
    }

    if (!_.isNil(this.config.region)) {
      s3Config.region = this.config.region
    }
    if (!_.isNil(this.config.endpoint)) {
      s3Config.endpoint = this.config.endpoint
    }
    if (!_.isNil(this.config.sslEnabled)) {
      s3Config.sslEnabled = this.config.sslEnabled
    }
    if (!_.isNil(this.config.s3ForcePathStyle)) {
      s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
    }
    if (!_.isNil(this.config.s3BucketEndpoint)) {
      s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
    }

    this.s3 = new S3(s3Config)
    this.bucketName = bucket

    // determine if a bucket exists and you have permission to access it
    await this.s3.headBucket().promise()

    WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
  }
  async created(page) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
  }
  async updated(page) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
  }
  async deleted(page) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
    const filePath = getFilePath(page, 'path')
    await this.s3.deleteObject({ Key: filePath }).promise()
  }
  async renamed(page) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
    let sourceFilePath = getFilePath(page, 'path')
    let destinationFilePath = getFilePath(page, 'destinationPath')
    if (WIKI.config.lang.namespacing) {
      if (WIKI.config.lang.code !== page.localeCode) {
        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
      }
      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
      }
    }
    await this.s3.copyObject({ CopySource: `${this.bucketName}/${sourceFilePath}`, Key: destinationFilePath }).promise()
    await this.s3.deleteObject({ Key: sourceFilePath }).promise()
  }
  /**
   * ASSET UPLOAD
   *
   * @param {Object} asset Asset to upload
   */
  async assetUploaded (asset) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
    await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
  }
  /**
   * ASSET DELETE
   *
   * @param {Object} asset Asset to delete
   */
  async assetDeleted (asset) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
    await this.s3.deleteObject({ Key: asset.path }).promise()
  }
  /**
   * ASSET RENAME
   *
   * @param {Object} asset Asset to rename
   */
  async assetRenamed (asset) {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
    await this.s3.copyObject({ CopySource: `${this.bucketName}/${asset.path}`, Key: asset.destinationPath }).promise()
    await this.s3.deleteObject({ Key: asset.path }).promise()
  }
  async getLocalLocation () {

  }
  /**
   * HANDLERS
   */
  async exportAll() {
    WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)

    // -> Pages
    await pipeline(
      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
        isPrivate: false
      }).stream(),
      new stream.Transform({
        objectMode: true,
        transform: async (page, enc, cb) => {
          const filePath = getFilePath(page, 'path')
          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
          await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
          cb()
        }
      })
    )

    // -> Assets
    const assetFolders = await WIKI.models.assetFolders.getAllPaths()

    await pipeline(
      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
      new stream.Transform({
        objectMode: true,
        transform: async (asset, enc, cb) => {
          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
          await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
          cb()
        }
      })
    )

    WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
  }
}
@ -1,37 +1,159 @@
 key: s3
-title: Amazon S3
-description: Amazon S3 is a cloud computing web service offered by Amazon Web Services which provides object storage.
-author: andrewsim
-logo: https://static.requarks.io/logo/aws-s3.svg
-website: https://aws.amazon.com/s3/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: AWS S3 / DigitalOcean Spaces
+icon: '/_assets/icons/ultraviolet-amazon-web-services.svg'
+banner: '/_assets/storage/s3.jpg'
+description: Amazon Simple Storage Service (Amazon S3) is an object storage service offering industry-leading scalability, data availability, security, and performance.
+vendor: Amazon.com, Inc.
+website: 'https://aws.amazon.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
-  region:
+  mode:
+    type: String
+    title: Mode
+    hint: Select a preset configuration mode or define a custom one.
+    icon: tune
+    default: aws
+    order: 1
+    enum:
+      - aws|AWS S3
+      - do|DigitalOcean Spaces
+      - custom|Custom
+  awsRegion:
     type: String
     title: Region
     hint: The AWS datacenter region where the bucket will be created.
-    order: 1
+    icon: geography
+    default: us-east-1
+    enum:
+      - af-south-1|af-south-1 - Africa (Cape Town)
+      - ap-east-1|ap-east-1 - Asia Pacific (Hong Kong)
+      - ap-southeast-3|ap-southeast-3 - Asia Pacific (Jakarta)
+      - ap-south-1|ap-south-1 - Asia Pacific (Mumbai)
+      - ap-northeast-3|ap-northeast-3 - Asia Pacific (Osaka)
+      - ap-northeast-2|ap-northeast-2 - Asia Pacific (Seoul)
+      - ap-southeast-1|ap-southeast-1 - Asia Pacific (Singapore)
+      - ap-southeast-2|ap-southeast-2 - Asia Pacific (Sydney)
+      - ap-northeast-1|ap-northeast-1 - Asia Pacific (Tokyo)
+      - ca-central-1|ca-central-1 - Canada (Central)
+      - cn-north-1|cn-north-1 - China (Beijing)
+      - cn-northwest-1|cn-northwest-1 - China (Ningxia)
+      - eu-central-1|eu-central-1 - Europe (Frankfurt)
+      - eu-west-1|eu-west-1 - Europe (Ireland)
+      - eu-west-2|eu-west-2 - Europe (London)
+      - eu-south-1|eu-south-1 - Europe (Milan)
+      - eu-west-3|eu-west-3 - Europe (Paris)
+      - eu-north-1|eu-north-1 - Europe (Stockholm)
+      - me-south-1|me-south-1 - Middle East (Bahrain)
+      - sa-east-1|sa-east-1 - South America (São Paulo)
+      - us-east-1|us-east-1 - US East (N. Virginia)
+      - us-east-2|us-east-2 - US East (Ohio)
+      - us-west-1|us-west-1 - US West (N. California)
+      - us-west-2|us-west-2 - US West (Oregon)
+    order: 2
+    if:
+      - { key: 'mode', eq: 'aws' }
+  doRegion:
+    type: String
+    title: Region
+    hint: The DigitalOcean Spaces region
+    icon: geography
+    default: nyc3
+    enum:
+      - ams3|Amsterdam
+      - fra1|Frankfurt
+      - nyc3|New York
+      - sfo2|San Francisco 2
+      - sfo3|San Francisco 3
+      - sgp1|Singapore
+    order: 2
+    if:
+      - { key: 'mode', eq: 'do' }
+  endpoint:
+    type: String
+    title: Endpoint URI
+    hint: The full S3-compliant endpoint URI.
+    icon: dns
+    default: https://service.region.example.com
+    order: 2
+    if:
+      - { key: 'mode', eq: 'custom' }
   bucket:
     type: String
     title: Unique bucket name
     hint: The unique bucket name to create (e.g. wiki-johndoe).
-    order: 2
+    icon: open-box
+    order: 3
   accessKeyId:
     type: String
     title: Access Key ID
     hint: The Access Key.
-    order: 3
+    icon: 3d-touch
+    order: 4
   secretAccessKey:
     type: String
     title: Secret Access Key
     hint: The Secret Access Key for the Access Key ID you created above.
+    icon: key
     sensitive: true
-    order: 4
+    order: 5
+  storageTier:
+    type: String
+    title: Storage Tier
+    hint: The storage tier to use when adding files.
+    icon: scan-stock
+    order: 6
+    default: STANDARD
+    enum:
+      - STANDARD|Standard
+      - STANDARD_IA|Standard Infrequent Access
+      - INTELLIGENT_TIERING|Intelligent Tiering
+      - ONEZONE_IA|One Zone Infrequent Access
+      - REDUCED_REDUNDANCY|Reduced Redundancy
+      - GLACIER_IR|Glacier Instant Retrieval
+      - GLACIER|Glacier Flexible Retrieval
+      - DEEP_ARCHIVE|Glacier Deep Archive
+      - OUTPOSTS|Outposts
+    if:
+      - { key: 'mode', eq: 'aws' }
+  sslEnabled:
+    type: Boolean
+    title: Use SSL
+    hint: Whether to enable SSL for requests
+    icon: secure
+    default: true
+    order: 10
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3ForcePathStyle:
+    type: Boolean
+    title: Force Path Style for S3 objects
+    hint: Whether to force path style URLs for S3 objects.
+    icon: filtration
+    default: false
+    order: 11
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3BucketEndpoint:
+    type: Boolean
+    title: Single Bucket Endpoint
+    hint: Whether the provided endpoint addresses an individual bucket.
+    icon: swipe-right
+    default: false
+    order: 12
+    if:
+      - { key: 'mode', eq: 'custom' }
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to S3
     hint: Output all content from the DB to S3, overwriting any existing data. If you enabled S3 after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up
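
The rewritten definition is entirely data-driven: each prop can now carry an icon, an enum of value|label pairs, and an if list of { key, eq } rules that gates the field on the value of another prop (here, mode). How the admin UI evaluates those rules is not part of this diff; the sketch below shows one plausible interpretation, with all function and variable names being illustrative only.

// A prop is visible only if every one of its `if` rules matches the
// currently entered values. Props without an `if` list are always visible.
function isPropVisible (prop, values) {
  if (!Array.isArray(prop.if) || prop.if.length < 1) { return true }
  return prop.if.every(rule => values[rule.key] === rule.eq)
}

// With the s3 module above and mode set to DigitalOcean Spaces:
const values = { mode: 'do' }
isPropVisible({ if: [{ key: 'mode', eq: 'aws' }] }, values) // false -> awsRegion is hidden
isPropVisible({ if: [{ key: 'mode', eq: 'do' }] }, values)  // true  -> doRegion is shown
isPropVisible({}, values)                                   // true  -> bucket is always shown
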
@ -1,3 +1,166 @@
-const S3CompatibleStorage = require('./common')
+const S3 = require('aws-sdk/clients/s3')
+const stream = require('stream')
+const Promise = require('bluebird')
+const pipeline = Promise.promisify(stream.pipeline)
+const _ = require('lodash')
+const pageHelper = require('../../../helpers/page.js')
 
-module.exports = new S3CompatibleStorage('S3')
+/* global WIKI */
+
+/**
+ * Deduce the file path given the `page` object and the object's key to the page's path.
+ */
+const getFilePath = (page, pathKey) => {
+  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
+  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
+  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
+}
+
+/**
+ * Can be used with S3 compatible storage.
+ */
+module.exports = class S3CompatibleStorage {
+  constructor(storageName) {
+    this.storageName = storageName
+  }
+  async activated() {
+    // not used
+  }
+  async deactivated() {
+    // not used
+  }
+  async init() {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
+    const { accessKeyId, secretAccessKey, bucket } = this.config
+    const s3Config = {
+      accessKeyId,
+      secretAccessKey,
+      params: { Bucket: bucket },
+      apiVersions: '2006-03-01'
+    }
+
+    if (!_.isNil(this.config.region)) {
+      s3Config.region = this.config.region
+    }
+    if (!_.isNil(this.config.endpoint)) {
+      s3Config.endpoint = this.config.endpoint
+    }
+    if (!_.isNil(this.config.sslEnabled)) {
+      s3Config.sslEnabled = this.config.sslEnabled
+    }
+    if (!_.isNil(this.config.s3ForcePathStyle)) {
+      s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
+    }
+    if (!_.isNil(this.config.s3BucketEndpoint)) {
+      s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
+    }
+
+    this.s3 = new S3(s3Config)
+
+    // determine if a bucket exists and you have permission to access it
+    await this.s3.headBucket().promise()
+
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
+  }
+  async created(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
+  }
+  async updated(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
+  }
+  async deleted(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.deleteObject({ Key: filePath }).promise()
+  }
+  async renamed(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
+    let sourceFilePath = getFilePath(page, 'path')
+    let destinationFilePath = getFilePath(page, 'destinationPath')
+    if (WIKI.config.lang.namespacing) {
+      if (WIKI.config.lang.code !== page.localeCode) {
+        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
+      }
+      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
+        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
+      }
+    }
+    await this.s3.copyObject({ CopySource: sourceFilePath, Key: destinationFilePath }).promise()
+    await this.s3.deleteObject({ Key: sourceFilePath }).promise()
+  }
+  /**
+   * ASSET UPLOAD
+   *
+   * @param {Object} asset Asset to upload
+   */
+  async assetUploaded (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
+    await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
+  }
+  /**
+   * ASSET DELETE
+   *
+   * @param {Object} asset Asset to delete
+   */
+  async assetDeleted (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
+    await this.s3.deleteObject({ Key: asset.path }).promise()
+  }
+  /**
+   * ASSET RENAME
+   *
+   * @param {Object} asset Asset to rename
+   */
+  async assetRenamed (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
+    await this.s3.copyObject({ CopySource: asset.path, Key: asset.destinationPath }).promise()
+    await this.s3.deleteObject({ Key: asset.path }).promise()
+  }
+  async getLocalLocation () {
+
+  }
+  /**
+   * HANDLERS
+   */
+  async exportAll() {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)
+
+    // -> Pages
+    await pipeline(
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
+        isPrivate: false
+      }).stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (page, enc, cb) => {
+          const filePath = getFilePath(page, 'path')
+          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
+          await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
+          cb()
+        }
+      })
+    )
+
+    // -> Assets
+    const assetFolders = await WIKI.models.assetFolders.getAllPaths()
+
+    await pipeline(
+      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (asset, enc, cb) => {
+          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
+          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
+          await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
+          cb()
+        }
+      })
+    )
+
+    WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
+  }
+}
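
Nothing in this hunk shows how the class receives its config or who invokes the lifecycle hooks, so the sketch below wires it up by hand purely to illustrate the call order. The WIKI stand-in, the require path, and the direct config assignment are assumptions made for the sketch, not the storage manager's actual contract, and all credential values are placeholders.

// Minimal stand-in for the server's global, just enough for this sketch.
global.WIKI = {
  logger: console,
  config: { lang: { namespacing: false, code: 'en' } }
}

const S3CompatibleStorage = require('./index') // path assumed for the sketch

async function demo () {
  const storage = new S3CompatibleStorage('S3')
  // Assumption: in the real server these values come from the props declared in s3.yml.
  storage.config = {
    accessKeyId: 'AKIAEXAMPLE',
    secretAccessKey: 'example-secret',
    bucket: 'wiki-johndoe',
    region: 'us-east-1'
  }
  await storage.init() // builds the aws-sdk client and verifies access with headBucket()

  // Page hooks derive the object key from the page path and content type.
  await storage.created({
    path: 'home',
    localeCode: 'en',
    contentType: 'markdown',
    injectMetadata: () => '# Home\n'
  })

  // S3 has no native move: renamed() copies to the new key, then deletes the old one.
  await storage.renamed({
    path: 'home',
    destinationPath: 'start',
    localeCode: 'en',
    destinationLocaleCode: 'en'
  })
}

demo().catch(err => console.error(err))
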
@ -1,57 +0,0 @@
key: s3generic
title: S3 Generic
description: Generic storage module for S3-compatible services.
author: requarks.io
logo: https://static.requarks.io/logo/aws-s3-alt.svg
website: https://wiki.js.org
isAvailable: true
supportedModes:
  - push
defaultMode: push
schedule: false
props:
  endpoint:
    type: String
    title: Endpoint URI
    hint: The full S3-compliant endpoint URI.
    default: https://service.region.example.com
    order: 1
  bucket:
    type: String
    title: Unique bucket name
    hint: The unique bucket name to create (e.g. wiki-johndoe)
    order: 2
  accessKeyId:
    type: String
    title: Access Key ID
    hint: The Access Key ID.
    order: 3
  secretAccessKey:
    type: String
    title: Access Key Secret
    hint: The Access Key Secret for the Access Key ID above.
    sensitive: true
    order: 4
  sslEnabled:
    type: Boolean
    title: Use SSL
    hint: Whether to enable SSL for requests
    default: true
    order: 5
  s3ForcePathStyle:
    type: Boolean
    title: Force Path Style for S3 objects
    hint: Whether to force path style URLs for S3 objects.
    default: false
    order: 6
  s3BucketEndpoint:
    type: Boolean
    title: Single Bucket Endpoint
    hint: Whether the provided endpoint addresses an individual bucket.
    default: false
    order: 7
actions:
  - handler: exportAll
    label: Export All
    hint: Output all content from the DB to the external service, overwriting any existing data. If you enabled this module after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
@ -1,3 +0,0 @@
const S3CompatibleStorage = require('../s3/common')

module.exports = new S3CompatibleStorage('S3Generic')
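
The dedicated s3generic module is removed in the same commit. Every setting it exposed (endpoint, bucket, keys, sslEnabled, s3ForcePathStyle, s3BucketEndpoint) has a counterpart in the rewritten s3 module above when mode is set to custom; the object below restates those settings with the new prop keys, purely as an illustration with placeholder values.

// Hypothetical equivalent of an s3generic setup, expressed with the prop keys
// of the rewritten s3 module in 'custom' mode (placeholder values only).
const customModeConfig = {
  mode: 'custom',
  endpoint: 'https://service.region.example.com',
  bucket: 'wiki-johndoe',
  accessKeyId: 'AKIAEXAMPLE',
  secretAccessKey: 'example-secret',
  sslEnabled: true,
  s3ForcePathStyle: false,
  s3BucketEndpoint: false
}
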
@ -1,71 +1,94 @@
 key: sftp
-title: SFTP
-description: SFTP (SSH File Transfer Protocol) is a secure file transfer protocol. It runs over the SSH protocol. It supports the full security and authentication functionality of SSH.
-author: requarks.io
-logo: https://static.requarks.io/logo/ssh.svg
-website: https://www.ssh.com/ssh/sftp
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: 'SFTP'
+icon: '/_assets/icons/ultraviolet-nas.svg'
+banner: '/_assets/storage/ssh.jpg'
+description: 'Store files over a remote connection using the SSH File Transfer Protocol.'
+vendor: 'Wiki.js'
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: false
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: false
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
   host:
     type: String
     title: Host
     default: ''
     hint: Hostname or IP of the remote SSH server.
+    icon: dns
     order: 1
   port:
     type: Number
     title: Port
     default: 22
     hint: SSH port of the remote server.
+    icon: ethernet-off
     order: 2
   authMode:
     type: String
     title: Authentication Method
     default: 'privateKey'
     hint: Whether to use Private Key or Password-based authentication. A private key is highly recommended for best security.
+    icon: grand-master-key
     enum:
-      - privateKey
-      - password
+      - privateKey|Private Key
+      - password|Password
+    enumDisplay: buttons
     order: 3
   username:
     type: String
     title: Username
     default: ''
     hint: Username for authentication.
+    icon: test-account
     order: 4
   privateKey:
     type: String
     title: Private Key Contents
     default: ''
-    hint: (Private Key Authentication Only) - Contents of the private key
+    hint: Contents of the private key
+    icon: key
     multiline: true
     sensitive: true
     order: 5
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   passphrase:
     type: String
     title: Private Key Passphrase
     default: ''
-    hint: (Private Key Authentication Only) - Passphrase if the private key is encrypted, leave empty otherwise
+    hint: Passphrase if the private key is encrypted, leave empty otherwise
+    icon: password
    sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   password:
     type: String
     title: Password
     default: ''
-    hint: (Password-based Authentication Only) - Password for authentication
+    hint: Password for authentication
+    icon: password
     sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'password' }
   basePath:
     type: String
     title: Base Directory Path
     default: '/root/wiki'
     hint: Base directory where files will be transferred to. The path must already exists and be writable by the user.
+    icon: symlink-directory
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to Remote
     hint: Output all content from the DB to the remote SSH server, overwriting any existing data. If you enabled SFTP after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up
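
The SFTP definition keeps two authentication paths behind authMode and now hides the irrelevant fields with the same if mechanism used by the s3 module. The storage code that consumes these props is not part of this section; the sketch below shows how the declared values could translate into a connection and an upload, using the ssh2-sftp-client package purely as a stand-in for whatever client the module actually uses. Names and values are illustrative.

const SftpClient = require('ssh2-sftp-client')
const path = require('path')

// config mirrors the props declared in sftp.yml (placeholder values).
async function uploadFile (config, fileName, contents) {
  const sftp = new SftpClient()
  await sftp.connect({
    host: config.host,
    port: config.port,
    username: config.username,
    // Only one branch applies, matching the authMode enum above.
    ...(config.authMode === 'privateKey'
      ? { privateKey: config.privateKey, passphrase: config.passphrase }
      : { password: config.password })
  })
  try {
    // Files land under basePath, which must already exist and be writable.
    await sftp.put(Buffer.from(contents), path.posix.join(config.basePath, fileName))
  } finally {
    await sftp.end()
  }
}

// Example (placeholder values):
// uploadFile({ host: 'example.com', port: 22, authMode: 'password', username: 'wiki',
//              password: 'secret', basePath: '/root/wiki' }, 'home.md', '# Home\n')
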