feat: admin storage - save + list

pull/621/head
NGPixel 7 years ago
parent 2500d8b054
commit bb102c3356

@@ -75,7 +75,7 @@
v-card-chin
v-btn(color='primary', @click='save')
v-icon(left) chevron_right
span Save
span Apply Configuration
v-spacer
v-btn(icon, @click='refresh')
v-icon.grey--text refresh
@@ -126,7 +126,7 @@ export default {
}
})
this.$store.commit('showNotification', {
message: 'Strategies saved successfully.',
message: 'Authentication configuration saved successfully.',
style: 'success',
icon: 'check'
})

@@ -1,55 +1,133 @@
<template lang='pug'>
v-card(flat)
v-card(flat, :color='$vuetify.dark ? "grey darken-4" : "grey lighten-5"').pa-3.pt-4
v-card(tile, :color='$vuetify.dark ? "grey darken-4" : "grey lighten-5"')
.pa-3.pt-4
.headline.primary--text Storage
.subheading.grey--text Set backup and sync targets for your content
v-tabs(:color='$vuetify.dark ? "primary" : "grey lighten-4"', fixed-tabs, :slider-color='$vuetify.dark ? "white" : "primary"', show-arrows)
v-tab(key='settings'): v-icon settings
v-tab(key='local') Local FS
v-tab(key='git') Git
v-tab(key='s3') Amazon S3
v-tab(key='azure') Azure Blob Storage
v-tab(key='digitalocean') DigitalOcean Spaces
v-tab(key='dropbox') Dropbox
v-tab(key='gdrive') Google Drive
v-tab(key='onedrive') OneDrive
v-tab(key='scp') SCP (SSH)
v-tab-item(key='settings')
v-card.pa-3
v-tab(v-for='tgt in activeTargets', :key='tgt.key') {{ tgt.title }}
v-tab-item(key='settings', :transition='false', :reverse-transition='false')
v-card.pa-3(flat, tile)
.body-2.grey--text.text--darken-1 Select which storage targets to enable:
.caption.grey--text.pb-2 Some storage targets require additional configuration in their dedicated tab (when selected).
v-form
v-checkbox(
v-for='(target, n) in targets',
v-model='auths',
:key='n',
:label='target.text',
:value='target.value',
color='primary',
v-for='tgt in targets'
v-model='tgt.isEnabled'
:key='tgt.key'
:label='tgt.title'
color='primary'
:disabled='tgt.key === `local`'
hide-details
)
v-tab-item(v-for='(tgt, n) in activeTargets', :key='tgt.key', :transition='false', :reverse-transition='false')
v-card.pa-3(flat, tile)
v-form
v-subheader.pl-0 Target Configuration
.body-1.ml-3(v-if='!tgt.config || tgt.config.length < 1') This storage target has no configuration options you can modify.
v-text-field(
v-else
v-for='cfg in tgt.config'
:key='cfg.key'
:label='cfg.key'
v-model='cfg.value'
prepend-icon='settings_applications'
)
v-divider
v-btn(color='primary')
v-subheader.pl-0 Sync Direction
.body-1.ml-3 Choose how content synchronization is handled for this storage target.
.pr-3.pt-3
v-radio-group.ml-3.py-0(v-model='tgt.mode')
v-radio(
label='Bi-directional'
color='primary'
value='sync'
)
v-radio(
label='Push to target'
color='primary'
value='push'
)
v-radio(
label='Pull from target'
color='primary'
value='pull'
)
.body-1.ml-3
strong Bi-directional
.pb-3 In bi-directional mode, content is first pulled from the storage target. Any newer content overwrites local content. New content since last sync is then pushed to the storage target, overwriting any content on target if present.
strong Push to target
.pb-3 Content is always pushed to the storage target, overwriting any existing content. This is the default and safest choice for backup scenarios.
strong Pull from target
.pb-3 Content is always pulled from the storage target, overwriting any local content which already exists. This choice is usually reserved for single-use content import. Use caution with this option, as any local content will always be overwritten!
v-card-chin
v-btn(color='primary', @click='save')
v-icon(left) chevron_right
| Set Backup Targets
span Apply Configuration
v-spacer
v-btn(icon, @click='refresh')
v-icon.grey--text refresh
</template>
<script>
import _ from 'lodash'
import targetsQuery from 'gql/admin/storage/storage-query-targets.gql'
import targetsSaveMutation from 'gql/admin/storage/storage-mutation-save-targets.gql'
export default {
data() {
return {
targets: [
{ text: 'Local Filesystem', value: 'local' },
{ text: 'Git', value: 'auth0' },
{ text: 'Amazon S3', value: 'algolia' },
{ text: 'Azure Blob Storage', value: 'elasticsearch' },
{ text: 'DigitalOcean Spaces', value: 'solr' },
{ text: 'Dropbox', value: 'solr' },
{ text: 'Google Drive', value: 'solr' },
{ text: 'OneDrive', value: 'solr' },
{ text: 'SCP (SSH)', value: 'solr' }
],
auths: ['local']
targets: []
}
},
computed: {
activeTargets() {
return _.filter(this.targets, 'isEnabled')
}
},
methods: {
async refresh() {
await this.$apollo.queries.targets.refetch()
this.$store.commit('showNotification', {
message: 'List of storage targets has been refreshed.',
style: 'success',
icon: 'cached'
})
},
async save() {
this.$store.commit(`loadingStart`, 'admin-storage-savetargets')
await this.$apollo.mutate({
mutation: targetsSaveMutation,
variables: {
targets: this.targets.map(tgt => _.pick(tgt, [
'isEnabled',
'key',
'config',
'mode'
]))
}
})
this.$store.commit('showNotification', {
message: 'Storage configuration saved successfully.',
style: 'success',
icon: 'check'
})
this.$store.commit(`loadingStop`, 'admin-storage-savetargets')
}
},
apollo: {
targets: {
query: targetsQuery,
fetchPolicy: 'network-only',
update: (data) => _.cloneDeep(data.storage.targets),
watchLoading (isLoading) {
this.$store.commit(`loading${isLoading ? 'Start' : 'Stop'}`, 'admin-storage-refresh')
}
}
}
}
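
For reference, a minimal sketch of the data shape this component works with once the targets query resolves. Field names follow storage-query-targets.gql; the target key and config values are illustrative only, borrowed from the DigitalOcean module further down:

// Illustrative only: one entry of this.targets after the Apollo query resolves.
// Field names come from storage-query-targets.gql; values are made up.
const exampleTarget = {
  isEnabled: true,
  key: 'digitalocean',
  title: 'DigitalOcean Spaces',
  mode: 'push',
  config: [
    { key: 'accessKeyId', value: '' },
    { key: 'accessSecret', value: '' },
    { key: 'region', value: '' },
    { key: 'bucket', value: '' }
  ]
}
// save() then picks only isEnabled, key, config and mode from each entry
// before sending the array as the `targets` mutation variable.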

@@ -1,6 +1,6 @@
mutation($strategies: [AuthenticationStrategyInput]) {
authentication {
updateStrategies(strategies: $strategies) {
mutation($targets: [StorageTargetInput]) {
storage {
updateTargets(targets: $targets) {
responseResult {
succeeded
errorCode

@@ -0,0 +1,12 @@
mutation($strategies: [AuthenticationStrategyInput]) {
authentication {
updateStrategies(strategies: $strategies) {
responseResult {
succeeded
errorCode
slug
message
}
}
}
}

@@ -0,0 +1,14 @@
query {
storage {
targets(orderBy: "title ASC") {
isEnabled
key
title
mode
config {
key
value
}
}
}
}

@@ -95,6 +95,7 @@ exports.up = knex => {
table.string('key').notNullable().unique()
table.string('title').notNullable()
table.boolean('isEnabled').notNullable().defaultTo(false)
table.enum('mode', ['sync', 'push', 'pull']).notNullable().defaultTo('push')
table.jsonb('config')
})
// TAGS --------------------------------

@@ -0,0 +1,64 @@
const Model = require('objection').Model
const autoload = require('auto-load')
const path = require('path')
const _ = require('lodash')
/* global WIKI */
/**
* Storage model
*/
module.exports = class Storage extends Model {
static get tableName() { return 'storage' }
static get jsonSchema () {
return {
type: 'object',
required: ['key', 'title', 'isEnabled'],
properties: {
id: {type: 'integer'},
key: {type: 'string'},
title: {type: 'string'},
isEnabled: {type: 'boolean'},
mode: {type: 'string'},
config: {type: 'object'}
}
}
}
static async getTargets() {
return WIKI.db.storage.query()
}
static async refreshTargetsFromDisk() {
try {
const dbTargets = await WIKI.db.storage.query()
const diskTargets = autoload(path.join(WIKI.SERVERPATH, 'modules/storage'))
let newTargets = []
_.forOwn(diskTargets, (target, targetKey) => {
if (!_.some(dbTargets, ['key', target.key])) {
newTargets.push({
key: target.key,
title: target.title,
isEnabled: false,
mode: 'push',
config: _.reduce(target.props, (result, value, key) => {
_.set(result, value, '')
return result
}, {})
})
}
})
if (newTargets.length > 0) {
await WIKI.db.storage.query().insert(newTargets)
WIKI.logger.info(`Loaded ${newTargets.length} new storage targets: [ OK ]`)
} else {
WIKI.logger.info(`No new storage targets found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new storage providers: [ FAILED ]`)
WIKI.logger.error(err)
}
}
}
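
As a sketch of what the config seeding in refreshTargetsFromDisk produces: the _.reduce over a module's props array turns each declared prop name into an empty-string entry of the stored config object (the DigitalOcean props below are used here purely as an example):

const _ = require('lodash')

// Example props list, as declared by the DigitalOcean storage module in this commit.
const props = ['accessKeyId', 'accessSecret', 'region', 'bucket']

// Same reduction as in refreshTargetsFromDisk.
const config = _.reduce(props, (result, value) => {
  _.set(result, value, '')
  return result
}, {})
// config => { accessKeyId: '', accessSecret: '', region: '', bucket: '' }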

@@ -0,0 +1,48 @@
const _ = require('lodash')
const graphHelper = require('../../helpers/graph')
/* global WIKI */
module.exports = {
Query: {
async storage() { return {} }
},
Mutation: {
async storage() { return {} }
},
StorageQuery: {
async targets(obj, args, context, info) {
let targets = await WIKI.db.storage.getTargets()
targets = targets.map(stg => ({
...stg,
config: _.transform(stg.config, (res, value, key) => {
res.push({ key, value })
}, [])
}))
if (args.filter) { targets = graphHelper.filter(targets, args.filter) }
if (args.orderBy) { targets = graphHelper.orderBy(targets, args.orderBy) }
return targets
}
},
StorageMutation: {
async updateTargets(obj, args, context) {
try {
for (let tgt of args.targets) {
await WIKI.db.storage.query().patch({
isEnabled: tgt.isEnabled,
mode: tgt.mode,
config: _.reduce(tgt.config, (result, value, key) => {
_.set(result, value.key, value.value)
return result
}, {})
}).where('key', tgt.key)
}
return {
responseResult: graphHelper.generateSuccess('Storage targets updated successfully')
}
} catch (err) {
return graphHelper.generateError(err)
}
}
}
}
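
The resolvers convert the stored config between two shapes: StorageQuery.targets flattens the JSONB object into a list of key/value pairs for the client, and StorageMutation.updateTargets reduces that list back into an object before patching the row. A minimal sketch of the round trip (sample keys and values are made up):

const _ = require('lodash')

// Object -> list, as in StorageQuery.targets
const asList = _.transform({ region: 'nyc3', bucket: 'wiki' }, (res, value, key) => {
  res.push({ key, value })
}, [])
// asList => [ { key: 'region', value: 'nyc3' }, { key: 'bucket', value: 'wiki' } ]

// List -> object, as in StorageMutation.updateTargets
const asObject = _.reduce(asList, (result, pair) => {
  _.set(result, pair.key, pair.value)
  return result
}, {})
// asObject => { region: 'nyc3', bucket: 'wiki' }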

@@ -0,0 +1,51 @@
# ===============================================
# STORAGE
# ===============================================
extend type Query {
storage: StorageQuery
}
extend type Mutation {
storage: StorageMutation
}
# -----------------------------------------------
# QUERIES
# -----------------------------------------------
type StorageQuery {
targets(
filter: String
orderBy: String
): [StorageTarget]
}
# -----------------------------------------------
# MUTATIONS
# -----------------------------------------------
type StorageMutation {
updateTargets(
targets: [StorageTargetInput]
): DefaultResponse
}
# -----------------------------------------------
# TYPES
# -----------------------------------------------
type StorageTarget {
isEnabled: Boolean!
key: String!
title: String!
mode: String
config: [KeyValuePair]
}
input StorageTargetInput {
isEnabled: Boolean!
key: String!
mode: String!
config: [KeyValuePairInput]
}

@@ -1,7 +1,7 @@
module.exports = {
key: 'digitalocean',
title: 'DigitalOcean Spaces',
props: [],
props: ['accessKeyId', 'accessSecret', 'region', 'bucket'],
activate() {
},

@@ -319,6 +319,9 @@ module.exports = () => {
await WIKI.db.editors.refreshEditorsFromDisk()
await WIKI.db.editors.query().patch({ isEnabled: true }).where('key', 'markdown')
// Load storage targets
await WIKI.db.storage.refreshTargetsFromDisk()
// Create root administrator
WIKI.logger.info('Creating root administrator...')
await WIKI.db.users.query().delete().where({
