diff --git a/dev.code-workspace b/dev.code-workspace
new file mode 100644
index 00000000..3a3de14b
--- /dev/null
+++ b/dev.code-workspace
@@ -0,0 +1,16 @@
+{
+ "folders": [
+ {
+ "path": "ux"
+ },
+ {
+ "path": "server"
+ }
+ ],
+ "settings": {
+ "i18n-ally.localesPaths": [
+ "src/i18n",
+ "src/i18n/locales"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 1a061117..3fa77c29 100644
--- a/package.json
+++ b/package.json
@@ -34,32 +34,32 @@
"node": ">=16.0"
},
"dependencies": {
- "@azure/storage-blob": "12.2.1",
+ "@azure/storage-blob": "12.9.0",
"@exlinc/keycloak-passport": "1.0.2",
+ "@graphql-tools/schema": "8.3.7",
+ "@graphql-tools/utils": "8.6.6",
"@joplin/turndown-plugin-gfm": "1.0.43",
"@root/csr": "0.8.1",
"@root/keypairs": "0.10.3",
"@root/pem": "1.0.4",
"acme": "3.0.3",
- "akismet-api": "5.2.1",
- "algoliasearch": "4.5.1",
+ "akismet-api": "5.3.0",
"apollo-fetch": "0.7.0",
- "apollo-server": "2.25.2",
- "apollo-server-express": "2.25.2",
+ "apollo-server": "3.6.7",
+ "apollo-server-express": "3.6.7",
"auto-load": "3.0.4",
"aws-sdk": "2.1111.0",
- "azure-search-client": "3.1.5",
"bcryptjs-then": "1.0.1",
"bluebird": "3.7.2",
- "body-parser": "1.19.1",
- "chalk": "4.1.0",
+ "body-parser": "1.20.0",
+ "chalk": "4.1.2",
"cheerio": "1.0.0-rc.5",
"chokidar": "3.5.3",
"chromium-pickle-js": "0.2.0",
"clean-css": "4.2.3",
"command-exists": "1.2.9",
"compression": "1.7.4",
- "connect-session-knex": "2.0.0",
+ "connect-session-knex": "2.1.1",
"cookie-parser": "1.4.6",
"cors": "2.8.5",
"cuint": "0.2.2",
@@ -69,8 +69,6 @@
"diff2html": "3.1.14",
"dompurify": "2.2.7",
"dotize": "0.3.0",
- "elasticsearch6": "npm:@elastic/elasticsearch@6",
- "elasticsearch7": "npm:@elastic/elasticsearch@7",
"emoji-regex": "9.2.2",
"eventemitter2": "6.4.5",
"express": "4.17.3",
@@ -80,11 +78,11 @@
"filesize": "6.1.0",
"fs-extra": "9.0.1",
"getos": "3.2.1",
- "graphql": "15.3.0",
+ "graphql": "16.3.0",
"graphql-list-fields": "2.0.2",
- "graphql-rate-limit-directive": "1.2.1",
- "graphql-subscriptions": "1.1.0",
- "graphql-tools": "7.0.0",
+ "graphql-rate-limit-directive": "2.0.2",
+ "graphql-tools": "8.2.5",
+ "graphql-upload": "13.0.0",
"he": "1.2.0",
"highlight.js": "10.3.1",
"i18next": "19.8.3",
@@ -93,14 +91,14 @@
"image-size": "0.9.2",
"js-base64": "3.7.2",
"js-binary": "1.2.0",
- "js-yaml": "3.14.0",
+ "js-yaml": "4.1.0",
"jsdom": "16.4.0",
"jsonwebtoken": "8.5.1",
"katex": "0.12.0",
- "klaw": "3.0.0",
+ "klaw": "4.0.1",
"knex": "1.0.5",
"lodash": "4.17.21",
- "luxon": "1.25.0",
+ "luxon": "2.3.1",
"markdown-it": "11.0.1",
"markdown-it-abbr": "1.0.4",
"markdown-it-attrs": "3.0.3",
@@ -116,15 +114,15 @@
"markdown-it-sup": "1.0.0",
"markdown-it-task-lists": "2.1.1",
"mathjax": "3.1.2",
- "mime-types": "2.1.34",
- "moment": "2.29.1",
+ "mime-types": "2.1.35",
+ "moment": "2.29.2",
"moment-timezone": "0.5.31",
"ms": "2.1.3",
"multer": "1.4.4",
"nanoid": "3.2.0",
"node-2fa": "1.1.2",
"node-cache": "5.1.2",
- "nodemailer": "6.7.2",
+ "nodemailer": "6.7.3",
"objection": "3.0.1",
"passport": "0.4.1",
"passport-auth0": "1.4.2",
@@ -155,7 +153,7 @@
"pug": "3.0.2",
"punycode": "2.1.1",
"qr-image": "3.2.0",
- "raven": "2.6.4",
+ "rate-limiter-flexible": "2.3.6",
"remove-markdown": "0.3.0",
"request": "2.88.2",
"request-promise": "4.2.6",
@@ -165,18 +163,15 @@
"semver": "7.3.6",
"serve-favicon": "2.5.0",
"simple-git": "2.21.0",
- "solr-node": "1.2.1",
- "ssh2": "1.5.0",
+ "ssh2": "1.9.0",
"ssh2-promise": "1.0.2",
"striptags": "3.2.0",
- "subscriptions-transport-ws": "0.9.18",
"tar-fs": "2.1.1",
"turndown": "7.1.1",
"twemoji": "13.1.0",
"uslug": "1.0.4",
"uuid": "8.3.2",
"validate.js": "0.13.1",
- "winston": "3.7.2",
"xss": "1.0.11",
"yargs": "16.1.0"
},
@@ -219,10 +214,10 @@
"babel-plugin-transform-imports": "2.0.0",
"cache-loader": "4.1.0",
"canvas-confetti": "1.3.1",
- "cash-dom": "8.1.0",
+ "cash-dom": "8.1.1",
"chart.js": "2.9.4",
"clean-webpack-plugin": "3.0.0",
- "clipboard": "2.0.8",
+ "clipboard": "2.0.10",
"codemirror": "5.58.2",
"copy-webpack-plugin": "6.2.1",
"core-js": "3.6.5",
@@ -321,10 +316,6 @@
"xterm": "4.9.0",
"zxcvbn": "4.4.2"
},
- "resolutions": {
- "apollo-server-express/**/graphql-tools": "4.0.8",
- "graphql": "15.3.0"
- },
"browserslist": [
"> 1%",
"last 2 major versions",
diff --git a/server/core/config.js b/server/core/config.js
index 26c242e8..80c8dc02 100644
--- a/server/core/config.js
+++ b/server/core/config.js
@@ -32,12 +32,12 @@ module.exports = {
let appdata = {}
try {
- appconfig = yaml.safeLoad(
+ appconfig = yaml.load(
cfgHelper.parseConfigValue(
fs.readFileSync(confPaths.config, 'utf8')
)
)
- appdata = yaml.safeLoad(fs.readFileSync(confPaths.data, 'utf8'))
+ appdata = yaml.load(fs.readFileSync(confPaths.data, 'utf8'))
appdata.regex = require(confPaths.dataRegex)
console.info(chalk.green.bold(`OK`))
} catch (err) {
diff --git a/server/core/db.js b/server/core/db.js
index 96065336..b7946d96 100644
--- a/server/core/db.js
+++ b/server/core/db.js
@@ -128,6 +128,8 @@ module.exports = {
},
// -> Migrate DB Schemas
async syncSchemas () {
+ WIKI.logger.info('Ensuring DB schema exists...')
+ await self.knex.raw(`CREATE SCHEMA IF NOT EXISTS ??`, [WIKI.config.db.schemas.wiki])
WIKI.logger.info('Ensuring DB migrations have been applied...')
return self.knex.migrate.latest({
tableName: 'migrations',
diff --git a/server/core/kernel.js b/server/core/kernel.js
index 7b0303b1..dbbcd30c 100644
--- a/server/core/kernel.js
+++ b/server/core/kernel.js
@@ -52,14 +52,9 @@ module.exports = {
*/
async bootMaster() {
try {
- if (WIKI.config.setup) {
- WIKI.logger.info('Starting setup wizard...')
- require('../setup')()
- } else {
- await this.preBootMaster()
- await require('../master')()
- this.postBootMaster()
- }
+ await this.preBootMaster()
+ await require('../master')()
+ this.postBootMaster()
} catch (err) {
WIKI.logger.error(err)
process.exit(1)
@@ -73,16 +68,13 @@ module.exports = {
await WIKI.models.authentication.refreshStrategiesFromDisk()
await WIKI.models.commentProviders.refreshProvidersFromDisk()
await WIKI.models.editors.refreshEditorsFromDisk()
- await WIKI.models.loggers.refreshLoggersFromDisk()
await WIKI.models.renderers.refreshRenderersFromDisk()
- await WIKI.models.searchEngines.refreshSearchEnginesFromDisk()
await WIKI.models.storage.refreshTargetsFromDisk()
await WIKI.extensions.init()
await WIKI.auth.activateStrategies()
await WIKI.models.commentProviders.initProvider()
- await WIKI.models.searchEngines.initEngine()
await WIKI.models.storage.initTargets()
WIKI.scheduler.start()
diff --git a/server/core/logger.js b/server/core/logger.js
index d6cf0ab5..912f2171 100644
--- a/server/core/logger.js
+++ b/server/core/logger.js
@@ -1,44 +1,53 @@
-// const _ = require('lodash')
-const winston = require('winston')
+const chalk = require('chalk')
+const EventEmitter = require('events')
/* global WIKI */
-module.exports = {
- loggers: {},
- init(uid) {
- const loggerFormats = [
- winston.format.label({ label: uid }),
- winston.format.timestamp()
- ]
-
- if (WIKI.config.logFormat === 'json') {
- loggerFormats.push(winston.format.json())
- } else {
- loggerFormats.push(winston.format.colorize())
- loggerFormats.push(winston.format.printf(info => `${info.timestamp} [${info.label}] ${info.level}: ${info.message}`))
- }
-
- const logger = winston.createLogger({
- level: WIKI.config.logLevel,
- format: winston.format.combine(...loggerFormats)
- })
+const LEVELS = ['error', 'warn', 'info', 'debug']
+const LEVELSIGNORED = ['verbose', 'silly']
+const LEVELCOLORS = {
+ error: 'red',
+ warn: 'yellow',
+ info: 'green',
+ debug: 'cyan'
+}
+
+class Logger extends EventEmitter {}
+const primaryLogger = new Logger()
+
+let ignoreNextLevels = false
- // Init Console (default)
+LEVELS.forEach(lvl => {
+ primaryLogger[lvl] = (...args) => {
+ primaryLogger.emit(lvl, ...args)
+ }
- logger.add(new winston.transports.Console({
- level: WIKI.config.logLevel,
- prettyPrint: true,
- colorize: true,
- silent: false,
- timestamp: true
- }))
+ if (!ignoreNextLevels) {
+ primaryLogger.on(lvl, (msg) => {
+ if (WIKI.config.logFormat === 'json') {
+ console.log(JSON.stringify({
+ timestamp: new Date().toISOString(),
+ instance: WIKI.INSTANCE_ID,
+ level: lvl,
+ message: msg
+ }))
+ } else {
+ console.log(chalk`${new Date().toISOString()} {dim [${WIKI.INSTANCE_ID}]} {${LEVELCOLORS[lvl]}.bold ${lvl}}: ${msg}`)
+ }
+ })
+ }
+ if (lvl === WIKI.config.logLevel) {
+ ignoreNextLevels = true
+ }
+})
- // _.forOwn(_.omitBy(WIKI.config.logging.loggers, s => s.enabled === false), (loggerConfig, loggerKey) => {
- // let loggerModule = require(`../modules/logging/${loggerKey}`)
- // loggerModule.init(logger, loggerConfig)
- // this.loggers[logger.key] = loggerModule
- // })
+LEVELSIGNORED.forEach(lvl => {
+ primaryLogger[lvl] = () => {}
+})
- return logger
+module.exports = {
+ loggers: {},
+ init () {
+ return primaryLogger
}
}
diff --git a/server/core/servers.js b/server/core/servers.js
index 95046e65..74aa5bae 100644
--- a/server/core/servers.js
+++ b/server/core/servers.js
@@ -21,7 +21,6 @@ module.exports = {
async startHTTP () {
WIKI.logger.info(`HTTP Server on port: [ ${WIKI.config.port} ]`)
this.servers.http = http.createServer(WIKI.app)
- this.servers.graph.installSubscriptionHandlers(this.servers.http)
this.servers.http.listen(WIKI.config.port, WIKI.config.bindIP)
this.servers.http.on('error', (error) => {
@@ -83,7 +82,6 @@ module.exports = {
return process.exit(1)
}
this.servers.https = https.createServer(tlsOpts, WIKI.app)
- this.servers.graph.installSubscriptionHandlers(this.servers.https)
this.servers.https.listen(WIKI.config.ssl.port, WIKI.config.bindIP)
this.servers.https.on('error', (error) => {
@@ -121,15 +119,15 @@ module.exports = {
async startGraphQL () {
const graphqlSchema = require('../graph')
this.servers.graph = new ApolloServer({
- ...graphqlSchema,
+ schema: graphqlSchema,
+ uploads: false,
context: ({ req, res }) => ({ req, res }),
- subscriptions: {
- onConnect: (connectionParams, webSocket) => {
-
- },
- path: '/graphql-subscriptions'
- }
+ plugins: [
+ // ApolloServerPluginDrainHttpServer({ httpServer: this.servers.http })
+ // ...(this.servers.https && ApolloServerPluginDrainHttpServer({ httpServer: this.servers.https }))
+ ]
})
+ await this.servers.graph.start()
this.servers.graph.applyMiddleware({ app: WIKI.app, cors: false })
},
/**
diff --git a/server/db/migrations/3.0.0.js b/server/db/migrations/3.0.0.js
index 70e085e4..3fbae790 100644
--- a/server/db/migrations/3.0.0.js
+++ b/server/db/migrations/3.0.0.js
@@ -327,12 +327,6 @@ exports.up = async knex => {
// -> SYSTEM CONFIG
await knex('settings').insert([
- {
- key: 'update',
- value: {
- locales: true
- }
- },
{
key: 'mail',
value: {
@@ -372,6 +366,18 @@ exports.up = async knex => {
uploadMaxFiles: 20,
uploadScanSVG: true
}
+ },
+ {
+ key: 'system',
+ value: {
+ sessionSecret: crypto.randomBytes(32).toString('hex')
+ }
+ },
+ {
+ key: 'update',
+ value: {
+ locales: true
+ }
}
])
diff --git a/server/graph/index.js b/server/graph/index.js
index d62f329f..a617f08e 100644
--- a/server/graph/index.js
+++ b/server/graph/index.js
@@ -1,26 +1,22 @@
const _ = require('lodash')
const fs = require('fs')
-// const gqlTools = require('graphql-tools')
const path = require('path')
const autoload = require('auto-load')
-const PubSub = require('graphql-subscriptions').PubSub
-const { LEVEL, MESSAGE } = require('triple-beam')
-const Transport = require('winston-transport')
-const { createRateLimitTypeDef } = require('graphql-rate-limit-directive')
-// const { GraphQLUpload } = require('graphql-upload')
+const { makeExecutableSchema } = require('@graphql-tools/schema')
+const { rateLimitDirective } = require('graphql-rate-limit-directive')
+const GraphQLUpload = require('graphql-upload/GraphQLUpload.js')
+const { rateLimitDirectiveTypeDefs, rateLimitDirectiveTransformer } = rateLimitDirective()
/* global WIKI */
WIKI.logger.info(`Loading GraphQL Schema...`)
-// Init Subscription PubSub
-
-WIKI.GQLEmitter = new PubSub()
-
// Schemas
-let typeDefs = [createRateLimitTypeDef()]
-let schemas = fs.readdirSync(path.join(WIKI.SERVERPATH, 'graph/schemas'))
+const typeDefs = [
+ rateLimitDirectiveTypeDefs
+]
+const schemas = fs.readdirSync(path.join(WIKI.SERVERPATH, 'graph/schemas'))
schemas.forEach(schema => {
typeDefs.push(fs.readFileSync(path.join(WIKI.SERVERPATH, `graph/schemas/${schema}`), 'utf8'))
})
@@ -28,47 +24,22 @@ schemas.forEach(schema => {
// Resolvers
let resolvers = {
- // Upload: GraphQLUpload
+ Upload: GraphQLUpload
}
const resolversObj = _.values(autoload(path.join(WIKI.SERVERPATH, 'graph/resolvers')))
resolversObj.forEach(resolver => {
_.merge(resolvers, resolver)
})
-// Directives
-
-let schemaDirectives = {
- ...autoload(path.join(WIKI.SERVERPATH, 'graph/directives'))
-}
-
-// Live Trail Logger (admin)
-
-class LiveTrailLogger extends Transport {
- constructor(opts) {
- super(opts)
+// Make executable schema
- this.name = 'liveTrailLogger'
- this.level = 'debug'
- }
-
- log (info, callback = () => {}) {
- WIKI.GQLEmitter.publish('livetrail', {
- loggingLiveTrail: {
- timestamp: new Date(),
- level: info[LEVEL],
- output: info[MESSAGE]
- }
- })
- callback(null, true)
- }
-}
+let schema = makeExecutableSchema({
+ typeDefs,
+ resolvers
+})
-WIKI.logger.add(new LiveTrailLogger({}))
+schema = rateLimitDirectiveTransformer(schema)
WIKI.logger.info(`GraphQL Schema: [ OK ]`)
-module.exports = {
- typeDefs,
- resolvers,
- schemaDirectives
-}
+module.exports = schema
diff --git a/server/graph/resolvers/logging.js b/server/graph/resolvers/logging.js
deleted file mode 100644
index e095f0b5..00000000
--- a/server/graph/resolvers/logging.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const _ = require('lodash')
-const graphHelper = require('../../helpers/graph')
-
-/* global WIKI */
-
-module.exports = {
- Query: {
- async logging() { return {} }
- },
- Mutation: {
- async logging() { return {} }
- },
- Subscription: {
- loggingLiveTrail: {
- subscribe: () => WIKI.GQLEmitter.asyncIterator('livetrail')
- }
- },
- LoggingQuery: {
- async loggers(obj, args, context, info) {
- let loggers = await WIKI.models.loggers.getLoggers()
- loggers = loggers.map(logger => {
- const loggerInfo = _.find(WIKI.data.loggers, ['key', logger.key]) || {}
- return {
- ...loggerInfo,
- ...logger,
- config: _.sortBy(_.transform(logger.config, (res, value, key) => {
- const configData = _.get(loggerInfo.props, key, {})
- res.push({
- key,
- value: JSON.stringify({
- ...configData,
- value
- })
- })
- }, []), 'key')
- }
- })
- // if (args.filter) { loggers = graphHelper.filter(loggers, args.filter) }
- if (args.orderBy) { loggers = _.sortBy(loggers, [args.orderBy]) }
- return loggers
- }
- },
- LoggingMutation: {
- async updateLoggers(obj, args, context) {
- try {
- for (let logger of args.loggers) {
- await WIKI.models.loggers.query().patch({
- isEnabled: logger.isEnabled,
- level: logger.level,
- config: _.reduce(logger.config, (result, value, key) => {
- _.set(result, `${value.key}`, value.value)
- return result
- }, {})
- }).where('key', logger.key)
- }
- return {
- responseResult: graphHelper.generateSuccess('Loggers updated successfully')
- }
- } catch (err) {
- return graphHelper.generateError(err)
- }
- }
- }
-}
diff --git a/server/graph/schemas/scalars.graphql b/server/graph/schemas/scalars.graphql
index 9f6833a7..8a87430a 100644
--- a/server/graph/schemas/scalars.graphql
+++ b/server/graph/schemas/scalars.graphql
@@ -2,5 +2,5 @@
scalar Date
scalar JSON
-# scalar Upload
+scalar Upload
scalar UUID
diff --git a/server/index.js b/server/index.js
index 87eff16b..f282d755 100644
--- a/server/index.js
+++ b/server/index.js
@@ -6,6 +6,12 @@
const path = require('path')
const { nanoid } = require('nanoid')
const { DateTime } = require('luxon')
+const semver = require('semver')
+
+if (!semver.satisfies(process.version, '>=16')) {
+ console.error('ERROR: Node.js 16.x or later required!')
+ process.exit(1)
+}
let WIKI = {
IS_DEBUG: process.env.NODE_ENV === 'development',
@@ -26,7 +32,7 @@ WIKI.configSvc.init()
// Init Logger
// ----------------------------------------
-WIKI.logger = require('./core/logger').init('MASTER')
+WIKI.logger = require('./core/logger').init()
// ----------------------------------------
// Start Kernel
diff --git a/server/master.js b/server/master.js
index 8274a2ed..3d334e68 100644
--- a/server/master.js
+++ b/server/master.js
@@ -77,7 +77,7 @@ module.exports = async () => {
app.use(cookieParser())
app.use(session({
- secret: WIKI.config.sessionSecret,
+ secret: WIKI.config.system.sessionSecret,
resave: false,
saveUninitialized: false,
store: new KnexSessionStore({
diff --git a/server/models/analytics.js b/server/models/analytics.js
index 17a3f6ab..ec8a72a4 100644
--- a/server/models/analytics.js
+++ b/server/models/analytics.js
@@ -17,11 +17,12 @@ module.exports = class Analytics extends Model {
static get jsonSchema () {
return {
type: 'object',
- required: ['key', 'isEnabled'],
+ required: ['module', 'isEnabled'],
properties: {
- key: {type: 'string'},
- isEnabled: {type: 'boolean'}
+ id: { type: 'string' },
+ module: { type: 'string' },
+ isEnabled: { type: 'boolean', default: false }
}
}
}
@@ -32,65 +33,27 @@ module.exports = class Analytics extends Model {
static async getProviders(isEnabled) {
const providers = await WIKI.models.analytics.query().where(_.isBoolean(isEnabled) ? { isEnabled } : {})
- return _.sortBy(providers, ['key'])
+ return _.sortBy(providers, ['module'])
}
static async refreshProvidersFromDisk() {
- let trx
try {
- const dbProviders = await WIKI.models.analytics.query()
-
// -> Fetch definitions from disk
const analyticsDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/analytics'))
- let diskProviders = []
- for (let dir of analyticsDirs) {
+ WIKI.data.analytics = []
+ for (const dir of analyticsDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/analytics', dir, 'definition.yml'), 'utf8')
- diskProviders.push(yaml.safeLoad(def))
+ const defParsed = yaml.load(def)
+ defParsed.key = dir
+ defParsed.props = commonHelper.parseModuleProps(defParsed.props)
+ WIKI.data.analytics.push(defParsed)
+ WIKI.logger.debug(`Loaded analytics module definition ${dir}: [ OK ]`)
}
- WIKI.data.analytics = diskProviders.map(provider => ({
- ...provider,
- props: commonHelper.parseModuleProps(provider.props)
- }))
- let newProviders = []
- for (let provider of WIKI.data.analytics) {
- if (!_.some(dbProviders, ['key', provider.key])) {
- newProviders.push({
- key: provider.key,
- isEnabled: false,
- config: _.transform(provider.props, (result, value, key) => {
- _.set(result, key, value.default)
- return result
- }, {})
- })
- } else {
- const providerConfig = _.get(_.find(dbProviders, ['key', provider.key]), 'config', {})
- await WIKI.models.analytics.query().patch({
- config: _.transform(provider.props, (result, value, key) => {
- if (!_.has(result, key)) {
- _.set(result, key, value.default)
- }
- return result
- }, providerConfig)
- }).where('key', provider.key)
- }
- }
- if (newProviders.length > 0) {
- trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
- for (let provider of newProviders) {
- await WIKI.models.analytics.query(trx).insert(provider)
- }
- await trx.commit()
- WIKI.logger.info(`Loaded ${newProviders.length} new analytics providers: [ OK ]`)
- } else {
- WIKI.logger.info(`No new analytics providers found: [ SKIPPED ]`)
- }
+ WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} analytics module definitions: [ OK ]`)
} catch (err) {
WIKI.logger.error(`Failed to scan or load new analytics providers: [ FAILED ]`)
WIKI.logger.error(err)
- if (trx) {
- trx.rollback()
- }
}
}
diff --git a/server/models/loggers.js b/server/models/loggers.js
deleted file mode 100644
index cd6664c6..00000000
--- a/server/models/loggers.js
+++ /dev/null
@@ -1,113 +0,0 @@
-const Model = require('objection').Model
-const path = require('path')
-const fs = require('fs-extra')
-const _ = require('lodash')
-const yaml = require('js-yaml')
-const commonHelper = require('../helpers/common')
-
-/* global WIKI */
-
-/**
- * Logger model
- */
-module.exports = class Logger extends Model {
- static get tableName() { return 'loggers' }
- static get idColumn() { return 'key' }
-
- static get jsonSchema () {
- return {
- type: 'object',
- required: ['key', 'isEnabled'],
-
- properties: {
- key: {type: 'string'},
- isEnabled: {type: 'boolean'},
- level: {type: 'string'}
- }
- }
- }
-
- static get jsonAttributes() {
- return ['config']
- }
-
- static async getLoggers() {
- return WIKI.models.loggers.query()
- }
-
- static async refreshLoggersFromDisk() {
- let trx
- try {
- const dbLoggers = await WIKI.models.loggers.query()
-
- // -> Fetch definitions from disk
- const loggersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/logging'))
- let diskLoggers = []
- for (let dir of loggersDirs) {
- const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/logging', dir, 'definition.yml'), 'utf8')
- diskLoggers.push(yaml.safeLoad(def))
- }
- WIKI.data.loggers = diskLoggers.map(logger => ({
- ...logger,
- props: commonHelper.parseModuleProps(logger.props)
- }))
-
- // -> Insert new loggers
- let newLoggers = []
- for (let logger of WIKI.data.loggers) {
- if (!_.some(dbLoggers, ['key', logger.key])) {
- newLoggers.push({
- key: logger.key,
- isEnabled: (logger.key === 'console'),
- level: logger.defaultLevel,
- config: _.transform(logger.props, (result, value, key) => {
- _.set(result, key, value.default)
- return result
- }, {})
- })
- } else {
- const loggerConfig = _.get(_.find(dbLoggers, ['key', logger.key]), 'config', {})
- await WIKI.models.loggers.query().patch({
- config: _.transform(logger.props, (result, value, key) => {
- if (!_.has(result, key)) {
- _.set(result, key, value.default)
- }
- return result
- }, loggerConfig)
- }).where('key', logger.key)
- }
- }
- if (newLoggers.length > 0) {
- trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
- for (let logger of newLoggers) {
- await WIKI.models.loggers.query(trx).insert(logger)
- }
- await trx.commit()
- WIKI.logger.info(`Loaded ${newLoggers.length} new loggers: [ OK ]`)
- } else {
- WIKI.logger.info(`No new loggers found: [ SKIPPED ]`)
- }
- } catch (err) {
- WIKI.logger.error(`Failed to scan or load new loggers: [ FAILED ]`)
- WIKI.logger.error(err)
- if (trx) {
- trx.rollback()
- }
- }
- }
-
- static async pageEvent({ event, page }) {
- const loggers = await WIKI.models.storage.query().where('isEnabled', true)
- if (loggers && loggers.length > 0) {
- _.forEach(loggers, logger => {
- WIKI.queue.job.syncStorage.add({
- event,
- logger,
- page
- }, {
- removeOnComplete: true
- })
- })
- }
- }
-}
diff --git a/server/models/searchEngines.js b/server/models/searchEngines.js
deleted file mode 100644
index 40f881d4..00000000
--- a/server/models/searchEngines.js
+++ /dev/null
@@ -1,125 +0,0 @@
-const Model = require('objection').Model
-const path = require('path')
-const fs = require('fs-extra')
-const _ = require('lodash')
-const yaml = require('js-yaml')
-const commonHelper = require('../helpers/common')
-
-/* global WIKI */
-
-/**
- * SearchEngine model
- */
-module.exports = class SearchEngine extends Model {
- static get tableName() { return 'searchEngines' }
- static get idColumn() { return 'key' }
-
- static get jsonSchema () {
- return {
- type: 'object',
- required: ['key', 'isEnabled'],
-
- properties: {
- key: {type: 'string'},
- isEnabled: {type: 'boolean'},
- level: {type: 'string'}
- }
- }
- }
-
- static get jsonAttributes() {
- return ['config']
- }
-
- static async getSearchEngines() {
- return WIKI.models.searchEngines.query()
- }
-
- static async refreshSearchEnginesFromDisk() {
- let trx
- try {
- const dbSearchEngines = await WIKI.models.searchEngines.query()
-
- // -> Fetch definitions from disk
- const searchEnginesDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/search'))
- let diskSearchEngines = []
- for (let dir of searchEnginesDirs) {
- const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/search', dir, 'definition.yml'), 'utf8')
- diskSearchEngines.push(yaml.safeLoad(def))
- }
- WIKI.data.searchEngines = diskSearchEngines.map(searchEngine => ({
- ...searchEngine,
- props: commonHelper.parseModuleProps(searchEngine.props)
- }))
-
- // -> Insert new searchEngines
- let newSearchEngines = []
- for (let searchEngine of WIKI.data.searchEngines) {
- if (!_.some(dbSearchEngines, ['key', searchEngine.key])) {
- newSearchEngines.push({
- key: searchEngine.key,
- isEnabled: false,
- config: _.transform(searchEngine.props, (result, value, key) => {
- _.set(result, key, value.default)
- return result
- }, {})
- })
- } else {
- const searchEngineConfig = _.get(_.find(dbSearchEngines, ['key', searchEngine.key]), 'config', {})
- await WIKI.models.searchEngines.query().patch({
- config: _.transform(searchEngine.props, (result, value, key) => {
- if (!_.has(result, key)) {
- _.set(result, key, value.default)
- }
- return result
- }, searchEngineConfig)
- }).where('key', searchEngine.key)
- }
- }
- if (newSearchEngines.length > 0) {
- trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
- for (let searchEngine of newSearchEngines) {
- await WIKI.models.searchEngines.query(trx).insert(searchEngine)
- }
- await trx.commit()
- WIKI.logger.info(`Loaded ${newSearchEngines.length} new search engines: [ OK ]`)
- } else {
- WIKI.logger.info(`No new search engines found: [ SKIPPED ]`)
- }
- } catch (err) {
- WIKI.logger.error(`Failed to scan or load new search engines: [ FAILED ]`)
- WIKI.logger.error(err)
- if (trx) {
- trx.rollback()
- }
- }
- }
-
- static async initEngine({ activate = false } = {}) {
- const searchEngine = await WIKI.models.searchEngines.query().findOne('isEnabled', true)
- if (searchEngine) {
- WIKI.data.searchEngine = require(`../modules/search/${searchEngine.key}/engine`)
- WIKI.data.searchEngine.key = searchEngine.key
- WIKI.data.searchEngine.config = searchEngine.config
- if (activate) {
- try {
- await WIKI.data.searchEngine.activate()
- } catch (err) {
- // -> Revert to basic engine
- if (err instanceof WIKI.Error.SearchActivationFailed) {
- await WIKI.models.searchEngines.query().patch({ isEnabled: false }).where('key', searchEngine.key)
- await WIKI.models.searchEngines.query().patch({ isEnabled: true }).where('key', 'db')
- await WIKI.models.searchEngines.initEngine()
- }
- throw err
- }
- }
-
- try {
- await WIKI.data.searchEngine.init()
- } catch (err) {
- WIKI.logger.warn(err)
- }
- }
- }
-}
diff --git a/ux/quasar.config.js b/ux/quasar.config.js
index a811fdc7..aae42970 100644
--- a/ux/quasar.config.js
+++ b/ux/quasar.config.js
@@ -30,6 +30,7 @@ module.exports = configure(function (/* ctx */) {
// https://v2.quasar.dev/quasar-cli/boot-files
boot: [
'apollo',
+ 'components',
'i18n'
],
diff --git a/ux/src/boot/components.js b/ux/src/boot/components.js
new file mode 100644
index 00000000..c230323c
--- /dev/null
+++ b/ux/src/boot/components.js
@@ -0,0 +1,7 @@
+import { boot } from 'quasar/wrappers'
+
+import BlueprintIcon from '../components/BlueprintIcon.vue'
+
+export default boot(({ app }) => {
+ app.component('BlueprintIcon', BlueprintIcon)
+})
diff --git a/ux/src/css/app.scss b/ux/src/css/app.scss
index 2a1680fc..2aa51b83 100644
--- a/ux/src/css/app.scss
+++ b/ux/src/css/app.scss
@@ -98,6 +98,17 @@ body::-webkit-scrollbar-thumb {
}
}
+// ------------------------------------------------------------------
+// ICONS SIZE FIX
+// ------------------------------------------------------------------
+
+.q-btn .q-icon {
+ &.fa-solid,
+ &.fa-regular {
+ font-size: 1.3em;
+ }
+}
+
.q-select__dropdown-icon {
font-size: 16px;
}
diff --git a/ux/src/layouts/AdminLayout.vue b/ux/src/layouts/AdminLayout.vue
index d6ac5d07..61439fe4 100644
--- a/ux/src/layouts/AdminLayout.vue
+++ b/ux/src/layouts/AdminLayout.vue
@@ -9,6 +9,11 @@ q-layout.admin(view='hHh Lpr lff')
q-toolbar-title.text-h6.font-poppins Wiki.js
q-toolbar.gt-sm.justify-center(style='height: 64px;', dark)
.text-overline.text-uppercase.text-grey {{t('admin.adminArea')}}
+ q-badge.q-ml-sm(
+ label='v3 Preview'
+ color='pink'
+ outline
+ )
q-toolbar(style='height: 64px;', dark)
q-space
q-spinner-tail(
diff --git a/ux/src/pages/AdminEditors.vue b/ux/src/pages/AdminEditors.vue
index 5c1b69c3..ffd04b93 100644
--- a/ux/src/pages/AdminEditors.vue
+++ b/ux/src/pages/AdminEditors.vue
@@ -4,8 +4,8 @@ q-page.admin-flags
.col-auto
img.admin-icon.animated.fadeInLeft(src='/_assets/icons/fluent-cashbook.svg')
.col.q-pl-md
- .text-h5.text-primary.animated.fadeInLeft {{ $t('admin.editors.title') }}
- .text-subtitle1.text-grey.animated.fadeInLeft.wait-p2s {{ $t('admin.editors.subtitle') }}
+ .text-h5.text-primary.animated.fadeInLeft {{ t('admin.editors.title') }}
+ .text-subtitle1.text-grey.animated.fadeInLeft.wait-p2s {{ t('admin.editors.subtitle') }}
.col-auto
q-btn.q-mr-sm.acrylic-btn(
icon='las la-question-circle'
@@ -16,7 +16,7 @@ q-page.admin-flags
type='a'
)
q-btn.q-mr-sm.acrylic-btn(
- icon='las la-redo-alt'
+ icon='fa-solid fa-rotate'
flat
color='secondary'
:loading='loading > 0'
@@ -24,8 +24,8 @@ q-page.admin-flags
)
q-btn(
unelevated
- icon='mdi-check'
- :label='$t(`common.actions.apply`)'
+ icon='fa-solid fa-check'
+ :label='t(`common.actions.apply`)'
color='secondary'
@click='save'
:disabled='loading > 0'
@@ -37,16 +37,16 @@ q-page.admin-flags
q-item(v-for='editor of editors', :key='editor.id')
blueprint-icon(:icon='editor.icon')
q-item-section
- q-item-label: strong {{$t(`admin.editors.` + editor.id + `Name`)}}
+ q-item-label: strong {{t(`admin.editors.` + editor.id + `Name`)}}
q-item-label.flex.items-center(caption)
- span {{$t(`admin.editors.` + editor.id + `Description`)}}
+ span {{t(`admin.editors.` + editor.id + `Description`)}}
template(v-if='editor.config')
q-item-section(
side
)
q-btn(
icon='las la-cog'
- :label='$t(`admin.editors.configuration`)'
+ :label='t(`admin.editors.configuration`)'
:color='$q.dark.isActive ? `blue-grey-3` : `blue-grey-8`'
outline
no-caps
@@ -59,69 +59,67 @@ q-page.admin-flags
:color='editor.isDisabled ? `grey` : `primary`'
checked-icon='las la-check'
unchecked-icon='las la-times'
- :label='$t(`admin.sites.isActive`)'
- :aria-label='$t(`admin.sites.isActive`)'
+ :label='t(`admin.sites.isActive`)'
+ :aria-label='t(`admin.sites.isActive`)'
:disabled='editor.isDisabled'
)
-