feat: loggers + search engines models

pull/662/head
Nicolas Giard 6 years ago
parent 9c6a4f6c20
commit 5919d14670

@@ -51,7 +51,9 @@ module.exports = {
async postBootMaster() {
await WIKI.models.authentication.refreshStrategiesFromDisk()
await WIKI.models.editors.refreshEditorsFromDisk()
await WIKI.models.loggers.refreshLoggersFromDisk()
await WIKI.models.renderers.refreshRenderersFromDisk()
await WIKI.models.searchEngines.refreshSearchEnginesFromDisk()
await WIKI.models.storage.refreshTargetsFromDisk()
await WIKI.auth.activateStrategies()

@@ -18,9 +18,13 @@ module.exports = {
// Init Console (default)
let loggerConsoleModule = require(`../modules/logging/console`)
loggerConsoleModule.init(logger)
this.loggers['console'] = loggerConsoleModule
logger.add(new winston.transports.Console({
level: WIKI.config.logLevel,
prettyPrint: true,
colorize: true,
silent: false,
timestamp: true
}))
// _.forOwn(_.omitBy(WIKI.config.logging.loggers, s => s.enabled === false), (loggerConfig, loggerKey) => {
// let loggerModule = require(`../modules/logging/${loggerKey}`)
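A side note on the console transport in this hunk: `prettyPrint`, `colorize` and `timestamp` are winston 2.x transport options, while the constructor-style `logger.add(new winston.transports.Console(...))` call is winston 3 usage, where those keys are ignored in favour of the format API. A minimal sketch of the same intent under winston 3 (an illustration, not part of this commit; it reuses the `winston` import and `logger` instance already in scope in this file):

// Console transport with colorized, timestamped output (winston 3 style)
logger.add(new winston.transports.Console({
  level: WIKI.config.logLevel,
  silent: false,
  format: winston.format.combine(
    winston.format.colorize(),
    winston.format.timestamp(),
    winston.format.simple()
  )
}))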

@@ -65,6 +65,14 @@ exports.up = knex => {
table.string('createdAt').notNullable()
table.string('updatedAt').notNullable()
})
// LOGGING ----------------------------
.createTable('loggers', table => {
table.increments('id').primary()
table.string('key').notNullable().unique()
table.boolean('isEnabled').notNullable().defaultTo(false)
table.string('level').notNullable().defaultTo('warn')
table.json('config')
})
// PAGE HISTORY ------------------------
.createTable('pageHistory', table => {
table.increments('id').primary()
@@ -103,6 +111,13 @@ exports.up = knex => {
table.boolean('isEnabled').notNullable().defaultTo(false)
table.json('config')
})
// SEARCH ------------------------------
.createTable('searchEngines', table => {
table.increments('id').primary()
table.string('key').notNullable().unique()
table.boolean('isEnabled').notNullable().defaultTo(false)
table.json('config')
})
// SETTINGS ----------------------------
.createTable('settings', table => {
table.increments('id').primary()

@@ -0,0 +1,110 @@
const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')
/* global WIKI */
/**
* Logger model
*/
module.exports = class Logger extends Model {
static get tableName() { return 'loggers' }
static get jsonSchema () {
return {
type: 'object',
required: ['key', 'isEnabled'],
properties: {
id: {type: 'integer'},
key: {type: 'string'},
isEnabled: {type: 'boolean'},
level: {type: 'string'},
config: {type: 'object'}
}
}
}
static async getLoggers() {
return WIKI.models.loggers.query()
}
static async refreshLoggersFromDisk() {
let trx
try {
const dbLoggers = await WIKI.models.loggers.query()
// -> Fetch definitions from disk
const loggersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/logging'))
let diskLoggers = []
for (let dir of loggersDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/logging', dir, 'definition.yml'), 'utf8')
diskLoggers.push(yaml.safeLoad(def))
}
WIKI.data.loggers = diskLoggers.map(logger => ({
...logger,
props: commonHelper.parseModuleProps(logger.props)
}))
// -> Insert new loggers
let newLoggers = []
for (let logger of WIKI.data.loggers) {
if (!_.some(dbLoggers, ['key', logger.key])) {
newLoggers.push({
key: logger.key,
isEnabled: (logger.key === 'console'),
level: logger.defaultLevel,
config: _.transform(logger.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
const loggerConfig = _.get(_.find(dbLoggers, ['key', logger.key]), 'config', {})
await WIKI.models.loggers.query().patch({
config: _.transform(logger.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, loggerConfig)
}).where('key', logger.key)
}
}
if (newLoggers.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let logger of newLoggers) {
await WIKI.models.loggers.query(trx).insert(logger)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newLoggers.length} new loggers: [ OK ]`)
} else {
WIKI.logger.info(`No new loggers found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new loggers: [ FAILED ]`)
WIKI.logger.error(err)
if (trx) {
trx.rollback()
}
}
}
static async pageEvent({ event, page }) {
const loggers = await WIKI.models.loggers.query().where('isEnabled', true)
if (loggers && loggers.length > 0) {
_.forEach(loggers, logger => {
WIKI.queue.job.syncStorage.add({
event,
logger,
page
}, {
removeOnComplete: true
})
})
}
}
}
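Both `refresh*FromDisk` methods in this commit use the same `_.transform` pattern twice: once to build a brand-new config from prop defaults, and once to back-fill newly added props into a config already stored in the database. A standalone illustration of that merge step (prop names `token` and `level` are made up for the example; the `{ default }` shape is what the `value.default` access above implies `parseModuleProps` returns):

const _ = require('lodash')

// Hypothetical parsed props for a logger module
const props = {
  token: { type: 'string', default: '' },
  level: { type: 'string', default: 'warn' }
}

// Config already stored for this logger; it predates the new `level` prop
const existingConfig = { token: 'abc123' }

// Same pattern as the patch branch of refreshLoggersFromDisk:
// keep existing values, add defaults only for missing keys
const merged = _.transform(props, (result, value, key) => {
  if (!_.has(result, key)) {
    _.set(result, key, value.default)
  }
  return result
}, existingConfig)

console.log(merged) // { token: 'abc123', level: 'warn' }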

@@ -0,0 +1,109 @@
const Model = require('objection').Model
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const yaml = require('js-yaml')
const commonHelper = require('../helpers/common')
/* global WIKI */
/**
* SearchEngine model
*/
module.exports = class SearchEngine extends Model {
static get tableName() { return 'searchEngines' }
static get jsonSchema () {
return {
type: 'object',
required: ['key', 'isEnabled'],
properties: {
id: {type: 'integer'},
key: {type: 'string'},
isEnabled: {type: 'boolean'},
config: {type: 'object'}
}
}
}
static async getSearchEngines() {
return WIKI.models.searchEngines.query()
}
static async refreshSearchEnginesFromDisk() {
let trx
try {
const dbSearchEngines = await WIKI.models.searchEngines.query()
// -> Fetch definitions from disk
const searchEnginesDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/search'))
let diskSearchEngines = []
for (let dir of searchEnginesDirs) {
const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/search', dir, 'definition.yml'), 'utf8')
diskSearchEngines.push(yaml.safeLoad(def))
}
WIKI.data.searchEngines = diskSearchEngines.map(searchEngine => ({
...searchEngine,
props: commonHelper.parseModuleProps(searchEngine.props)
}))
// -> Insert new searchEngines
let newSearchEngines = []
for (let searchEngine of WIKI.data.searchEngines) {
if (!_.some(dbSearchEngines, ['key', searchEngine.key])) {
newSearchEngines.push({
key: searchEngine.key,
isEnabled: false,
config: _.transform(searchEngine.props, (result, value, key) => {
_.set(result, key, value.default)
return result
}, {})
})
} else {
const searchEngineConfig = _.get(_.find(dbSearchEngines, ['key', searchEngine.key]), 'config', {})
await WIKI.models.searchEngines.query().patch({
config: _.transform(searchEngine.props, (result, value, key) => {
if (!_.has(result, key)) {
_.set(result, key, value.default)
}
return result
}, searchEngineConfig)
}).where('key', searchEngine.key)
}
}
if (newSearchEngines.length > 0) {
trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
for (let searchEngine of newSearchEngines) {
await WIKI.models.searchEngines.query(trx).insert(searchEngine)
}
await trx.commit()
WIKI.logger.info(`Loaded ${newSearchEngines.length} new search engines: [ OK ]`)
} else {
WIKI.logger.info(`No new search engines found: [ SKIPPED ]`)
}
} catch (err) {
WIKI.logger.error(`Failed to scan or load new search engines: [ FAILED ]`)
WIKI.logger.error(err)
if (trx) {
trx.rollback()
}
}
}
static async pageEvent({ event, page }) {
const searchEngines = await WIKI.models.searchEngines.query().where('isEnabled', true)
if (searchEngines && searchEngines.length > 0) {
_.forEach(searchEngines, searchEngine => {
WIKI.queue.job.syncStorage.add({
event,
searchEngine,
page
page
}, {
removeOnComplete: true
})
})
}
}
}

@@ -0,0 +1,8 @@
key: airbrake
title: Airbrake
description: Airbrake is the leading exception reporting service, currently providing error monitoring for 50,000 applications with support for 18 programming languages.
author: requarks.io
logo: https://static.requarks.io/logo/airbrake.svg
website: https://airbrake.io/
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Airbrake
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -0,0 +1,12 @@
key: bugsnag
title: Bugsnag
description: Bugsnag monitors apps for errors that impact customers & reports all diagnostic data.
author: requarks.io
logo: https://static.requarks.io/logo/bugsnag.svg
website: https://www.bugsnag.com/
defaultLevel: warn
props:
key:
type: String
title: Key
hint: Bugsnag Project Notifier key

@@ -7,9 +7,6 @@ const _ = require('lodash')
// ------------------------------------
module.exports = {
key: 'bugsnag',
title: 'Bugsnag',
props: ['key'],
init (logger, conf) {
let BugsnagLogger = winston.transports.BugsnagLogger = function (options) {
this.name = 'bugsnagLogger'

@@ -1,22 +0,0 @@
const winston = require('winston')
/* global WIKI */
// ------------------------------------
// Console
// ------------------------------------
module.exports = {
key: 'console',
title: 'Console',
props: [],
init (logger, conf) {
logger.add(new winston.transports.Console({
level: WIKI.config.logLevel,
prettyPrint: true,
colorize: true,
silent: false,
timestamp: true
}))
}
}

@@ -0,0 +1,8 @@
key: disk
title: Log Files
description: Outputs log files on local disk.
author: requarks.io
logo: https://static.requarks.io/logo/local-fs.svg
website: https://wiki.js.org
defaultLevel: info
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Disk
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}
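The disk module above ships as an empty stub in this commit. Purely as a sketch of what its `init` could eventually wire up (the `path` config key and the file options are assumptions, not anything defined here), a winston file transport would look roughly like:

const winston = require('winston')

// ------------------------------------
// Disk (sketch only, not part of this commit)
// ------------------------------------
module.exports = {
  init (logger, conf) {
    // conf is the logger's stored config; `path` is a hypothetical prop
    logger.add(new winston.transports.File({
      filename: (conf && conf.path) || 'logs/wiki.log',
      maxsize: 10 * 1024 * 1024, // rotate at ~10 MB
      maxFiles: 5
    }))
  }
}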

@@ -0,0 +1,8 @@
key: eventlog
title: Windows Event Log
description: Report logs to the Windows Event Log
author: requarks.io
logo: https://static.requarks.io/logo/windows.svg
website: https://wiki.js.org
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Windows Event Log
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -0,0 +1,16 @@
key: loggly
title: Loggly
description: Log Analysis / Log Management by Loggly, the world's most popular log analysis & monitoring in the cloud.
author: requarks.io
logo: https://static.requarks.io/logo/loggly.svg
website: https://www.loggly.com/
defaultLevel: warn
props:
token:
type: String
title: Token
hint: Loggly Token
subdomain:
type: String
title: Subdomain
hint: Loggly Subdomain

@@ -5,9 +5,6 @@ const winston = require('winston')
// ------------------------------------
module.exports = {
key: 'loggly',
title: 'Loggly',
props: ['token', 'subdomain'],
init (logger, conf) {
require('winston-loggly-bulk')
logger.add(new winston.transports.Loggly({

@@ -0,0 +1,8 @@
key: logstash
title: Logstash
description: Logstash is an open source tool for collecting, parsing, and storing logs for future use.
author: requarks.io
logo: https://static.requarks.io/logo/logstash.svg
website: https://www.elastic.co/products/logstash
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Logstash
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -0,0 +1,8 @@
key: newrelic
title: New Relic
description: New Relic's digital intelligence platform lets developers, ops, and tech teams measure and monitor the performance of their applications and infrastructure.
author: requarks.io
logo: https://static.requarks.io/logo/newrelic.svg
website: https://newrelic.com/
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// New Relic
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -0,0 +1,14 @@
key: papertrail
title: Papertrail
description: Frustration-free log management.
author: requarks.io
logo: https://static.requarks.io/logo/papertrail.svg
website: https://papertrailapp.com/
defaultLevel: warn
props:
host:
type: String
title: Host
port:
type: Number
title: Port

@@ -5,9 +5,6 @@ const winston = require('winston')
// ------------------------------------
module.exports = {
key: 'papertrail',
title: 'Papertrail',
props: ['host', 'port'],
init (logger, conf) {
require('winston-papertrail').Papertrail // eslint-disable-line no-unused-expressions
logger.add(new winston.transports.Papertrail({

@@ -0,0 +1,8 @@
key: raygun
title: Raygun
description: Error, crash and performance monitoring for software teams.
author: requarks.io
logo: https://static.requarks.io/logo/raygun.svg
website: https://raygun.com/
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Raygun
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -0,0 +1,11 @@
key: rollbar
title: Rollbar
description: Rollbar provides real-time error alerting & debugging tools for developers.
author: requarks.io
logo: https://static.requarks.io/logo/rollbar.svg
website: https://rollbar.com/
defaultLevel: warn
props:
key:
type: String
title: Key

@@ -7,9 +7,6 @@ const _ = require('lodash')
// ------------------------------------
module.exports = {
key: 'rollbar',
title: 'Rollbar',
props: ['key'],
init (logger, conf) {
let RollbarLogger = winston.transports.RollbarLogger = function (options) {
this.name = 'rollbarLogger'

@@ -0,0 +1,11 @@
key: sentry
title: Sentry
description: Open-source error tracking that helps developers monitor and fix crashes in real time.
author: requarks.io
logo: https://static.requarks.io/logo/sentry.svg
website: https://sentry.io/
defaultLevel: warn
props:
key:
type: String
title: Key

@@ -6,9 +6,6 @@ const winston = require('winston')
// ------------------------------------
module.exports = {
key: 'sentry',
title: 'Sentry',
props: ['key'],
init (logger, conf) {
let SentryLogger = winston.transports.SentryLogger = function (options) {
this.name = 'sentryLogger'

@@ -0,0 +1,8 @@
key: syslog
title: Syslog
description: Syslog is a way for network devices to send event messages to a logging server.
author: requarks.io
logo: https://static.requarks.io/logo/syslog.svg
website: https://wiki.js.org
defaultLevel: warn
props: {}

@@ -0,0 +1,9 @@
// ------------------------------------
// Syslog
// ------------------------------------
module.exports = {
init (logger, conf) {
}
}

@@ -142,9 +142,16 @@ module.exports = () => {
await WIKI.models.editors.refreshEditorsFromDisk()
await WIKI.models.editors.query().patch({ isEnabled: true }).where('key', 'markdown')
// Load loggers
await WIKI.models.loggers.refreshLoggersFromDisk()
// Load renderers
await WIKI.models.renderers.refreshRenderersFromDisk()
// Load search engines + enable default
await WIKI.models.searchEngines.refreshSearchEnginesFromDisk()
await WIKI.models.searchEngines.query().patch({ isEnabled: true }).where('key', 'db')
// Load storage targets
await WIKI.models.storage.refreshTargetsFromDisk()
