refactor: remove .mjs + fix scheduler + create,delete site

scarlett
NGPixel 2 weeks ago
parent 68e6a2787a
commit 6f91c2e052
No known key found for this signature in database

@ -0,0 +1,17 @@
/**
* API Routes
*/
/**
 * API Routes
 *
 * Registers every API sub-route plugin (in a fixed order) and exposes a
 * root health-check endpoint returning `{ ok: true }`.
 */
async function routes(app, options) {
  // Child route plugins, in registration order: [plugin module, plugin options]
  const children = [
    [import('./authentication.js'), undefined],
    [import('./locales.js'), { prefix: '/locales' }],
    [import('./pages.js'), undefined],
    [import('./sites.js'), { prefix: '/sites' }],
    [import('./system.js'), { prefix: '/system' }],
    [import('./users.js'), { prefix: '/users' }]
  ]
  for (const [plugin, opts] of children) {
    if (opts) {
      app.register(plugin, opts)
    } else {
      app.register(plugin)
    }
  }
  // Health check / API root
  app.get('/', async (req, reply) => {
    return { ok: true }
  })
}
export default routes

@ -1,17 +0,0 @@
/**
* API Routes
*/
async function routes (app, options) {
app.register(import('./authentication.mjs'))
app.register(import('./locales.mjs'), { prefix: '/locales' })
app.register(import('./pages.mjs'))
app.register(import('./sites.mjs'), { prefix: '/sites' })
app.register(import('./system.mjs'), { prefix: '/system' })
app.register(import('./users.mjs'), { prefix: '/users' })
app.get('/', async (req, reply) => {
return { ok: true }
})
}
export default routes

@ -1,9 +1,10 @@
import { validate as uuidValidate } from 'uuid'
import { CustomError } from '../helpers/common.js'
/**
* Sites API Routes
*/
async function routes(app, options) {
async function routes(app) {
app.get(
'/',
{
@ -15,7 +16,7 @@ async function routes(app, options) {
tags: ['Sites']
}
},
async (req, reply) => {
async () => {
const sites = await WIKI.models.sites.getAllSites()
return sites.map((s) => ({
...s.config,
@ -46,7 +47,7 @@ async function routes(app, options) {
}
}
},
async (req, reply) => {
async (req) => {
let site
if (req.params.siteId === 'current' && req.hostname) {
site = await WIKI.models.sites.getSiteByHostname({ hostname: req.hostname })
@ -73,7 +74,7 @@ async function routes(app, options) {
'/',
{
config: {
// permissions: ['create:sites', 'manage:sites']
permissions: ['create:sites', 'manage:sites']
},
schema: {
summary: 'Create a new site',
@ -106,6 +107,9 @@ async function routes(app, options) {
description: 'Site created successfully',
type: 'object',
properties: {
ok: {
type: 'boolean'
},
message: {
type: 'string'
},
@ -119,12 +123,46 @@ async function routes(app, options) {
}
},
async (req, reply) => {
const result = await WIKI.models.sites.createSite(req.body.hostname, {
title: req.body.title
})
return {
message: 'Site created successfully.',
id: result.id
// -> Validate inputs
if (
!req.body.hostname ||
req.body.hostname.length < 1 ||
!/^(\\*)|([a-z0-9\-.:]+)$/.test(req.body.hostname)
) {
throw new CustomError('siteCreateInvalidHostname', 'Invalid Site Hostname')
}
if (!req.body.title || req.body.title.length < 1 || !/^[^<>"]+$/.test(req.body.title)) {
throw new CustomError('siteCreateInvalidTitle', 'Invalid Site Title')
}
// -> Check for duplicate hostname
if (!(await WIKI.models.sites.isHostnameUnique(req.body.hostname))) {
if (req.body.hostname === '*') {
throw new CustomError(
'siteCreateDuplicateCatchAll',
'A site with a catch-all hostname already exists! Cannot have 2 catch-all hostnames.'
)
} else {
throw new CustomError(
'siteCreateDuplicateHostname',
'A site with a this hostname already exists! Cannot have duplicate hostnames.'
)
}
}
// -> Create site
try {
const result = await WIKI.models.sites.createSite(req.body.hostname, {
title: req.body.title
})
return {
ok: true,
message: 'Site created successfully.',
id: result.id
}
} catch (err) {
WIKI.logger.warn(err)
return reply.internalServerError()
}
}
)

@ -9,7 +9,7 @@ import {
pages as pagesTable,
tags as tagsTable,
users as usersTable
} from '../db/schema.mjs'
} from '../db/schema.js'
/**
* System API Routes
@ -19,7 +19,7 @@ async function routes(app, options) {
'/info',
{
config: {
permissions: ['read:dashboard', 'manage:sites']
permissions: ['read:dashboard']
},
schema: {
summary: 'System Info',
@ -69,6 +69,32 @@ async function routes(app, options) {
return WIKI.config.flags
}
)
app.get(
  '/checkForUpdate',
  {
    config: {
      permissions: ['read:dashboard']
    },
    schema: {
      summary: 'Check for Updates',
      tags: ['System']
    }
  },
  // Queue a version-check job, wait for it to finish, then report versions.
  async () => {
    const job = await WIKI.scheduler.addJob({
      task: 'checkVersion',
      maxRetries: 0,
      promise: true
    })
    // Resolves once the scheduler signals job completion
    await job.promise
    return {
      current: WIKI.version,
      latest: WIKI.config.update.version,
      latestDate: WIKI.config.update.versionDate
    }
  }
)
}
export default routes

@ -1,11 +1,11 @@
import { validate as uuidValidate } from 'uuid'
import { replyWithFile } from '../helpers/common.mjs'
import { replyWithFile } from '../helpers/common.js'
import path from 'node:path'
/**
* _site Routes
*/
async function routes (app, options) {
async function routes(app, options) {
const siteAssetsPath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'assets')
app.get('/:siteId/:resource', async (req, reply) => {
@ -24,7 +24,10 @@ async function routes (app, options) {
case 'logo': {
if (site.config.assets.logo) {
// TODO: Fetch from db if not in disk cache
return replyWithFile(reply, path.join(siteAssetsPath, `logo-${site.id}.${site.config.assets.logoExt}`))
return replyWithFile(
reply,
path.join(siteAssetsPath, `logo-${site.id}.${site.config.assets.logoExt}`)
)
} else {
return replyWithFile(reply, path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
}
@ -32,7 +35,10 @@ async function routes (app, options) {
case 'favicon': {
if (site.config.assets.favicon) {
// TODO: Fetch from db if not in disk cache
return replyWithFile(reply, path.join(siteAssetsPath, `favicon-${site.id}.${site.config.assets.faviconExt}`))
return replyWithFile(
reply,
path.join(siteAssetsPath, `favicon-${site.id}.${site.config.assets.faviconExt}`)
)
} else {
return replyWithFile(reply, path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
}

@ -1,7 +1,7 @@
import { toMerged } from 'es-toolkit/object'
import { isPlainObject } from 'es-toolkit/predicate'
import chalk from 'chalk'
import cfgHelper from '../helpers/config.mjs'
import cfgHelper from '../helpers/config.js'
import fs from 'node:fs/promises'
import path from 'node:path'
import yaml from 'js-yaml'
@ -11,7 +11,7 @@ export default {
/**
* Load root config from disk
*/
async init (silent = false) {
async init(silent = false) {
const confPaths = {
config: path.join(WIKI.ROOTPATH, 'config.yml'),
data: path.join(WIKI.SERVERPATH, 'base.yml')
@ -29,11 +29,7 @@ export default {
let appdata = {}
try {
appconfig = yaml.load(
cfgHelper.parseConfigValue(
await fs.readFile(confPaths.config, 'utf8')
)
)
appconfig = yaml.load(cfgHelper.parseConfigValue(await fs.readFile(confPaths.config, 'utf8')))
appdata = yaml.load(await fs.readFile(confPaths.data, 'utf8'))
if (!silent) {
console.info(chalk.green.bold('OK'))
@ -42,7 +38,9 @@ export default {
console.error(chalk.red.bold('FAILED'))
console.error(err.message)
console.error(chalk.red.bold('>>> Unable to read configuration file! Did you create the config.yml file?'))
console.error(
chalk.red.bold('>>> Unable to read configuration file! Did you create the config.yml file?')
)
process.exit(1)
}
@ -62,7 +60,9 @@ export default {
// Load package info
const packageInfo = JSON.parse(await fs.readFile(path.join(WIKI.SERVERPATH, 'package.json'), 'utf-8'))
const packageInfo = JSON.parse(
await fs.readFile(path.join(WIKI.SERVERPATH, 'package.json'), 'utf-8')
)
// Load DB Password from Docker Secret File
if (process.env.DB_PASS_FILE) {
@ -72,7 +72,11 @@ export default {
try {
appconfig.db.pass = await fs.readFile(process.env.DB_PASS_FILE, 'utf8').trim()
} catch (err) {
console.error(chalk.red.bold('>>> Failed to read Docker Secret File using path defined in DB_PASS_FILE env variable!'))
console.error(
chalk.red.bold(
'>>> Failed to read Docker Secret File using path defined in DB_PASS_FILE env variable!'
)
)
console.error(err.message)
process.exit(1)
}
@ -82,13 +86,13 @@ export default {
WIKI.data = appdata
WIKI.version = packageInfo.version
WIKI.releaseDate = packageInfo.releaseDate
WIKI.devMode = (packageInfo.dev === true)
WIKI.devMode = packageInfo.dev === true
},
/**
* Load config from DB
*/
async loadFromDb () {
async loadFromDb() {
WIKI.logger.info('Loading settings from DB...')
const conf = await WIKI.models.settings.getConfig()
if (conf) {
@ -104,7 +108,7 @@ export default {
* @param {Array} keys Array of keys to save
* @returns Promise
*/
async saveToDb (keys, propagate = true) {
async saveToDb(keys, propagate = true) {
try {
for (const key of keys) {
let value = WIKI.config[key] ?? null
@ -126,7 +130,7 @@ export default {
/**
* Initialize DB tables with default values
*/
async initDbValues () {
async initDbValues() {
const ids = {
groupAdminId: uuid(),
groupUserId: WIKI.data.systemIds.usersGroupId,
@ -142,11 +146,12 @@ export default {
await WIKI.models.groups.init(ids)
await WIKI.models.authentication.init(ids)
await WIKI.models.users.init(ids)
await WIKI.models.jobs.init()
},
/**
* Subscribe to HA propagation events
*/
subscribeToEvents () {
subscribeToEvents() {
WIKI.events.inbound.on('reloadConfig', async () => {
await WIKI.configSvc.loadFromDb()
})

@ -5,12 +5,11 @@ import { setTimeout } from 'node:timers/promises'
import { drizzle } from 'drizzle-orm/node-postgres'
import { migrate } from 'drizzle-orm/node-postgres/migrator'
import { Pool } from 'pg'
import PGPubSub from 'pg-pubsub'
import semver from 'semver'
import { relations } from '../db/relations.mjs'
import { createDeferred } from '../helpers/common.mjs'
// import migrationSource from '../db/migrator-source.mjs'
import { relations } from '../db/relations.js'
import { createDeferred } from '../helpers/common.js'
// import migrationSource from '../db/migrator-source.js'
// const migrateFromLegacy = require('../db/legacy')
/**
@ -18,7 +17,7 @@ import { createDeferred } from '../helpers/common.mjs'
*/
export default {
pool: null,
listener: null,
pubsubClient: null,
config: null,
VERSION: null,
LEGACY: false,
@ -92,7 +91,7 @@ export default {
// Initialize Postgres Pool
this.pool = new Pool({
application_name: 'Wiki.js',
application_name: `Wiki.js - ${WIKI.INSTANCE_ID}:MAIN`,
...this.config,
...(workerMode ? { min: 0, max: 1 } : WIKI.config.pool),
options: `-c search_path=${WIKI.config.db.schema}`
@ -136,49 +135,45 @@ export default {
* Subscribe to database LISTEN / NOTIFY for multi-instances events
*/
async subscribeToNotifications() {
let connSettings = this.knex.client.connectionSettings
if (typeof connSettings === 'string') {
const encodedName = encodeURIComponent(`Wiki.js - ${WIKI.INSTANCE_ID}:PSUB`)
if (connSettings.indexOf('?') > 0) {
connSettings = `${connSettings}&ApplicationName=${encodedName}`
} else {
connSettings = `${connSettings}?ApplicationName=${encodedName}`
}
} else {
connSettings.application_name = `Wiki.js - ${WIKI.INSTANCE_ID}:PSUB`
}
this.listener = new PGPubSub(connSettings, {
log(ev) {
WIKI.logger.debug(ev)
}
})
const connectionAppName = `Wiki.js - ${WIKI.INSTANCE_ID}:EVENTS`
this.pubsubClient = await this.pool.connect()
await this.pubsubClient.query(`SET application_name = '${connectionAppName}'`)
// -> Outbound events handling
this.listener.addChannel('wiki', (payload) => {
if ('event' in payload && payload.source !== WIKI.INSTANCE_ID) {
WIKI.logger.info(`Received event ${payload.event} from instance ${payload.source}: [ OK ]`)
WIKI.events.inbound.emit(payload.event, payload.value)
this.pubsubClient.query('LISTEN wiki')
this.pubsubClient.on('notification', (msg) => {
if (msg.channel !== 'wiki') {
return
}
try {
const decoded = JSON.parse(msg.payload)
if ('event' in decoded && decoded.source !== WIKI.INSTANCE_ID) {
WIKI.logger.info(
`Received event ${decoded.event} from instance ${decoded.source}: [ OK ]`
)
WIKI.events.inbound.emit(decoded.event, decoded.value)
}
} catch {}
})
WIKI.events.outbound.onAny(this.notifyViaDB)
// -> Listen to inbound events
WIKI.auth.subscribeToEvents()
// WIKI.auth.subscribeToEvents()
WIKI.configSvc.subscribeToEvents()
WIKI.db.pages.subscribeToEvents()
// WIKI.db.pages.subscribeToEvents()
WIKI.logger.info('PG PubSub Listener initialized successfully: [ OK ]')
WIKI.logger.info('Event Listener initialized successfully: [ OK ]')
},
/**
* Unsubscribe from database LISTEN / NOTIFY
*/
async unsubscribeToNotifications() {
if (this.listener) {
async unsubscribeFromNotifications() {
if (this.pubsubClient) {
WIKI.events.outbound.offAny(this.notifyViaDB)
WIKI.events.inbound.removeAllListeners()
this.listener.close()
this.pubsubClient.release(true)
}
},
/**
@ -188,11 +183,14 @@ export default {
* @param {object} value Payload of the event
*/
notifyViaDB(event, value) {
WIKI.db.listener.publish('wiki', {
source: WIKI.INSTANCE_ID,
event,
value
})
this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
'wiki',
JSON.stringify({
source: WIKI.INSTANCE_ID,
event,
value
})
])
},
/**
* Attempt initial connection

@ -1,119 +0,0 @@
import { padEnd } from 'lodash-es'
import eventemitter2 from 'eventemitter2'
import NodeCache from 'node-cache'
import asar from './asar.mjs'
import db from './db.mjs'
import extensions from './extensions.mjs'
import scheduler from './scheduler.mjs'
import servers from './servers.mjs'
import metrics from './metrics.mjs'
let isShuttingDown = false
export default {
async init () {
WIKI.logger.info('=======================================')
WIKI.logger.info(`= Wiki.js ${padEnd(WIKI.version + ' ', 29, '=')}`)
WIKI.logger.info('=======================================')
WIKI.logger.info('Initializing...')
WIKI.logger.info(`Running node.js ${process.version}`)
WIKI.db = await db.init()
try {
await WIKI.configSvc.loadFromDb()
await WIKI.configSvc.applyFlags()
} catch (err) {
WIKI.logger.error('Database Initialization Error: ' + err.message)
if (WIKI.IS_DEBUG) {
WIKI.logger.error(err)
}
process.exit(1)
}
this.bootWeb()
},
/**
* Pre-Web Boot Sequence
*/
async preBootWeb () {
try {
WIKI.cache = new NodeCache({ checkperiod: 0 })
WIKI.scheduler = await scheduler.init()
WIKI.servers = servers
WIKI.events = {
inbound: new eventemitter2.EventEmitter2(),
outbound: new eventemitter2.EventEmitter2()
}
WIKI.extensions = extensions
WIKI.asar = asar
WIKI.metrics = await metrics.init()
} catch (err) {
WIKI.logger.error(err)
process.exit(1)
}
},
/**
* Boot Web Process
*/
async bootWeb () {
try {
await this.preBootWeb()
await (await import('../web.mjs')).init()
this.postBootWeb()
} catch (err) {
WIKI.logger.error(err)
process.exit(1)
}
},
/**
* Post-Web Boot Sequence
*/
async postBootWeb () {
await WIKI.db.locales.refreshFromDisk()
await WIKI.db.analytics.refreshProvidersFromDisk()
await WIKI.db.authentication.refreshStrategiesFromDisk()
await WIKI.db.commentProviders.refreshProvidersFromDisk()
await WIKI.db.renderers.refreshRenderersFromDisk()
await WIKI.db.storage.refreshTargetsFromDisk()
await WIKI.extensions.init()
await WIKI.auth.activateStrategies()
await WIKI.db.commentProviders.initProvider()
await WIKI.db.locales.reloadCache()
await WIKI.db.sites.reloadCache()
await WIKI.db.storage.initTargets()
await WIKI.db.subscribeToNotifications()
await WIKI.scheduler.start()
},
/**
* Graceful shutdown
*/
async shutdown (devMode = false) {
if (isShuttingDown) { return }
isShuttingDown = true
if (WIKI.servers) {
await WIKI.servers.stopServers()
}
if (WIKI.scheduler) {
await WIKI.scheduler.stop()
}
if (WIKI.models) {
await WIKI.db.unsubscribeToNotifications()
if (WIKI.db.knex) {
await WIKI.db.knex.destroy()
}
}
if (WIKI.asar) {
await WIKI.asar.unload()
}
if (!devMode) {
WIKI.logger.info('Terminating process...')
process.exit(0)
}
}
}

@ -0,0 +1,378 @@
import { DynamicThreadPool } from 'poolifier'
import os from 'node:os'
import fs from 'node:fs/promises'
import path from 'node:path'
import { CronExpressionParser } from 'cron-parser'
import { DateTime } from 'luxon'
import { v4 as uuid } from 'uuid'
import { createDeferred } from '../helpers/common.js'
import { camelCase } from 'es-toolkit/string'
import { remove } from 'es-toolkit/array'
import {
jobs as jobsTable,
jobLock as jobLockTable,
jobSchedule as jobScheduleTable
} from '../db/schema.js'
import { eq, inArray, sql } from 'drizzle-orm'
/**
 * Job Scheduler
 *
 * Distributed job queue backed by Postgres:
 * - jobs are claimed by deleting rows with FOR UPDATE SKIP LOCKED inside a transaction
 * - instances coordinate via LISTEN/NOTIFY on the `scheduler` channel
 * - heavy tasks run in a dynamic worker-thread pool; "simple" tasks run in-process
 * - recurring (cron) jobs are materialized into the queue by whichever instance
 *   wins the `jobLock` advisory row
 */
export default {
  // Dynamic pool of worker threads for tasks that run off the main thread
  workerPool: null,
  // Dedicated PG connection used for LISTEN/NOTIFY on the `scheduler` channel
  pubsubClient: null,
  // Upper bound of concurrently processed jobs on this instance
  maxWorkers: 1,
  // Number of jobs currently being processed on this instance
  activeWorkers: 0,
  // setInterval handle for the job polling loop
  pollingRef: null,
  // setInterval handle for the scheduled-jobs (cron) check loop
  scheduledRef: null,
  // Map of taskName -> function for "simple" tasks that run in-process
  tasks: null,
  // Pending { id, added, resolve, reject } entries for jobs awaited via addJob({ promise: true })
  completionPromises: [],
  /**
   * Initialize the worker pool and load in-process task handlers from disk.
   * @returns {Promise<Object>} this module, for chaining
   */
  async init() {
    // 'auto' = one worker per CPU core, minus one reserved for the main thread
    this.maxWorkers =
      WIKI.config.scheduler.workers === 'auto'
        ? os.cpus().length - 1
        : WIKI.config.scheduler.workers
    if (this.maxWorkers < 1) {
      this.maxWorkers = 1
    }
    WIKI.logger.info(`Initializing Worker Pool (Limit: ${this.maxWorkers})...`)
    this.workerPool = new DynamicThreadPool(
      1,
      this.maxWorkers,
      path.join(WIKI.SERVERPATH, 'worker.js'),
      {
        errorHandler: (err) => WIKI.logger.warn(err),
        exitHandler: () => WIKI.logger.debug('A worker has gone offline.'),
        onlineHandler: () => WIKI.logger.debug('New worker is online.')
      }
    )
    // Load "simple" tasks: each file in tasks/simple exports a `task` function,
    // keyed here by the camelCased filename
    this.tasks = {}
    for (const f of await fs.readdir(path.join(WIKI.SERVERPATH, 'tasks/simple'))) {
      const taskName = camelCase(f.replace('.js', ''))
      this.tasks[taskName] = (await import(path.join(WIKI.SERVERPATH, 'tasks/simple', f))).task
    }
    return this
  },
  /**
   * Start the scheduler: subscribe to the `scheduler` NOTIFY channel and start
   * the cron-check and polling interval loops.
   */
  async start() {
    WIKI.logger.info('Starting Scheduler...')
    // Dedicated LISTEN connection, labeled for visibility in pg_stat_activity
    const connectionAppName = `Wiki.js - ${WIKI.INSTANCE_ID}:SCHEDULER`
    this.pubsubClient = await WIKI.dbManager.pool.connect()
    await this.pubsubClient.query(`SET application_name = '${connectionAppName}'`)
    // -> Outbound events handling
    this.pubsubClient.query('LISTEN scheduler')
    this.pubsubClient.on('notification', async (msg) => {
      if (msg.channel !== 'scheduler') {
        return
      }
      try {
        const decoded = JSON.parse(msg.payload)
        switch (decoded?.event) {
          case 'newJob': {
            // Attempt to pick up work immediately if capacity is available.
            // NOTE(review): activeWorkers is incremented/decremented around a single
            // processJob() call which may itself claim several jobs — confirm the
            // accounting matches the intended concurrency limit.
            if (this.activeWorkers < this.maxWorkers) {
              this.activeWorkers++
              await this.processJob()
              this.activeWorkers--
            }
            break
          }
          case 'jobCompleted': {
            // Settle the deferred promise of a caller waiting on this job id
            const jobPromise = this.completionPromises.find((p) => p.id === decoded.id)
            if (jobPromise) {
              if (decoded.state === 'success') {
                jobPromise.resolve()
              } else {
                jobPromise.reject(new Error(decoded.errorMessage))
              }
              // Defer removal to the next tick so settlement handlers run first
              setTimeout(() => {
                remove(this.completionPromises, (p) => p.id === decoded.id)
              })
            }
            break
          }
        }
      } catch {}
    })
    // -> Start scheduled jobs check
    // NOTE(review): addScheduled() is intentionally not awaited here; errors are
    // handled inside it.
    this.scheduledRef = setInterval(async () => {
      this.addScheduled()
    }, WIKI.config.scheduler.scheduledCheck * 1000)
    // -> Add scheduled jobs on init
    await this.addScheduled()
    // -> Start job polling
    this.pollingRef = setInterval(async () => {
      this.processJob()
    }, WIKI.config.scheduler.pollingCheck * 1000)
    WIKI.logger.info('Scheduler: [ STARTED ]')
  },
  /**
   * Add a job to the scheduler
   * @param {Object} opts - Job options
   * @param {string} opts.task - The task name to execute.
   * @param {Object} [opts.payload={}] - An optional data object to pass to the job.
   * @param {Date} [opts.waitUntil] - An optional datetime after which the task is allowed to run.
   * @param {Number} [opts.maxRetries] - The number of times this job can be restarted upon failure. Uses server defaults if not provided.
   * @param {Boolean} [opts.isScheduled=false] - Whether this is a scheduled job.
   * @param {Boolean} [opts.notify=true] - Whether to notify all instances that a new job is available.
   * @param {Boolean} [opts.promise=false] - Whether to return a promise property that resolves when the job completes.
   * @returns {Promise} Resolves to { id, promise? }; undefined if the insert failed (error is logged, not rethrown).
   */
  async addJob({
    task,
    payload = {},
    waitUntil,
    maxRetries,
    isScheduled = false,
    notify = true,
    promise = false
  }) {
    try {
      const jobId = uuid()
      const jobDefer = createDeferred()
      if (promise) {
        // Track the deferred so the jobCompleted notification can settle it
        this.completionPromises.push({
          id: jobId,
          added: DateTime.utc(),
          resolve: jobDefer.resolve,
          reject: jobDefer.reject
        })
      }
      await WIKI.db.insert(jobsTable).values({
        id: jobId,
        task,
        // Tasks without an in-process handler are dispatched to a worker thread
        useWorker: !(typeof this.tasks[task] === 'function'),
        payload,
        maxRetries: maxRetries ?? WIKI.config.scheduler.maxRetries,
        isScheduled,
        waitUntil,
        createdBy: WIKI.INSTANCE_ID
      })
      if (notify) {
        // Wake up all instances so an idle one can pick the job up immediately
        this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
          'scheduler',
          JSON.stringify({
            source: WIKI.INSTANCE_ID,
            event: 'newJob',
            id: jobId
          })
        ])
      }
      return {
        id: jobId,
        ...(promise && { promise: jobDefer.promise })
      }
    } catch (err) {
      WIKI.logger.warn(`Failed to add job to scheduler: ${err.message}`)
    }
  },
  /**
   * Claim and execute pending jobs, up to the remaining worker capacity.
   *
   * Jobs are claimed by DELETE ... FOR UPDATE SKIP LOCKED inside a transaction,
   * so concurrent instances never pick the same job. Each attempt is recorded
   * in jobHistory; failures are retried with exponential backoff up to maxRetries.
   */
  async processJob() {
    const jobIds = []
    try {
      const availableWorkers = this.maxWorkers - this.activeWorkers
      if (availableWorkers < 1) {
        WIKI.logger.debug('All workers are busy. Cannot process more jobs at the moment.')
        return
      }
      await WIKI.db.transaction(async (trx) => {
        // Atomically claim up to `availableWorkers` runnable jobs
        const jobs = await trx
          .delete(jobsTable)
          .where(
            inArray(
              jobsTable.id,
              sql`(SELECT id FROM jobs WHERE ("waitUntil" IS NULL OR "waitUntil" <= NOW()) ORDER BY id FOR UPDATE SKIP LOCKED LIMIT ${availableWorkers})`
            )
          )
          .returning()
        if (jobs && jobs.length > 0) {
          for (const job of jobs) {
            WIKI.logger.info(`Processing new job ${job.id}: ${job.task}...`)
            // -> Add to Job History
            // NOTE(review): knex-style call in an otherwise drizzle codebase —
            // confirm WIKI.db.knex still exists after the knex removal refactor.
            await WIKI.db
              .knex('jobHistory')
              .insert({
                id: job.id,
                task: job.task,
                state: 'active',
                useWorker: job.useWorker,
                wasScheduled: job.isScheduled,
                payload: job.payload,
                attempt: job.retries + 1,
                maxRetries: job.maxRetries,
                executedBy: WIKI.INSTANCE_ID,
                createdAt: job.createdAt
              })
              .onConflict('id')
              .merge({
                executedBy: WIKI.INSTANCE_ID,
                startedAt: new Date()
              })
            jobIds.push(job.id)
            // -> Start working on it
            try {
              if (job.useWorker) {
                // Run in a worker thread; the worker receives the full job row
                await this.workerPool.execute({
                  ...job,
                  INSTANCE_ID: `${WIKI.INSTANCE_ID}:WKR`
                })
              } else {
                // Run in-process
                await this.tasks[job.task](job.payload)
              }
              // -> Update job history (success)
              await WIKI.db
                .knex('jobHistory')
                .where({
                  id: job.id
                })
                .update({
                  state: 'completed',
                  completedAt: new Date()
                })
              WIKI.logger.info(`Completed job ${job.id}: ${job.task}`)
              // Notify all instances so any awaited addJob promise can resolve
              this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
                'scheduler',
                JSON.stringify({
                  source: WIKI.INSTANCE_ID,
                  event: 'jobCompleted',
                  state: 'success',
                  id: job.id
                })
              ])
            } catch (err) {
              WIKI.logger.warn(`Failed to complete job ${job.id}: ${job.task} [ FAILED ]`)
              WIKI.logger.warn(err)
              // -> Update job history (fail)
              await WIKI.db
                .knex('jobHistory')
                .where({
                  id: job.id
                })
                .update({
                  attempt: job.retries + 1,
                  state: 'failed',
                  lastErrorMessage: err.message
                })
              this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
                'scheduler',
                JSON.stringify({
                  source: WIKI.INSTANCE_ID,
                  event: 'jobCompleted',
                  state: 'failed',
                  id: job.id,
                  errorMessage: err.message
                })
              ])
              // -> Reschedule for retry
              if (job.retries < job.maxRetries) {
                // Exponential backoff: retryBackoff * 2^retries seconds
                const backoffDelay = 2 ** job.retries * WIKI.config.scheduler.retryBackoff
                // NOTE(review): knex-style invocation on a drizzle transaction —
                // confirm `trx` is callable this way, or migrate to trx.insert(jobsTable).
                await trx('jobs').insert({
                  ...job,
                  retries: job.retries + 1,
                  waitUntil: DateTime.utc().plus({ seconds: backoffDelay }).toJSDate(),
                  updatedAt: new Date()
                })
                WIKI.logger.warn(`Rescheduling new attempt for job ${job.id}: ${job.task}...`)
              }
            }
          }
        }
      })
    } catch (err) {
      WIKI.logger.warn(err)
      // Mark any jobs claimed in this run as interrupted
      // NOTE(review): this update is not awaited — fire-and-forget; confirm intended.
      if (jobIds && jobIds.length > 0) {
        WIKI.db.knex('jobHistory').whereIn('id', jobIds).update({
          state: 'interrupted',
          lastErrorMessage: err.message
        })
      }
    }
  },
  /**
   * Materialize upcoming cron iterations from jobSchedule into the jobs queue.
   *
   * Only one instance performs this at a time: it must win the 'cron' row in
   * jobLock (stale if last checked more than 5 minutes ago, claimed with
   * FOR UPDATE SKIP LOCKED).
   */
  async addScheduled() {
    try {
      await WIKI.db.transaction(async (trx) => {
        // -> Acquire lock
        const jobLock = await trx
          .update(jobLockTable)
          .set({
            lastCheckedBy: WIKI.INSTANCE_ID,
            lastCheckedAt: DateTime.utc().toISO()
          })
          .where(
            eq(
              jobLockTable.key,
              sql`(SELECT "jobLock"."key" FROM "jobLock" WHERE "jobLock"."key" = 'cron' AND "jobLock"."lastCheckedAt" <= ${DateTime.utc().minus({ minutes: 5 }).toISO()} FOR UPDATE SKIP LOCKED LIMIT 1)`
            )
          )
        // rowCount > 0 means this instance won the lock
        if (jobLock.rowCount > 0) {
          WIKI.logger.info('Scheduling future planned jobs...')
          const scheduledJobs = await WIKI.db.select().from(jobScheduleTable)
          if (scheduledJobs?.length > 0) {
            // -> Get existing scheduled jobs
            const existingJobs = await WIKI.db
              .select()
              .from(jobsTable)
              .where(eq(jobsTable.isScheduled, true))
            let totalAdded = 0
            for (const job of scheduledJobs) {
              // -> Get next planned iterations
              // NOTE(review): confirm these option names against the installed
              // cron-parser version (newer versions use currentDate/endDate).
              const plannedIterations = CronExpressionParser.parse(job.cron, {
                startDate: DateTime.utc().toISO(),
                endDate: DateTime.utc().plus({ days: 1, minutes: 5 }).toISO(),
                tz: 'UTC'
              })
              // -> Add a maximum of 10 future iterations for a single task
              let addedFutureJobs = 0
              while (true) {
                try {
                  const next = plannedIterations.next()
                  // -> Ensure this iteration isn't already scheduled
                  // NOTE(review): `next.value`/`next.done` vs `next.toISOString()` are
                  // used inconsistently below — one of the two shapes (iterator result
                  // vs CronDate) must be wrong for the installed cron-parser; verify.
                  if (
                    !existingJobs.some(
                      (j) => j.task === job.task && j.waitUntil.getTime() === next.value.getTime()
                    )
                  ) {
                    // Fire-and-forget; `useWorker` here is ignored by addJob (it
                    // recomputes it) — NOTE(review): harmless but misleading.
                    this.addJob({
                      task: job.task,
                      useWorker: !(typeof this.tasks[job.task] === 'function'),
                      payload: job.payload,
                      isScheduled: true,
                      waitUntil: next.toISOString(),
                      notify: false
                    })
                    addedFutureJobs++
                    totalAdded++
                  }
                  // -> No more iterations for this period or max iterations count reached
                  if (next.done || addedFutureJobs >= 10) {
                    break
                  }
                } catch {
                  // Iterator exhausted (cron-parser throws past endDate)
                  break
                }
              }
            }
            if (totalAdded > 0) {
              WIKI.logger.info(`Scheduled ${totalAdded} new future planned jobs: [ OK ]`)
            } else {
              WIKI.logger.info('No new future planned jobs to schedule: [ OK ]')
            }
          }
        }
      })
    } catch (err) {
      WIKI.logger.warn(err)
    }
  },
  /**
   * Stop the scheduler: cancel both interval loops and destroy the worker pool.
   */
  async stop() {
    WIKI.logger.info('Stopping Scheduler...')
    clearInterval(this.scheduledRef)
    clearInterval(this.pollingRef)
    await this.workerPool.destroy()
    WIKI.logger.info('Scheduler: [ STOPPED ]')
  }
}

@ -73,7 +73,7 @@ CREATE TABLE "jobHistory" (
"state" "jobHistoryState" NOT NULL,
"useWorker" boolean DEFAULT false NOT NULL,
"wasScheduled" boolean DEFAULT false NOT NULL,
"payload" jsonb NOT NULL,
"payload" jsonb,
"attempt" integer DEFAULT 1 NOT NULL,
"maxRetries" integer DEFAULT 0 NOT NULL,
"lastErrorMessage" text,
@ -94,7 +94,7 @@ CREATE TABLE "jobSchedule" (
"task" varchar(255) NOT NULL,
"cron" varchar(255) NOT NULL,
"type" varchar(255) DEFAULT 'system' NOT NULL,
"payload" jsonb NOT NULL,
"payload" jsonb,
"createdAt" timestamp DEFAULT now() NOT NULL,
"updatedAt" timestamp DEFAULT now() NOT NULL
);
@ -103,7 +103,7 @@ CREATE TABLE "jobs" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid(),
"task" varchar(255) NOT NULL,
"useWorker" boolean DEFAULT false NOT NULL,
"payload" jsonb NOT NULL,
"payload" jsonb,
"retries" integer DEFAULT 0 NOT NULL,
"maxRetries" integer DEFAULT 0 NOT NULL,
"waitUntil" timestamp,

@ -1,7 +1,7 @@
{
"version": "8",
"dialect": "postgres",
"id": "ded00708-02ea-49a3-8d2b-647fe5fcda7e",
"id": "1491778a-3cba-43df-815f-398b66d95ed5",
"prevIds": [
"061e8c84-e05e-40b0-a074-7a56bd794fc7"
],
@ -890,7 +890,7 @@
{
"type": "jsonb",
"typeSchema": null,
"notNull": true,
"notNull": false,
"dimensions": 0,
"default": null,
"generated": null,
@ -1085,7 +1085,7 @@
{
"type": "jsonb",
"typeSchema": null,
"notNull": true,
"notNull": false,
"dimensions": 0,
"default": null,
"generated": null,
@ -1163,7 +1163,7 @@
{
"type": "jsonb",
"typeSchema": null,
"notNull": true,
"notNull": false,
"dimensions": 0,
"default": null,
"generated": null,

@ -1,5 +1,5 @@
import { defineRelations } from 'drizzle-orm'
import * as schema from './schema.mjs'
import * as schema from './schema.js'
export const relations = defineRelations(schema, (r) => ({
users: {

@ -0,0 +1,429 @@
import { sql } from 'drizzle-orm'
import {
bigint,
boolean,
bytea,
customType,
index,
integer,
jsonb,
pgEnum,
pgTable,
primaryKey,
text,
timestamp,
uniqueIndex,
uuid,
varchar
} from 'drizzle-orm/pg-core'
// == CUSTOM TYPES =====================
const ltree = customType({
dataType() {
return 'ltree'
}
})
const tsvector = customType({
dataType() {
return 'tsvector'
}
})
// == TABLES ===========================
// API KEYS ----------------------------
export const apiKeys = pgTable('apiKeys', {
id: uuid().primaryKey().defaultRandom(),
name: varchar({ length: 255 }).notNull(),
key: text().notNull(),
expiration: timestamp().notNull().defaultNow(),
isRevoked: boolean().notNull().default(false),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// ASSETS ------------------------------
export const assetKindEnum = pgEnum('assetKind', ['document', 'image', 'other'])
export const assets = pgTable(
'assets',
{
id: uuid().primaryKey().defaultRandom(),
fileName: varchar({ length: 255 }).notNull(),
fileExt: varchar({ length: 255 }).notNull(),
isSystem: boolean().notNull().default(false),
kind: assetKindEnum().notNull().default('other'),
mimeType: varchar({ length: 255 }).notNull().default('application/octet-stream'),
fileSize: bigint({ mode: 'number' }), // in bytes
meta: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
data: bytea(),
preview: bytea(),
storageInfo: jsonb(),
authorId: uuid()
.notNull()
.references(() => users.id),
siteId: uuid()
.notNull()
.references(() => sites.id)
},
(table) => [index('assets_siteId_idx').on(table.siteId)]
)
// AUTHENTICATION ----------------------
export const authentication = pgTable('authentication', {
id: uuid().primaryKey().defaultRandom(),
module: varchar({ length: 255 }).notNull(),
isEnabled: boolean().notNull().default(false),
displayName: varchar({ length: 255 }).notNull().default(''),
config: jsonb().notNull().default({}),
registration: boolean().notNull().default(false),
allowedEmailRegex: varchar({ length: 255 }).notNull().default(''),
autoEnrollGroups: uuid().array().default([])
})
// BLOCKS ------------------------------
export const blocks = pgTable(
'blocks',
{
id: uuid().primaryKey().defaultRandom(),
block: varchar({ length: 255 }).notNull(),
name: varchar({ length: 255 }).notNull(),
description: varchar({ length: 255 }).notNull(),
icon: varchar({ length: 255 }).notNull(),
isEnabled: boolean().notNull().default(false),
isCustom: boolean().notNull().default(false),
config: jsonb().notNull().default({}),
siteId: uuid()
.notNull()
.references(() => sites.id)
},
(table) => [index('blocks_siteId_idx').on(table.siteId)]
)
// GROUPS ------------------------------
export const groups = pgTable('groups', {
id: uuid().primaryKey().defaultRandom(),
name: varchar({ length: 255 }).notNull(),
permissions: jsonb().notNull(),
rules: jsonb().notNull(),
redirectOnLogin: varchar({ length: 255 }).notNull().default(''),
redirectOnFirstLogin: varchar({ length: 255 }).notNull().default(''),
redirectOnLogout: varchar({ length: 255 }).notNull().default(''),
isSystem: boolean().notNull().default(false),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// JOB HISTORY -------------------------
// Lifecycle states of a job-history entry.
export const jobHistoryStateEnum = pgEnum('jobHistoryState', [
  'active',
  'completed',
  'failed',
  'interrupted'
])
// Execution log: one row per job attempt, upserted by the scheduler as the
// job moves through its lifecycle.
export const jobHistory = pgTable('jobHistory', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  state: jobHistoryStateEnum().notNull(),
  useWorker: boolean().notNull().default(false),
  wasScheduled: boolean().notNull().default(false),
  payload: jsonb(), // nullable: not all tasks carry a payload
  attempt: integer().notNull().default(1),
  maxRetries: integer().notNull().default(0),
  lastErrorMessage: text(),
  executedBy: varchar({ length: 255 }), // INSTANCE_ID of the instance that ran the job
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow(),
  // NOTE(review): notNull with no default, but the scheduler inserts rows while
  // the job is still 'active' (before completion) — confirm inserts always
  // provide completedAt, or consider making this column nullable.
  completedAt: timestamp().notNull()
})
// JOB SCHEDULE ------------------------
// Recurring (cron) job definitions; materialized into `jobs` rows by the scheduler.
export const jobSchedule = pgTable('jobSchedule', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  cron: varchar({ length: 255 }).notNull(), // cron expression, evaluated in UTC by the scheduler
  type: varchar({ length: 255 }).notNull().default('system'),
  payload: jsonb(), // nullable: optional data passed to the task
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})
// JOB LOCK ----------------------------
// Advisory lock rows (the scheduler uses key = 'cron') used to elect which
// instance materializes scheduled jobs.
export const jobLock = pgTable('jobLock', {
  key: varchar({ length: 255 }).primaryKey(),
  lastCheckedBy: varchar({ length: 255 }), // INSTANCE_ID of the last lock holder
  lastCheckedAt: timestamp().notNull().defaultNow()
})
// JOBS --------------------------------
// Pending job queue; rows are deleted (FOR UPDATE SKIP LOCKED) as jobs are claimed.
export const jobs = pgTable('jobs', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  useWorker: boolean().notNull().default(false), // run in a worker thread instead of in-process
  payload: jsonb(), // nullable: optional data passed to the task
  retries: integer().notNull().default(0),
  maxRetries: integer().notNull().default(0),
  waitUntil: timestamp(), // do not run before this time (used for retries and cron iterations)
  isScheduled: boolean().notNull().default(false), // true if created from jobSchedule
  createdBy: varchar({ length: 255 }), // INSTANCE_ID of the creating instance
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})
// LOCALES -----------------------------
export const locales = pgTable(
'locales',
{
code: varchar({ length: 255 }).primaryKey(),
name: varchar({ length: 255 }).notNull(),
nativeName: varchar({ length: 255 }).notNull(),
language: varchar({ length: 8 }).notNull(), // Unicode language subtag
region: varchar({ length: 3 }).notNull(), // Unicode region subtag
script: varchar({ length: 4 }).notNull(), // Unicode script subtag
isRTL: boolean().notNull().default(false),
strings: jsonb().notNull().default([]),
completeness: integer().notNull().default(0),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
},
(table) => [index('locales_language_idx').on(table.language)]
)
// NAVIGATION --------------------------
// Per-site navigation trees; items hold the full menu structure as JSON.
export const navigation = pgTable(
  'navigation',
  {
    id: uuid().primaryKey().defaultRandom(),
    items: jsonb().notNull().default([]), // ordered menu entries
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [index('navigation_siteId_idx').on(table.siteId)]
)
// PAGES ------------------------------
export const pagePublishStateEnum = pgEnum('pagePublishState', ['draft', 'published', 'scheduled'])
// Wiki pages: content, render cache, search vector and ownership references.
export const pages = pgTable(
  'pages',
  {
    id: uuid().primaryKey().defaultRandom(),
    locale: ltree('locale').notNull(),
    path: varchar({ length: 255 }).notNull(),
    hash: varchar({ length: 255 }).notNull(),
    alias: varchar({ length: 255 }),
    title: varchar({ length: 255 }).notNull(),
    description: varchar({ length: 255 }),
    icon: varchar({ length: 255 }),
    publishState: pagePublishStateEnum('publishState').notNull().default('draft'),
    publishStartDate: timestamp(), // nullable: only used for scheduled publishing
    publishEndDate: timestamp(),
    config: jsonb().notNull().default({}),
    relations: jsonb().notNull().default([]),
    content: text(), // raw source content
    render: text(), // rendered output cache
    searchContent: text(),
    ts: tsvector('ts'), // full-text search vector, GIN-indexed below
    tags: text()
      .array()
      .notNull()
      .default(sql`ARRAY[]::text[]`),
    toc: jsonb(),
    editor: varchar({ length: 255 }).notNull(),
    contentType: varchar({ length: 255 }).notNull(),
    isBrowsable: boolean().notNull().default(true),
    isSearchable: boolean().notNull().default(true),
    // Generated column: searchable only when not a draft AND flagged searchable.
    isSearchableComputed: boolean('isSearchableComputed').generatedAlwaysAs(
      () => sql`${pages.publishState} != 'draft' AND ${pages.isSearchable}`
    ),
    password: varchar({ length: 255 }),
    ratingScore: integer().notNull().default(0),
    // FIX: was `timestamp().notNull().defaultNow()` — a rating *count* cannot be a
    // timestamp; declared as an integer counter to match ratingScore.
    ratingCount: integer().notNull().default(0),
    scripts: jsonb().notNull().default({}),
    historyData: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    authorId: uuid()
      .notNull()
      .references(() => users.id),
    creatorId: uuid()
      .notNull()
      .references(() => users.id),
    ownerId: uuid()
      .notNull()
      .references(() => users.id),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('pages_authorId_idx').on(table.authorId),
    index('pages_creatorId_idx').on(table.creatorId),
    index('pages_ownerId_idx').on(table.ownerId),
    index('pages_siteId_idx').on(table.siteId),
    index('pages_ts_idx').using('gin', table.ts),
    index('pages_tags_idx').using('gin', table.tags),
    index('pages_isSearchableComputed_idx').on(table.isSearchableComputed)
  ]
)
// SETTINGS ----------------------------
// Global key/value configuration store (loaded by configSvc.loadFromDb).
export const settings = pgTable('settings', {
  key: varchar({ length: 255 }).notNull().primaryKey(), // setting name
  value: jsonb().notNull().default({}) // arbitrary JSON value for the setting
})
// SESSIONS ----------------------------
// HTTP session storage; userId is nullable for anonymous sessions.
export const sessions = pgTable(
  'sessions',
  {
    id: varchar({ length: 255 }).primaryKey(), // session token / identifier
    userId: uuid().references(() => users.id), // nullable: session may not be authenticated
    data: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow()
  },
  (table) => [index('sessions_userId_idx').on(table.userId)]
)
// SITES -------------------------------
// Site definitions; one row per hostname. config holds all per-site settings as JSON.
// NOTE(review): unlike sibling tables there is no updatedAt column — confirm intentional.
export const sites = pgTable('sites', {
  id: uuid().primaryKey().defaultRandom(),
  hostname: varchar({ length: 255 }).notNull().unique(), // used for request-to-site resolution
  isEnabled: boolean().notNull().default(false),
  config: jsonb().notNull(),
  createdAt: timestamp().notNull().defaultNow()
})
// TAGS --------------------------------
// Per-site tag registry; (siteId, tag) pairs are unique.
export const tags = pgTable(
  'tags',
  {
    id: uuid().primaryKey().defaultRandom(),
    tag: varchar({ length: 255 }).notNull(),
    usageCount: integer().notNull().default(0), // number of pages/assets using this tag — presumably; confirm against callers
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('tags_siteId_idx').on(table.siteId),
    uniqueIndex('tags_composite_idx').on(table.siteId, table.tag)
  ]
)
// TREE --------------------------------
export const treeTypeEnum = pgEnum('treeType', ['folder', 'page', 'asset'])
export const treeNavigationModeEnum = pgEnum('treeNavigationMode', [
  'inherit',
  'override',
  'overrideExact',
  'hide',
  'hideExact'
])
// Hierarchical site tree (folders, pages, assets); folderPath/locale use Postgres ltree.
export const tree = pgTable(
  'tree',
  {
    id: uuid().primaryKey().defaultRandom(),
    folderPath: ltree('folderPath'), // parent path; null at the root
    fileName: varchar({ length: 255 }).notNull(),
    hash: varchar({ length: 255 }).notNull(),
    // FIX: column was named 'tree' (treeTypeEnum('tree')) while every other enum
    // column in this schema is named after its key — renamed to 'type'.
    // NOTE(review): if 'tree' was a deliberate legacy column name, revert this.
    type: treeTypeEnum('type').notNull(),
    locale: ltree('locale').notNull(),
    title: varchar({ length: 255 }).notNull(),
    navigationMode: treeNavigationModeEnum('navigationMode').notNull().default('inherit'),
    navigationId: uuid(),
    tags: text()
      .array()
      .notNull()
      .default(sql`ARRAY[]::text[]`),
    meta: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('tree_folderpath_idx').on(table.folderPath),
    index('tree_folderpath_gist_idx').using('gist', table.folderPath), // gist supports ltree ancestor/descendant queries
    index('tree_fileName_idx').on(table.fileName),
    index('tree_hash_idx').on(table.hash),
    index('tree_type_idx').on(table.type),
    index('tree_locale_idx').using('gist', table.locale),
    index('tree_navigationMode_idx').on(table.navigationMode),
    index('tree_navigationId_idx').on(table.navigationId),
    index('tree_tags_idx').using('gin', table.tags),
    index('tree_siteId_idx').on(table.siteId)
  ]
)
// USER AVATARS ------------------------
// Avatar binary data, keyed by the user id (no defaultRandom: id mirrors users.id — presumably; confirm).
export const userAvatars = pgTable('userAvatars', {
  id: uuid().primaryKey(),
  data: bytea().notNull() // raw image bytes
})
// USER KEYS ---------------------------
// Short-lived user tokens (e.g. verification / reset keys) with an expiry.
export const userKeys = pgTable(
  'userKeys',
  {
    id: uuid().primaryKey().defaultRandom(),
    kind: varchar({ length: 255 }).notNull(), // token purpose/category
    token: varchar({ length: 255 }).notNull(),
    meta: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    validUntil: timestamp().notNull(), // expiry; no default — must be set by the caller
    userId: uuid()
      .notNull()
      .references(() => users.id)
  },
  (table) => [index('userKeys_userId_idx').on(table.userId)]
)
// USERS -------------------------------
// User accounts; group membership lives in the userGroups relation table below.
export const users = pgTable(
  'users',
  {
    id: uuid().primaryKey().defaultRandom(),
    email: varchar({ length: 255 }).notNull().unique(),
    name: varchar({ length: 255 }).notNull(),
    auth: jsonb().notNull().default({}), // per-strategy authentication data
    meta: jsonb().notNull().default({}),
    passkeys: jsonb().notNull().default({}),
    prefs: jsonb().notNull().default({}),
    hasAvatar: boolean().notNull().default(false), // true when a userAvatars row exists — presumably; confirm
    isActive: boolean().notNull().default(false),
    isSystem: boolean().notNull().default(false),
    isVerified: boolean().notNull().default(false),
    lastLoginAt: timestamp(), // nullable: never logged in
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow()
  },
  (table) => [index('users_lastLoginAt_idx').on(table.lastLoginAt)]
)
// == RELATION TABLES ==================
// USER GROUPS -------------------------
// Many-to-many join between users and groups; rows cascade on either side's deletion.
export const userGroups = pgTable(
  'userGroups',
  {
    userId: uuid()
      .notNull()
      .references(() => users.id, { onDelete: 'cascade' }),
    groupId: uuid()
      .notNull()
      .references(() => groups.id, { onDelete: 'cascade' })
  },
  (table) => [
    primaryKey({ columns: [table.userId, table.groupId] }),
    index('userGroups_userId_idx').on(table.userId),
    index('userGroups_groupId_idx').on(table.groupId),
    index('userGroups_composite_idx').on(table.userId, table.groupId)
  ]
)

@ -1,338 +0,0 @@
import { sql } from 'drizzle-orm'
import { bigint, boolean, bytea, customType, index, integer, jsonb, pgEnum, pgTable, primaryKey, text, timestamp, uniqueIndex, uuid, varchar } from 'drizzle-orm/pg-core'
// == CUSTOM TYPES =====================
const ltree = customType({
dataType () {
return 'ltree'
}
})
const tsvector = customType({
dataType () {
return 'tsvector'
}
})
// == TABLES ===========================
// API KEYS ----------------------------
export const apiKeys = pgTable('apiKeys', {
id: uuid().primaryKey().defaultRandom(),
name: varchar({ length: 255 }).notNull(),
key: text().notNull(),
expiration: timestamp().notNull().defaultNow(),
isRevoked: boolean().notNull().default(false),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// ASSETS ------------------------------
export const assetKindEnum = pgEnum('assetKind', ['document', 'image', 'other'])
export const assets = pgTable('assets', {
id: uuid().primaryKey().defaultRandom(),
fileName: varchar({ length: 255 }).notNull(),
fileExt: varchar({ length: 255 }).notNull(),
isSystem: boolean().notNull().default(false),
kind: assetKindEnum().notNull().default('other'),
mimeType: varchar({ length: 255 }).notNull().default('application/octet-stream'),
fileSize: bigint({ mode: 'number' }), // in bytes
meta: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
data: bytea(),
preview: bytea(),
storageInfo: jsonb(),
authorId: uuid().notNull().references(() => users.id),
siteId: uuid().notNull().references(() => sites.id),
}, (table) => [
index('assets_siteId_idx').on(table.siteId)
])
// AUTHENTICATION ----------------------
export const authentication = pgTable('authentication', {
id: uuid().primaryKey().defaultRandom(),
module: varchar({ length: 255 }).notNull(),
isEnabled: boolean().notNull().default(false),
displayName: varchar({ length: 255 }).notNull().default(''),
config: jsonb().notNull().default({}),
registration: boolean().notNull().default(false),
allowedEmailRegex: varchar({ length: 255 }).notNull().default(''),
autoEnrollGroups: uuid().array().default([])
})
// BLOCKS ------------------------------
export const blocks = pgTable('blocks', {
id: uuid().primaryKey().defaultRandom(),
block: varchar({ length: 255 }).notNull(),
name: varchar({ length: 255 }).notNull(),
description: varchar({ length: 255 }).notNull(),
icon: varchar({ length: 255 }).notNull(),
isEnabled: boolean().notNull().default(false),
isCustom: boolean().notNull().default(false),
config: jsonb().notNull().default({}),
siteId: uuid().notNull().references(() => sites.id),
}, (table) => [
index('blocks_siteId_idx').on(table.siteId)
])
// GROUPS ------------------------------
export const groups = pgTable('groups', {
id: uuid().primaryKey().defaultRandom(),
name: varchar({ length: 255 }).notNull(),
permissions: jsonb().notNull(),
rules: jsonb().notNull(),
redirectOnLogin: varchar({ length: 255 }).notNull().default(''),
redirectOnFirstLogin: varchar({ length: 255 }).notNull().default(''),
redirectOnLogout: varchar({ length: 255 }).notNull().default(''),
isSystem: boolean().notNull().default(false),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// JOB HISTORY -------------------------
export const jobHistoryStateEnum = pgEnum('jobHistoryState', ['active', 'completed', 'failed', 'interrupted'])
export const jobHistory = pgTable('jobHistory', {
id: uuid().primaryKey().defaultRandom(),
task: varchar({ length: 255 }).notNull(),
state: jobHistoryStateEnum().notNull(),
useWorker: boolean().notNull().default(false),
wasScheduled: boolean().notNull().default(false),
payload: jsonb().notNull(),
attempt: integer().notNull().default(1),
maxRetries: integer().notNull().default(0),
lastErrorMessage: text(),
executedBy: varchar({ length: 255 }),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
completedAt: timestamp().notNull()
})
// JOB SCHEDULE ------------------------
export const jobSchedule = pgTable('jobSchedule', {
id: uuid().primaryKey().defaultRandom(),
task: varchar({ length: 255 }).notNull(),
cron: varchar({ length: 255 }).notNull(),
type: varchar({ length: 255 }).notNull().default('system'),
payload: jsonb().notNull(),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// JOB LOCK ----------------------------
export const jobLock = pgTable('jobLock', {
key: varchar({ length: 255 }).primaryKey(),
lastCheckedBy: varchar({ length: 255 }),
lastCheckedAt: timestamp().notNull().defaultNow()
})
// JOBS --------------------------------
export const jobs = pgTable('jobs', {
id: uuid().primaryKey().defaultRandom(),
task: varchar({ length: 255 }).notNull(),
useWorker: boolean().notNull().default(false),
payload: jsonb().notNull(),
retries: integer().notNull().default(0),
maxRetries: integer().notNull().default(0),
waitUntil: timestamp(),
isScheduled: boolean().notNull().default(false),
createdBy: varchar({ length: 255 }),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
})
// LOCALES -----------------------------
export const locales = pgTable('locales', {
code: varchar({ length: 255 }).primaryKey(),
name: varchar({ length: 255 }).notNull(),
nativeName: varchar({ length: 255 }).notNull(),
language: varchar({ length: 8 }).notNull(), // Unicode language subtag
region: varchar({ length: 3 }).notNull(), // Unicode region subtag
script: varchar({ length: 4 }).notNull(), // Unicode script subtag
isRTL: boolean().notNull().default(false),
strings: jsonb().notNull().default([]),
completeness: integer().notNull().default(0),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
}, (table) => [
index('locales_language_idx').on(table.language)
])
// NAVIGATION --------------------------
export const navigation = pgTable('navigation', {
id: uuid().primaryKey().defaultRandom(),
items: jsonb().notNull().default([]),
siteId: uuid().notNull().references(() => sites.id),
}, (table) => [
index('navigation_siteId_idx').on(table.siteId)
])
// PAGES ------------------------------
export const pagePublishStateEnum = pgEnum('pagePublishState', ['draft', 'published', 'scheduled'])
export const pages = pgTable('pages', {
id: uuid().primaryKey().defaultRandom(),
locale: ltree('locale').notNull(),
path: varchar({ length: 255 }).notNull(),
hash: varchar({ length: 255 }).notNull(),
alias: varchar({ length: 255 }),
title: varchar({ length: 255 }).notNull(),
description: varchar({ length: 255 }),
icon: varchar({ length: 255 }),
publishState: pagePublishStateEnum('publishState').notNull().default('draft'),
publishStartDate: timestamp(),
publishEndDate: timestamp(),
config: jsonb().notNull().default({}),
relations: jsonb().notNull().default([]),
content: text(),
render: text(),
searchContent: text(),
ts: tsvector('ts'),
tags: text().array().notNull().default(sql`ARRAY[]::text[]`),
toc: jsonb(),
editor: varchar({ length: 255 }).notNull(),
contentType: varchar({ length: 255 }).notNull(),
isBrowsable: boolean().notNull().default(true),
isSearchable: boolean().notNull().default(true),
isSearchableComputed: boolean('isSearchableComputed').generatedAlwaysAs(() => sql`${pages.publishState} != 'draft' AND ${pages.isSearchable}`),
password: varchar({ length: 255 }),
ratingScore: integer().notNull().default(0),
ratingCount: timestamp().notNull().defaultNow(),
scripts: jsonb().notNull().default({}),
historyData: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
authorId: uuid().notNull().references(() => users.id),
creatorId: uuid().notNull().references(() => users.id),
ownerId: uuid().notNull().references(() => users.id),
siteId: uuid().notNull().references(() => sites.id),
}, (table) => [
index('pages_authorId_idx').on(table.authorId),
index('pages_creatorId_idx').on(table.creatorId),
index('pages_ownerId_idx').on(table.ownerId),
index('pages_siteId_idx').on(table.siteId),
index('pages_ts_idx').using('gin', table.ts),
index('pages_tags_idx').using('gin', table.tags),
index('pages_isSearchableComputed_idx').on(table.isSearchableComputed)
])
// SETTINGS ----------------------------
export const settings = pgTable('settings', {
key: varchar({ length: 255 }).notNull().primaryKey(),
value: jsonb().notNull().default({})
})
// SESSIONS ----------------------------
export const sessions = pgTable('sessions', {
id: varchar({ length: 255 }).primaryKey(),
userId: uuid().references(() => users.id),
data: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
}, (table) => [
index('sessions_userId_idx').on(table.userId)
])
// SITES -------------------------------
export const sites = pgTable('sites', {
id: uuid().primaryKey().defaultRandom(),
hostname: varchar({ length: 255 }).notNull().unique(),
isEnabled: boolean().notNull().default(false),
config: jsonb().notNull(),
createdAt: timestamp().notNull().defaultNow()
})
// TAGS --------------------------------
export const tags = pgTable('tags', {
id: uuid().primaryKey().defaultRandom(),
tag: varchar({ length: 255 }).notNull(),
usageCount: integer().notNull().default(0),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
siteId: uuid().notNull().references(() => sites.id)
}, (table) => [
index('tags_siteId_idx').on(table.siteId),
uniqueIndex('tags_composite_idx').on(table.siteId, table.tag)
])
// TREE --------------------------------
export const treeTypeEnum = pgEnum('treeType', ['folder', 'page', 'asset'])
export const treeNavigationModeEnum = pgEnum('treeNavigationMode', ['inherit', 'override', 'overrideExact', 'hide', 'hideExact'])
export const tree = pgTable('tree', {
id: uuid().primaryKey().defaultRandom(),
folderPath: ltree('folderPath'),
fileName: varchar({ length: 255 }).notNull(),
hash: varchar({ length: 255 }).notNull(),
type: treeTypeEnum('tree').notNull(),
locale: ltree('locale').notNull(),
title: varchar({ length: 255 }).notNull(),
navigationMode: treeNavigationModeEnum('navigationMode').notNull().default('inherit'),
navigationId: uuid(),
tags: text().array().notNull().default(sql`ARRAY[]::text[]`),
meta: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow(),
siteId: uuid().notNull().references(() => sites.id)
}, (table) => [
index('tree_folderpath_idx').on(table.folderPath),
index('tree_folderpath_gist_idx').using('gist', table.folderPath),
index('tree_fileName_idx').on(table.fileName),
index('tree_hash_idx').on(table.hash),
index('tree_type_idx').on(table.type),
index('tree_locale_idx').using('gist', table.locale),
index('tree_navigationMode_idx').on(table.navigationMode),
index('tree_navigationId_idx').on(table.navigationId),
index('tree_tags_idx').using('gin', table.tags),
index('tree_siteId_idx').on(table.siteId)
])
// USER AVATARS ------------------------
export const userAvatars = pgTable('userAvatars', {
id: uuid().primaryKey(),
data: bytea().notNull()
})
// USER KEYS ---------------------------
export const userKeys = pgTable('userKeys', {
id: uuid().primaryKey().defaultRandom(),
kind: varchar({ length: 255 }).notNull(),
token: varchar({ length: 255 }).notNull(),
meta: jsonb().notNull().default({}),
createdAt: timestamp().notNull().defaultNow(),
validUntil: timestamp().notNull(),
userId: uuid().notNull().references(() => users.id)
}, (table) => [
index('userKeys_userId_idx').on(table.userId)
])
// USERS -------------------------------
export const users = pgTable('users', {
id: uuid().primaryKey().defaultRandom(),
email: varchar({ length: 255 }).notNull().unique(),
name: varchar({ length: 255 }).notNull(),
auth: jsonb().notNull().default({}),
meta: jsonb().notNull().default({}),
passkeys: jsonb().notNull().default({}),
prefs: jsonb().notNull().default({}),
hasAvatar: boolean().notNull().default(false),
isActive: boolean().notNull().default(false),
isSystem: boolean().notNull().default(false),
isVerified: boolean().notNull().default(false),
lastLoginAt: timestamp(),
createdAt: timestamp().notNull().defaultNow(),
updatedAt: timestamp().notNull().defaultNow()
}, (table) => [
index('users_lastLoginAt_idx').on(table.lastLoginAt)
])
// == RELATION TABLES ==================
// USER GROUPS -------------------------
export const userGroups = pgTable('userGroups', {
userId: uuid().notNull().references(() => users.id, { onDelete: 'cascade' }),
groupId: uuid().notNull().references(() => groups.id, { onDelete: 'cascade' })
}, (table) => [
primaryKey({ columns: [table.userId, table.groupId] }),
index('userGroups_userId_idx').on(table.userId),
index('userGroups_groupId_idx').on(table.groupId),
index('userGroups_composite_idx').on(table.userId, table.groupId)
])

@ -5,21 +5,29 @@ import mime from 'mime'
import fs from 'node:fs'
/* eslint-disable promise/param-names */
export function createDeferred () {
export function createDeferred() {
let result, resolve, reject
return {
resolve: function (value) {
if (resolve) {
resolve(value)
} else {
result = result || new Promise(function (r) { r(value) })
result =
result ||
new Promise(function (r) {
r(value)
})
}
},
reject: function (reason) {
if (reject) {
reject(reason)
} else {
result = result || new Promise(function (x, j) { j(reason) })
result =
result ||
new Promise(function (x, j) {
j(reason)
})
}
},
promise: new Promise(function (r, j) {
@ -39,7 +47,7 @@ export function createDeferred () {
* @param {string} str String to decode
* @returns Decoded tree path
*/
export function decodeTreePath (str) {
export function decodeTreePath(str) {
return str?.replaceAll('.', '/')
}
@ -49,7 +57,7 @@ export function decodeTreePath (str) {
* @param {string} str String to encode
* @returns Encoded tree path
*/
export function encodeTreePath (str) {
export function encodeTreePath(str) {
return str?.toLowerCase()?.replaceAll('/', '.') || ''
}
@ -59,7 +67,7 @@ export function encodeTreePath (str) {
* @param {string} str String to hash
* @returns Hashed string
*/
export function generateHash (str) {
export function generateHash(str) {
return crypto.createHash('sha1').update(str).digest('hex')
}
@ -69,7 +77,7 @@ export function generateHash (str) {
* @param {any} type primitive type name
* @returns Default value
*/
export function getTypeDefaultValue (type) {
export function getTypeDefaultValue(type) {
switch (type.toLowerCase()) {
case 'string':
return ''
@ -80,7 +88,7 @@ export function getTypeDefaultValue (type) {
}
}
export function parseModuleProps (props) {
export function parseModuleProps(props) {
const result = {}
for (const [key, value] of Object.entries(props)) {
let defaultValue = ''
@ -106,7 +114,7 @@ export function parseModuleProps (props) {
return result
}
export function getDictNameFromLocale (locale) {
export function getDictNameFromLocale(locale) {
const loc = locale.length > 2 ? locale.substring(0, 2) : locale
if (loc in WIKI.config.search.dictOverrides) {
return WIKI.config.search.dictOverrides[loc]
@ -115,8 +123,16 @@ export function getDictNameFromLocale (locale) {
}
}
export function replyWithFile (reply, filePath) {
export function replyWithFile(reply, filePath) {
const stream = fs.createReadStream(filePath)
reply.header('Content-Type', mime.getType(filePath))
return reply.send(stream)
}
/**
 * Error subclass that carries an HTTP status code alongside a name,
 * used to build structured JSON error replies for API routes.
 *
 * @param {string} name - error identifier exposed to clients
 * @param {string} message - human-readable description
 * @param {number} [statusCode=400] - HTTP status to reply with
 */
export class CustomError extends Error {
  constructor(name, message, statusCode = 400) {
    super(message)
    Object.assign(this, { name, statusCode })
  }
}

@ -26,11 +26,13 @@ import fastifyView from '@fastify/view'
import gracefulServer from '@gquittet/graceful-server'
import ajvFormats from 'ajv-formats'
import pug from 'pug'
import eventemitter2 from 'eventemitter2'
import NodeCache from 'node-cache'
import configSvc from './core/config.mjs'
import dbManager from './core/db.mjs'
import logger from './core/logger.mjs'
import configSvc from './core/config.js'
import dbManager from './core/db.js'
import logger from './core/logger.js'
import scheduler from './core/scheduler.js'
const nanoid = customAlphabet('1234567890abcdef', 10)
@ -93,9 +95,9 @@ WIKI.logger.info(`Running node.js ${process.version} [ OK ]`)
// ----------------------------------------
async function preBoot() {
WIKI.dbManager = (await import('./core/db.mjs')).default
WIKI.dbManager = (await import('./core/db.js')).default
WIKI.db = await dbManager.init()
WIKI.models = (await import('./models/index.mjs')).default
WIKI.models = (await import('./models/index.js')).default
try {
if (await WIKI.configSvc.loadFromDb()) {
@ -117,6 +119,11 @@ async function preBoot() {
}
WIKI.cache = new NodeCache({ checkperiod: 0 })
WIKI.scheduler = await scheduler.init()
WIKI.events = {
inbound: new eventemitter2.EventEmitter2(),
outbound: new eventemitter2.EventEmitter2()
}
}
// ----------------------------------------
@ -131,6 +138,9 @@ async function postBoot() {
await WIKI.models.authentication.activateStrategies()
await WIKI.models.locales.reloadCache()
await WIKI.models.sites.reloadCache()
await WIKI.dbManager.subscribeToNotifications()
await WIKI.scheduler.start()
}
// ----------------------------------------
@ -179,6 +189,7 @@ async function initHTTPServer() {
WIKI.server.on(gracefulServer.SHUTTING_DOWN, () => {
WIKI.logger.info('Shutting down HTTP Server... [ STOPPING ]')
WIKI.dbManager.unsubscribeFromNotifications()
})
WIKI.server.on(gracefulServer.SHUTDOWN, (err) => {
@ -414,17 +425,31 @@ async function initHTTPServer() {
// done()
// })
app.register(import('./api/index.mjs'), { prefix: '/_api' })
app.register(import('./controllers/site.mjs'), { prefix: '/_site' })
app.register(import('./api/index.js'), { prefix: '/_api' })
app.register(import('./controllers/site.js'), { prefix: '/_site' })
// ----------------------------------------
// Error handling
// ----------------------------------------
app.setErrorHandler((error, req, reply) => {
if (error instanceof fastify.errorCodes.FST_ERR_BAD_STATUS_CODE) {
WIKI.logger.warn(error)
reply.status(500).send({ ok: false })
if (req.url.includes('/_api/')) {
if (error.statusCode) {
reply.code(error.statusCode).type('application/json').send({
ok: false,
error: error.name,
statusCode: error.statusCode,
message: error.message
})
} else {
WIKI.logger.warn(error)
reply.code(500).type('application/json').send({
ok: false,
error: 'Internal Server Error',
statusCode: 500,
message: 'Internal Server error'
})
}
} else {
reply.send(error)
}

@ -2,8 +2,8 @@ import fs from 'node:fs/promises'
import path from 'node:path'
import yaml from 'js-yaml'
import { eq } from 'drizzle-orm'
import { parseModuleProps } from '../helpers/common.mjs'
import { authentication as authenticationTable } from '../db/schema.mjs'
import { parseModuleProps } from '../helpers/common.js'
import { authentication as authenticationTable } from '../db/schema.js'
/**
* Authentication model
@ -72,7 +72,7 @@ class Authentication {
for (const stg of enabledStrategies) {
try {
const StrategyModule = (
await import(`../modules/authentication/${stg.module}/authentication.mjs`)
await import(`../modules/authentication/${stg.module}/authentication.js`)
).default
WIKI.auth.strategies[stg.id] = new StrategyModule(stg.id, stg.config)
WIKI.auth.strategies[stg.id].module = stg.module

@ -1,11 +1,11 @@
import { v4 as uuid } from 'uuid'
import { groups as groupsTable } from '../db/schema.mjs'
import { groups as groupsTable } from '../db/schema.js'
/**
* Groups model
*/
class Groups {
async init (ids) {
async init(ids) {
WIKI.logger.info('Inserting default groups...')
await WIKI.db.insert(groupsTable).values([

@ -0,0 +1,19 @@
/**
 * Model registry: aggregates every model singleton so they can be
 * exposed together (e.g. as WIKI.models).
 */
import { authentication } from './authentication.js'
import { groups } from './groups.js'
import { jobs } from './jobs.js'
import { locales } from './locales.js'
import { sessions } from './sessions.js'
import { settings } from './settings.js'
import { sites } from './sites.js'
import { users } from './users.js'

export default {
  authentication,
  groups,
  jobs,
  locales,
  sessions,
  settings,
  sites,
  users
}

@ -1,17 +0,0 @@
import { authentication } from './authentication.mjs'
import { groups } from './groups.mjs'
import { locales } from './locales.mjs'
import { sessions } from './sessions.mjs'
import { settings } from './settings.mjs'
import { sites } from './sites.mjs'
import { users } from './users.mjs'
export default {
authentication,
groups,
locales,
sessions,
settings,
sites,
users
}

@ -0,0 +1,45 @@
import { DateTime } from 'luxon'
import { jobSchedule as jobScheduleTable, jobLock as jobLockTable } from '../db/schema.js'
/**
 * Jobs model
 */
class Jobs {
  /**
   * Seed the scheduler tables: the built-in system schedules and the
   * 'cron' lock row (back-dated one hour so the first poll runs immediately).
   */
  async init() {
    WIKI.logger.info('Inserting scheduled jobs...')
    // Built-in system schedules; each row gets type 'system' below.
    const systemSchedules = [
      { task: 'checkVersion', cron: '0 0 * * *' },
      { task: 'cleanJobHistory', cron: '5 0 * * *' },
      // { task: 'refreshAutocomplete', cron: '0 */6 * * *' },
      { task: 'updateLocales', cron: '0 0 * * *' }
    ]
    await WIKI.db
      .insert(jobScheduleTable)
      .values(systemSchedules.map((sched) => ({ ...sched, type: 'system' })))
    await WIKI.db.insert(jobLockTable).values({
      key: 'cron',
      lastCheckedBy: 'init',
      lastCheckedAt: DateTime.utc().minus({ hours: 1 }).toISO()
    })
  }
}

export const jobs = new Jobs()

@ -0,0 +1,129 @@
import { stat, readFile } from 'node:fs/promises'
import path from 'node:path'
import { DateTime } from 'luxon'
import { locales as localesTable } from '../db/schema.js'
import { eq, sql } from 'drizzle-orm'
/**
 * Locales model
 */
class Locales {
  /**
   * Sync locale string files from disk into the locales table.
   * Compares each file's mtime against the DB row's updatedAt and upserts
   * the strings when the file is newer (or when force is set).
   *
   * @param {object} [opts]
   * @param {boolean} [opts.force=false] - reload even when the DB copy is newer
   * @returns {Promise<boolean|undefined>} false on failure; undefined on success
   */
  async refreshFromDisk({ force = false } = {}) {
    try {
      const localesMeta = (await import('../locales/metadata.js')).default
      WIKI.logger.info(`Found ${localesMeta.languages.length} locales [ OK ]`)
      const dbLocales = await WIKI.db
        .select({
          code: localesTable.code,
          updatedAt: localesTable.updatedAt
        })
        .from(localesTable)
        .orderBy(localesTable.code)
      let localFilesSkipped = 0
      for (const lang of localesMeta.languages) {
        // -> Build filename (language[-region][-script], e.g. 'en' or 'zh-CN')
        const langFilenameParts = [lang.language]
        if (lang.region) {
          langFilenameParts.push(lang.region)
        }
        if (lang.script) {
          langFilenameParts.push(lang.script)
        }
        const langFilename = langFilenameParts.join('-')
        // -> Get DB version
        const dbLang = dbLocales.find((l) => l.code === langFilename)
        // -> Get File version
        const flPath = path.join(WIKI.SERVERPATH, `locales/${langFilename}.json`)
        try {
          const flStat = await stat(flPath)
          const flUpdatedAt = DateTime.fromJSDate(flStat.mtime)
          // -> Load strings only when file is newer than the DB row (or forced)
          if (!dbLang || DateTime.fromJSDate(dbLang.updatedAt) < flUpdatedAt || force) {
            WIKI.logger.info(`Loading locale ${langFilename} into DB...`)
            const flStrings = JSON.parse(await readFile(flPath, 'utf8'))
            // Upsert keyed on code: existing rows only get strings + updatedAt refreshed.
            await WIKI.db
              .insert(localesTable)
              .values({
                code: langFilename,
                name: lang.name,
                nativeName: lang.localizedName,
                language: lang.language,
                region: lang.region,
                script: lang.script,
                isRTL: lang.isRtl,
                strings: flStrings
              })
              .onConflictDoUpdate({
                target: localesTable.code,
                set: { strings: flStrings, updatedAt: sql`now()` }
              })
            WIKI.logger.info(`Locale ${langFilename} loaded successfully. [ OK ]`)
          } else {
            WIKI.logger.info(
              `Locale ${langFilename} is newer in the DB. Skipping disk version. [ OK ]`
            )
          }
        } catch {
          // Locale listed in metadata but its strings file is missing on disk.
          localFilesSkipped++
          WIKI.logger.warn(
            `Locale ${langFilename} not found on disk. Missing strings file. [ SKIPPED ]`
          )
        }
      }
      if (localFilesSkipped > 0) {
        WIKI.logger.warn(
          `${localFilesSkipped} locales were defined in the metadata file but not found on disk. [ SKIPPED ]`
        )
      }
    } catch (err) {
      WIKI.logger.warn('Failed to load locales from disk: [ FAILED ]')
      WIKI.logger.warn(err)
      return false
    }
  }

  /**
   * List locales (without their strings), served from the WIKI cache.
   * Also primes a per-code `locale:<code>` cache entry for each row.
   *
   * @param {object} [opts]
   * @param {boolean} [opts.cache=true] - when false, bypass and rebuild the cache
   * @returns {Promise<Array>} locale rows ordered by code
   */
  async getLocales({ cache = true } = {}) {
    if (!WIKI.cache.has('locales') || !cache) {
      const locales = await WIKI.db
        .select({
          code: localesTable.code,
          isRTL: localesTable.isRTL,
          language: localesTable.language,
          name: localesTable.name,
          nativeName: localesTable.nativeName,
          createdAt: localesTable.createdAt,
          updatedAt: localesTable.updatedAt,
          completeness: localesTable.completeness
        })
        .from(localesTable)
        .orderBy(localesTable.code)
      WIKI.cache.set('locales', locales)
      for (const locale of locales) {
        WIKI.cache.set(`locale:${locale.code}`, locale)
      }
    }
    return WIKI.cache.get('locales')
  }

  /**
   * Fetch the stored strings for a single locale code.
   *
   * @param {string} locale - locale code (primary key of the locales table)
   * @returns {Promise<*>} the strings value, or [] when the code is unknown
   */
  async getStrings(locale) {
    const results = await WIKI.db
      .select({ strings: localesTable.strings })
      .from(localesTable)
      .where(eq(localesTable.code, locale))
      .limit(1)
    return results.length === 1 ? results[0].strings : []
  }

  /**
   * Rebuild the locales cache from the DB (used at boot / after changes).
   */
  async reloadCache() {
    WIKI.logger.info('Reloading locales cache...')
    const locales = await WIKI.models.locales.getLocales({ cache: false })
    WIKI.logger.info(`Loaded ${locales.length} locales into cache [ OK ]`)
  }
}
export const locales = new Locales()

@ -1,107 +0,0 @@
import { stat, readFile } from 'node:fs/promises'
import path from 'node:path'
import { DateTime } from 'luxon'
import { locales as localesTable } from '../db/schema.mjs'
import { eq, sql } from 'drizzle-orm'
/**
 * Locales model (legacy .mjs version)
 *
 * Syncs locale string files from disk into the DB and serves them from cache.
 */
class Locales {
  /**
   * Load locale strings from disk into the DB.
   *
   * Compares each locale file's mtime against the DB row's updatedAt and only
   * re-imports when the disk copy is newer (or `force` is set).
   *
   * @param {Object} [opts]
   * @param {boolean} [opts.force=false] Re-import every locale regardless of timestamps
   * @returns {Promise<boolean|undefined>} false on failure, undefined on success
   */
  async refreshFromDisk ({ force = false } = {}) {
    try {
      const localesMeta = (await import('../locales/metadata.mjs')).default
      WIKI.logger.info(`Found ${localesMeta.languages.length} locales [ OK ]`)
      const dbLocales = await WIKI.db.select({
        code: localesTable.code,
        updatedAt: localesTable.updatedAt
      }).from(localesTable).orderBy(localesTable.code)
      let localFilesSkipped = 0
      for (const lang of localesMeta.languages) {
        // -> Build filename (language[-region][-script], e.g. en, zh-Hans)
        const langFilenameParts = [lang.language]
        if (lang.region) {
          langFilenameParts.push(lang.region)
        }
        if (lang.script) {
          langFilenameParts.push(lang.script)
        }
        const langFilename = langFilenameParts.join('-')
        // -> Get DB version
        const dbLang = dbLocales.find(l => l.code === langFilename)
        // -> Get File version
        const flPath = path.join(WIKI.SERVERPATH, `locales/${langFilename}.json`)
        try {
          const flStat = await stat(flPath)
          const flUpdatedAt = DateTime.fromJSDate(flStat.mtime)
          // -> Load strings only when the DB copy is absent or older than the file
          if (!dbLang || DateTime.fromJSDate(dbLang.updatedAt) < flUpdatedAt || force) {
            WIKI.logger.info(`Loading locale ${langFilename} into DB...`)
            const flStrings = JSON.parse(await readFile(flPath, 'utf8'))
            // Upsert keyed on locale code; bump updatedAt so the next sync compares correctly
            await WIKI.db.insert(localesTable).values({
              code: langFilename,
              name: lang.name,
              nativeName: lang.localizedName,
              language: lang.language,
              region: lang.region,
              script: lang.script,
              isRTL: lang.isRtl,
              strings: flStrings
            }).onConflictDoUpdate({ target: localesTable.code, set: { strings: flStrings, updatedAt: sql`now()` } })
            WIKI.logger.info(`Locale ${langFilename} loaded successfully. [ OK ]`)
          } else {
            WIKI.logger.info(`Locale ${langFilename} is newer in the DB. Skipping disk version. [ OK ]`)
          }
        } catch (err) {
          // stat/readFile failed -> locale is listed in metadata but missing on disk
          localFilesSkipped++
          WIKI.logger.warn(`Locale ${langFilename} not found on disk. Missing strings file. [ SKIPPED ]`)
        }
      }
      if (localFilesSkipped > 0) {
        WIKI.logger.warn(`${localFilesSkipped} locales were defined in the metadata file but not found on disk. [ SKIPPED ]`)
      }
    } catch (err) {
      // Best-effort sync: log and report failure instead of crashing startup
      WIKI.logger.warn('Failed to load locales from disk: [ FAILED ]')
      WIKI.logger.warn(err)
      return false
    }
  }
  /**
   * Get all locales, from cache when available.
   * Also primes per-locale cache entries (`locale:<code>`).
   *
   * @param {Object} [opts]
   * @param {boolean} [opts.cache=true] Set to false to bypass and refresh the cache
   * @returns {Promise<Array>} List of locales (without their strings)
   */
  async getLocales ({ cache = true } = {}) {
    if (!WIKI.cache.has('locales') || !cache) {
      const locales = await WIKI.db.select({
        code: localesTable.code,
        isRTL: localesTable.isRTL,
        language: localesTable.language,
        name: localesTable.name,
        nativeName: localesTable.nativeName,
        createdAt: localesTable.createdAt,
        updatedAt: localesTable.updatedAt,
        completeness: localesTable.completeness
      }).from(localesTable).orderBy(localesTable.code)
      WIKI.cache.set('locales', locales)
      for (const locale of locales) {
        WIKI.cache.set(`locale:${locale.code}`, locale)
      }
    }
    return WIKI.cache.get('locales')
  }
  /**
   * Fetch the translation strings for a single locale.
   *
   * @param {string} locale Locale code (e.g. `en`)
   * @returns {Promise<Object|Array>} Strings payload, or [] when the locale is unknown
   */
  async getStrings (locale) {
    const results = await WIKI.db.select({ strings: localesTable.strings }).from(localesTable).where(eq(localesTable.code, locale)).limit(1)
    return results.length === 1 ? results[0].strings : []
  }
  /**
   * Rebuild the locales cache from the DB.
   */
  async reloadCache () {
    WIKI.logger.info('Reloading locales cache...')
    const locales = await WIKI.models.locales.getLocales({ cache: false })
    WIKI.logger.info(`Loaded ${locales.length} locales into cache [ OK ]`)
  }
}

export const locales = new Locales()

@ -1,5 +1,5 @@
import { eq, sql } from 'drizzle-orm'
import { sessions as sessionsTable } from '../db/schema.mjs'
import { sessions as sessionsTable } from '../db/schema.js'
/**
* Sessions model

@ -1,4 +1,4 @@
import { settings as settingsTable } from '../db/schema.mjs'
import { settings as settingsTable } from '../db/schema.js'
import { pem2jwk } from 'pem-jwk'
import crypto from 'node:crypto'
@ -10,11 +10,11 @@ class Settings {
* Fetch settings from DB
* @returns {Promise<Object>} Settings
*/
async getConfig () {
async getConfig() {
const settings = await WIKI.db.select().from(settingsTable)
if (settings.length > 0) {
return settings.reduce((res, val) => {
res[val.key] = ('v' in val.value) ? val.value.v : val.value
res[val.key] = 'v' in val.value ? val.value.v : val.value
return res
}, {})
} else {
@ -27,8 +27,9 @@ class Settings {
* @param {string} key Setting key
* @param {Object} value Setting value object
*/
async updateConfig (key, value) {
await WIKI.models.insert(settingsTable)
async updateConfig(key, value) {
await WIKI.models
.insert(settingsTable)
.values({ key, value })
.onConflictDoUpdate({ target: settingsTable.key, set: { value } })
}
@ -37,7 +38,7 @@ class Settings {
* Initialize settings table
* @param {Object} ids Generated IDs
*/
async init (ids) {
async init(ids) {
WIKI.logger.info('Generating certificates...')
const secret = crypto.randomBytes(32).toString('hex')
const certs = crypto.generateKeyPairSync('rsa', {

@ -1,6 +1,6 @@
import { toMerged } from 'es-toolkit/object'
import { keyBy } from 'es-toolkit/array'
import { sites as sitesTable } from '../db/schema.mjs'
import { sites as sitesTable } from '../db/schema.js'
import { eq } from 'drizzle-orm'
/**
@ -25,6 +25,10 @@ class Sites {
return null
}
async isHostnameUnique(hostname) {
return (await WIKI.db.$count(sitesTable, eq(sitesTable.hostname, hostname))) === 0
}
async getAllSites() {
return WIKI.db.select().from(sitesTable).orderBy(sitesTable.hostname)
}
@ -40,7 +44,7 @@ class Sites {
WIKI.logger.info(`Loaded ${sites.length} site configurations [ OK ]`)
}
async createSite(hostname, config) {
async createSite(hostname, config = {}) {
const result = await WIKI.db
.insert(sitesTable)
.values({

@ -1,5 +1,5 @@
import bcrypt from 'bcryptjs'
import { userGroups, users as usersTable, userKeys } from '../db/schema.mjs'
import { userGroups, users as usersTable, userKeys } from '../db/schema.js'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { DateTime } from 'luxon'

@ -25,11 +25,14 @@
"ajv-formats": "3.0.1",
"bcryptjs": "3.0.3",
"chalk": "5.6.2",
"cron-parser": "5.5.0",
"drizzle-orm": "1.0.0-beta.15-859cf75",
"es-toolkit": "1.45.1",
"eventemitter2": "6.4.9",
"fastify": "5.7.1",
"fastify-favicon": "5.0.0",
"filesize": "11.0.13",
"fs-extra": "11.3.4",
"js-yaml": "4.1.1",
"luxon": "3.7.2",
"mime": "4.1.0",
@ -37,7 +40,7 @@
"node-cache": "5.1.2",
"pem-jwk": "2.0.0",
"pg": "8.17.2",
"pg-pubsub": "0.8.1",
"poolifier": "5.3.2",
"pug": "3.0.3",
"semver": "7.7.3",
"uuid": "13.0.0"
@ -2522,6 +2525,18 @@
"integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==",
"license": "MIT"
},
"node_modules/cron-parser": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-5.5.0.tgz",
"integrity": "sha512-oML4lKUXxizYswqmxuOCpgFS8BNUJpIu6k/2HVHyaL8Ynnf3wdf9tkns0yRdJLSIjkJ+b0DXHMZEHGpMwjnPww==",
"license": "MIT",
"dependencies": {
"luxon": "^3.7.1"
},
"engines": {
"node": ">=18"
}
},
"node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
@ -2963,6 +2978,12 @@
"node": ">=6"
}
},
"node_modules/eventemitter2": {
"version": "6.4.9",
"resolved": "https://registry.npmjs.org/eventemitter2/-/eventemitter2-6.4.9.tgz",
"integrity": "sha512-JEPTiaOt9f04oa6NOkc4aH+nVp5I3wEjpHbIPqfgCdD5v5bUzy7xQqwcVO2aDQgOWhI28da57HksMrzK9HlRxg==",
"license": "MIT"
},
"node_modules/events": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz",
@ -3170,6 +3191,20 @@
"node": ">= 0.6"
}
},
"node_modules/fs-extra": {
"version": "11.3.4",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz",
"integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==",
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=14.14"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
@ -3274,6 +3309,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"license": "ISC"
},
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
@ -3665,6 +3706,18 @@
"url": "https://github.com/Eomm/json-schema-resolver?sponsor=1"
}
},
"node_modules/jsonfile": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz",
"integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==",
"license": "MIT",
"dependencies": {
"universalify": "^2.0.0"
},
"optionalDependencies": {
"graceful-fs": "^4.1.6"
}
},
"node_modules/jsonwebtoken": {
"version": "9.0.3",
"resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.3.tgz",
@ -4312,15 +4365,6 @@
"integrity": "sha512-iNzslsoeSH2/gmDDKiyMqF64DATUCWj3YJ0wP14kqcsf2TUklwimd+66yYojKwZCA7h2yRNLGug71hCBA2a4sw==",
"license": "MIT"
},
"node_modules/pg-format": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/pg-format/-/pg-format-1.0.4.tgz",
"integrity": "sha512-YyKEF78pEA6wwTAqOUaHIN/rWpfzzIuMh9KdAhc3rSLQ/7zkRFcCgYBAEGatDstLyZw4g0s9SNICmaTGnBVeyw==",
"license": "MIT",
"engines": {
"node": ">=4.0"
}
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
@ -4345,21 +4389,6 @@
"integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==",
"license": "MIT"
},
"node_modules/pg-pubsub": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/pg-pubsub/-/pg-pubsub-0.8.1.tgz",
"integrity": "sha512-b/EHOwCrag4isghc4XgRipeAjfgyNg1DiL3Dwwh1Ojp91Lriltn5eg2nSWjBe4pzcFzhTM6HiB7LOG9NN1nx5g==",
"license": "MIT",
"dependencies": {
"pg": "^8.7.3",
"pg-format": "^1.0.2",
"pony-cause": "^2.1.8",
"promised-retry": "^0.5.0"
},
"engines": {
"node": "^14.18.0 || >=16.0.0"
}
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
@ -4422,13 +4451,24 @@
"integrity": "sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw==",
"license": "MIT"
},
"node_modules/pony-cause": {
"version": "2.1.11",
"resolved": "https://registry.npmjs.org/pony-cause/-/pony-cause-2.1.11.tgz",
"integrity": "sha512-M7LhCsdNbNgiLYiP4WjsfLUuFmCfnjdF6jKe2R9NKl4WFN+HZPGHJZ9lnLP7f9ZnKe3U9nuWD0szirmj+migUg==",
"license": "0BSD",
"node_modules/poolifier": {
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/poolifier/-/poolifier-5.3.2.tgz",
"integrity": "sha512-5Cu+3i+m5s56mHYPS7OXmsl5Eqs8aoTALPTrhGKcRVoHQgQrySukBHoDmHx3yH6f29tS8JfSse/58Tl987ztow==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/poolifier"
},
{
"type": "github",
"url": "https://github.com/sponsors/poolifier"
}
],
"license": "MIT",
"engines": {
"node": ">=12.0.0"
"node": ">=20.11.0",
"pnpm": ">=9.0.0"
}
},
"node_modules/postgres-array": {
@ -4510,27 +4550,6 @@
"asap": "~2.0.3"
}
},
"node_modules/promised-retry": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/promised-retry/-/promised-retry-0.5.0.tgz",
"integrity": "sha512-jbYvN6UGE+/3E1g0JmgDPchUc+4VI4cBaPjdr2Lso22xfFqut2warEf6IhWuhPJKbJYVOQAyCt2Jx+01ORCItg==",
"license": "MIT",
"dependencies": {
"pony-cause": "^1.1.1"
},
"engines": {
"node": "^14.17.0 || >=16.0.0"
}
},
"node_modules/promised-retry/node_modules/pony-cause": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pony-cause/-/pony-cause-1.1.1.tgz",
"integrity": "sha512-PxkIc/2ZpLiEzQXu5YRDOUgBlfGYBY8156HY5ZcRAwwonMk5W/MrJP2LLkG/hF7GEQzaHo2aS7ho6ZLCOvf+6g==",
"license": "0BSD",
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/pstree.remy": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz",
@ -5183,6 +5202,15 @@
"license": "MIT",
"peer": true
},
"node_modules/universalify": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
"license": "MIT",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

@ -3,7 +3,7 @@
"version": "3.0.0",
"releaseDate": "2026-01-01T01:01:01.000Z",
"description": "The most powerful and extensible open source Wiki software",
"main": "index.mjs",
"main": "index.js",
"type": "module",
"private": true,
"dev": true,
@ -12,7 +12,7 @@
"dev": "cd .. && nodemon backend --watch backend --ext mjs,js,json",
"ncu": "ncu -i",
"ncu-u": "ncu -u",
"db-generate": "drizzle-kit generate --dialect=postgresql --schema=./db/schema.mjs --out=./db/migrations --name=main",
"db-generate": "drizzle-kit generate --dialect=postgresql --schema=./db/schema.js --out=./db/migrations --name=main",
"db-up": "drizzle-kit up --dialect=postgresql --out=./db/migrations"
},
"repository": {
@ -54,11 +54,14 @@
"ajv-formats": "3.0.1",
"bcryptjs": "3.0.3",
"chalk": "5.6.2",
"cron-parser": "5.5.0",
"drizzle-orm": "1.0.0-beta.15-859cf75",
"es-toolkit": "1.45.1",
"eventemitter2": "6.4.9",
"fastify": "5.7.1",
"fastify-favicon": "5.0.0",
"filesize": "11.0.13",
"fs-extra": "11.3.4",
"js-yaml": "4.1.1",
"luxon": "3.7.2",
"mime": "4.1.0",
@ -66,7 +69,7 @@
"node-cache": "5.1.2",
"pem-jwk": "2.0.0",
"pg": "8.17.2",
"pg-pubsub": "0.8.1",
"poolifier": "5.3.2",
"pug": "3.0.3",
"semver": "7.7.3",
"uuid": "13.0.0"

@ -0,0 +1,24 @@
/**
 * Scheduled task: check GitHub for the latest Wiki.js release and persist
 * the result into WIKI.config.update.
 * @throws {Error} When the request fails or the response cannot be processed
 */
export async function task() {
  WIKI.logger.info('Checking for latest version...')
  try {
    const res = await fetch('https://api.github.com/repos/requarks/wiki/releases/latest')
    // fetch() only rejects on network failures; an HTTP error (rate limit, 5xx)
    // would otherwise surface later as an opaque `resp.tag_name` TypeError.
    if (!res.ok) {
      throw new Error(`GitHub API request failed with HTTP ${res.status}`)
    }
    const resp = await res.json()
    // Release tags are usually prefixed with "v" (e.g. v3.0.0) -> strip for version comparisons
    const strictVersion = resp.tag_name.startsWith('v')
      ? resp.tag_name.substring(1)
      : resp.tag_name
    WIKI.logger.info(`Latest version is ${resp.tag_name}.`)
    WIKI.config.update = {
      lastCheckedAt: new Date().toISOString(),
      version: strictVersion,
      versionDate: resp.published_at
    }
    await WIKI.configSvc.saveToDb(['update'])
    WIKI.logger.info('Checked for latest version: [ COMPLETED ]')
  } catch (err) {
    WIKI.logger.error('Checking for latest version: [ FAILED ]')
    WIKI.logger.error(err.message)
    throw err
  }
}

@ -0,0 +1,27 @@
import path from 'node:path'
import fse from 'fs-extra'
import { DateTime } from 'luxon'
/**
 * Scheduled task: delete orphaned files from the temporary uploads folder.
 * Any file older than 15 minutes is assumed to belong to an upload flow
 * that never completed.
 * @throws {Error} When the uploads folder cannot be read or a file cannot be removed
 */
export async function task() {
  WIKI.logger.info('Purging orphaned upload files...')
  try {
    const uplTempPath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'uploads')
    await fse.ensureDir(uplTempPath)
    const ls = await fse.readdir(uplTempPath)
    // Convert the Luxon cutoff to a JS Date once, so the comparison below is
    // Date-vs-Date instead of relying on implicit valueOf() coercion between
    // a fs.Stats Date and a Luxon DateTime.
    const cutoff = DateTime.now().minus({ minutes: 15 }).toJSDate()
    for (const f of ls) {
      const fPath = path.join(uplTempPath, f)
      const fStat = await fse.stat(fPath)
      if (fStat.isFile() && fStat.ctime < cutoff) {
        await fse.unlink(fPath)
      }
    }
    WIKI.logger.info('Purging orphaned upload files: [ COMPLETED ]')
  } catch (err) {
    WIKI.logger.error('Purging orphaned upload files: [ FAILED ]')
    WIKI.logger.error(err.message)
    throw err
  }
}

@ -0,0 +1,56 @@
import { ThreadWorker } from 'poolifier'
import { kebabCase } from 'es-toolkit/string'
import path from 'node:path'
import configSvc from './core/config.js'
import logger from './core/logger.js'
import db from './core/db.js'
// ----------------------------------------
// Init Minimal Core
// ----------------------------------------
// Minimal WIKI global for worker threads: just enough state (config service,
// logger, lazy DB handle) for task modules to run outside the main server.
const WIKI = {
  IS_DEBUG: process.env.NODE_ENV === 'development',
  ROOTPATH: process.cwd(),
  INSTANCE_ID: 'worker', // overwritten per-job in the ThreadWorker handler below
  SERVERPATH: path.join(process.cwd(), 'backend'),
  configSvc,
  // Lazily open the DB connection on first use; later calls are no-ops.
  ensureDb: async () => {
    if (WIKI.db) {
      return true
    }
    WIKI.db = await db.init(true)
    try {
      await WIKI.configSvc.loadFromDb()
      await WIKI.configSvc.applyFlags()
    } catch (err) {
      WIKI.logger.error('Database Initialization Error: ' + err.message)
      if (WIKI.IS_DEBUG) {
        WIKI.logger.error(err)
      }
      // A worker without a working DB cannot do anything useful -> hard exit
      process.exit(1)
    }
  }
}
global.WIKI = WIKI
await WIKI.configSvc.init(true)
// ----------------------------------------
// Init Logger
// ----------------------------------------
WIKI.logger = logger.init()
// ----------------------------------------
// Execute Task
// ----------------------------------------
// Each job picks its task module by kebab-cased name and runs its exported task().
export default new ThreadWorker(async (job) => {
  WIKI.INSTANCE_ID = job.INSTANCE_ID
  const task = (await import(`./tasks/workers/${kebabCase(job.task)}.js`)).task
  await task(job)
  return true
})

@ -41,15 +41,6 @@ db:
#######################################################################
# Do not change unless you know what you are doing!
# ---------------------------------------------------------------------
# Database Pool Options
# ---------------------------------------------------------------------
# Refer to https://github.com/vincit/tarn.js for all possible options
pool:
# min: 2
# max: 10
# ---------------------------------------------------------------------
# IP address the server should listen to
# ---------------------------------------------------------------------

@ -11,6 +11,7 @@ export function initializeApi(store) {
const client = ky.create({
prefixUrl: '/_api',
credentials: 'same-origin',
throwHttpErrors: (statusNumber) => statusNumber > 400, // Don't throw for 400
hooks: {
beforeRequest: [
async (request) => {

@ -88,27 +88,13 @@ const isLatest = computed(() => {
async function check () {
state.isLoading = true
try {
const resp = await APOLLO_CLIENT.mutate({
mutation: `
mutation checkForUpdates {
checkForUpdates {
operation {
succeeded
message
}
current
latest
latestDate
}
}
`
})
if (resp?.data?.checkForUpdates?.operation?.succeeded) {
state.current = resp?.data?.checkForUpdates?.current
state.latest = resp?.data?.checkForUpdates?.latest
state.latestDate = DateTime.fromISO(resp?.data?.checkForUpdates?.latestDate).toFormat(userStore.preferredDateFormat)
const resp = await API_CLIENT.post('system/checkForUpdate').json()
if (resp?.current) {
state.current = resp.current
state.latest = resp.latest
state.latestDate = DateTime.fromISO(resp.latestDate).toFormat(userStore.preferredDateFormat)
} else {
throw new Error(resp?.data?.checkForUpdates?.operation?.message || 'An unexpected error occured.')
throw new Error(resp?.message || 'An unexpected error occured.')
}
} catch (err) {
$q.notify({

@ -111,29 +111,13 @@ async function create () {
if (!isFormValid) {
throw new Error(t('admin.sites.createInvalidData'))
}
const resp = await APOLLO_CLIENT.mutate({
mutation: `
mutation createSite (
$hostname: String!
$title: String!
) {
createSite(
hostname: $hostname
title: $title
) {
operation {
succeeded
message
}
}
}
`,
variables: {
const resp = await API_CLIENT.post('sites', {
json: {
hostname: state.siteHostname,
title: state.siteName
}
})
if (resp?.data?.createSite?.operation?.succeeded) {
}).json()
if (resp?.ok) {
$q.notify({
type: 'positive',
message: t('admin.sites.createSuccess')
@ -141,7 +125,7 @@ async function create () {
await adminStore.fetchSites()
onDialogOK()
} else {
throw new Error(resp?.data?.createSite?.operation?.message || 'An unexpected error occured.')
throw new Error(t(`admin.sites.${resp?.error}`, resp?.message || 'An unexpected error occured.'))
}
} catch (err) {
$q.notify({

@ -77,22 +77,8 @@ const state = reactive({
async function confirm () {
state.isLoading = true
try {
const resp = await APOLLO_CLIENT.mutate({
mutation: `
mutation deleteSite ($id: UUID!) {
deleteSite(id: $id) {
operation {
succeeded
message
}
}
}
`,
variables: {
id: props.site.id
}
})
if (resp?.data?.deleteSite?.operation?.succeeded) {
const resp = await API_CLIENT.delete(`sites/${props.site.id}`)
if (resp?.ok) {
$q.notify({
type: 'positive',
message: t('admin.sites.deleteSuccess')
@ -102,7 +88,7 @@ async function confirm () {
})
onDialogOK()
} else {
throw new Error(resp?.data?.deleteSite?.operation?.message || 'An unexpected error occured.')
throw new Error(t(`admin.sites.${resp?.error}`, resp?.message || 'An unexpected error occured.'))
}
} catch (err) {
$q.notify({

Loading…
Cancel
Save