mirror of https://github.com/requarks/wiki
parent
68e6a2787a
commit
6f91c2e052
@ -0,0 +1,17 @@
|
||||
/**
 * API Routes
 *
 * Registers every API sub-router on the given Fastify instance and exposes a
 * minimal liveness endpoint at the root of this plugin's mount point.
 */
async function routes(app, options) {
  // Sub-routers and their optional mount prefixes, in registration order.
  const subRouters = [
    ['./authentication.js'],
    ['./locales.js', { prefix: '/locales' }],
    ['./pages.js'],
    ['./sites.js', { prefix: '/sites' }],
    ['./system.js', { prefix: '/system' }],
    ['./users.js', { prefix: '/users' }]
  ]
  for (const [modulePath, opts] of subRouters) {
    if (opts) {
      app.register(import(modulePath), opts)
    } else {
      app.register(import(modulePath))
    }
  }

  // Basic liveness check.
  app.get('/', async (req, reply) => {
    return { ok: true }
  })
}

export default routes
|
||||
@ -1,17 +0,0 @@
|
||||
/**
 * API Routes
 *
 * (Superseded .mjs version of this plugin.) Registers the API sub-routers on
 * the given Fastify instance and exposes a liveness endpoint at the root.
 */
async function routes (app, options) {
  app.register(import('./authentication.mjs'))
  app.register(import('./locales.mjs'), { prefix: '/locales' })
  app.register(import('./pages.mjs'))
  app.register(import('./sites.mjs'), { prefix: '/sites' })
  app.register(import('./system.mjs'), { prefix: '/system' })
  app.register(import('./users.mjs'), { prefix: '/users' })

  // Basic liveness check.
  app.get('/', async (req, reply) => {
    return { ok: true }
  })
}

export default routes
|
||||
@ -1,119 +0,0 @@
|
||||
import { padEnd } from 'lodash-es'
import eventemitter2 from 'eventemitter2'
import NodeCache from 'node-cache'

import asar from './asar.mjs'
import db from './db.mjs'
import extensions from './extensions.mjs'
import scheduler from './scheduler.mjs'
import servers from './servers.mjs'
import metrics from './metrics.mjs'

// Guards against shutdown() running twice (e.g. repeated SIGTERM).
let isShuttingDown = false

/**
 * Wiki.js Core — process lifecycle (boot / shutdown) orchestration.
 * Relies on the global `WIKI` object being populated by the entry point.
 */
export default {
  /**
   * Entry point: prints the startup banner, initializes the database,
   * loads configuration from the DB, then kicks off the web boot sequence.
   * Exits the process (code 1) on any database/config failure.
   */
  async init () {
    WIKI.logger.info('=======================================')
    WIKI.logger.info(`= Wiki.js ${padEnd(WIKI.version + ' ', 29, '=')}`)
    WIKI.logger.info('=======================================')
    WIKI.logger.info('Initializing...')
    WIKI.logger.info(`Running node.js ${process.version}`)

    WIKI.db = await db.init()

    try {
      await WIKI.configSvc.loadFromDb()
      await WIKI.configSvc.applyFlags()
    } catch (err) {
      WIKI.logger.error('Database Initialization Error: ' + err.message)
      if (WIKI.IS_DEBUG) {
        WIKI.logger.error(err)
      }
      process.exit(1)
    }

    // Not awaited: bootWeb handles its own errors (exits the process itself).
    this.bootWeb()
  },
  /**
   * Pre-Web Boot Sequence
   * Sets up the in-memory cache, scheduler, servers, event emitters,
   * extensions, asar support and metrics. Exits the process on failure.
   */
  async preBootWeb () {
    try {
      WIKI.cache = new NodeCache({ checkperiod: 0 })
      WIKI.scheduler = await scheduler.init()
      WIKI.servers = servers
      WIKI.events = {
        inbound: new eventemitter2.EventEmitter2(),
        outbound: new eventemitter2.EventEmitter2()
      }
      WIKI.extensions = extensions
      WIKI.asar = asar
      WIKI.metrics = await metrics.init()
    } catch (err) {
      WIKI.logger.error(err)
      process.exit(1)
    }
  },
  /**
   * Boot Web Process
   * Runs the pre-boot sequence, starts the web server, then runs the
   * post-boot sequence. Exits the process on failure.
   */
  async bootWeb () {
    try {
      await this.preBootWeb()
      await (await import('../web.mjs')).init()
      // Not awaited — post-boot work proceeds in the background.
      this.postBootWeb()
    } catch (err) {
      WIKI.logger.error(err)
      process.exit(1)
    }
  },
  /**
   * Post-Web Boot Sequence
   * Refreshes module/provider definitions from disk, activates strategies
   * and targets, then starts background services (DB notifications,
   * scheduler). Order matters: disk refresh precedes cache reloads.
   */
  async postBootWeb () {
    await WIKI.db.locales.refreshFromDisk()

    await WIKI.db.analytics.refreshProvidersFromDisk()
    await WIKI.db.authentication.refreshStrategiesFromDisk()
    await WIKI.db.commentProviders.refreshProvidersFromDisk()
    await WIKI.db.renderers.refreshRenderersFromDisk()
    await WIKI.db.storage.refreshTargetsFromDisk()

    await WIKI.extensions.init()

    await WIKI.auth.activateStrategies()
    await WIKI.db.commentProviders.initProvider()
    await WIKI.db.locales.reloadCache()
    await WIKI.db.sites.reloadCache()
    await WIKI.db.storage.initTargets()

    await WIKI.db.subscribeToNotifications()
    await WIKI.scheduler.start()
  },
  /**
   * Graceful shutdown
   * Stops servers, the scheduler, DB notifications/connections and asar,
   * then terminates the process unless running in dev mode.
   * @param {Boolean} [devMode=false] - when true, skip process.exit (dev
   *                                    tooling restarts the app itself).
   */
  async shutdown (devMode = false) {
    if (isShuttingDown) { return }
    isShuttingDown = true
    if (WIKI.servers) {
      await WIKI.servers.stopServers()
    }
    if (WIKI.scheduler) {
      await WIKI.scheduler.stop()
    }
    // NOTE(review): guard checks WIKI.models but the body uses WIKI.db —
    // looks like a stale reference from an older codebase; if WIKI.models is
    // never set, DB teardown is silently skipped. Confirm intent.
    if (WIKI.models) {
      await WIKI.db.unsubscribeToNotifications()
      if (WIKI.db.knex) {
        await WIKI.db.knex.destroy()
      }
    }
    if (WIKI.asar) {
      await WIKI.asar.unload()
    }
    if (!devMode) {
      WIKI.logger.info('Terminating process...')
      process.exit(0)
    }
  }
}
|
||||
@ -0,0 +1,378 @@
|
||||
import { DynamicThreadPool } from 'poolifier'
import os from 'node:os'
import fs from 'node:fs/promises'
import path from 'node:path'
import { CronExpressionParser } from 'cron-parser'
import { DateTime } from 'luxon'
import { v4 as uuid } from 'uuid'
import { createDeferred } from '../helpers/common.js'
import { camelCase } from 'es-toolkit/string'
import { remove } from 'es-toolkit/array'
import {
  jobs as jobsTable,
  jobLock as jobLockTable,
  jobSchedule as jobScheduleTable
} from '../db/schema.js'
import { eq, inArray, sql } from 'drizzle-orm'

/**
 * Job Scheduler
 *
 * Multi-instance job runner backed by Postgres. Pending jobs live in the
 * `jobs` table; instances coordinate through LISTEN/NOTIFY on the
 * `scheduler` channel and claim rows with FOR UPDATE SKIP LOCKED. A task
 * either runs in-process (handlers loaded from tasks/simple) or inside a
 * poolifier worker thread.
 */
export default {
  workerPool: null, // poolifier DynamicThreadPool executing worker-based tasks
  pubsubClient: null, // dedicated PG connection used for LISTEN/NOTIFY
  maxWorkers: 1,
  activeWorkers: 0,
  pollingRef: null, // interval handle: periodic job polling
  scheduledRef: null, // interval handle: periodic cron-schedule expansion
  tasks: null, // map of taskName -> in-process handler function
  completionPromises: [], // pending deferreds for jobs added with { promise: true }
  /**
   * Create the worker pool and load the in-process (simple) task handlers.
   * @returns {Promise<Object>} this scheduler instance
   */
  async init() {
    // 'auto' reserves one CPU for the main thread; floor at 1 worker.
    this.maxWorkers =
      WIKI.config.scheduler.workers === 'auto'
        ? os.cpus().length - 1
        : WIKI.config.scheduler.workers
    if (this.maxWorkers < 1) {
      this.maxWorkers = 1
    }
    WIKI.logger.info(`Initializing Worker Pool (Limit: ${this.maxWorkers})...`)
    this.workerPool = new DynamicThreadPool(
      1,
      this.maxWorkers,
      path.join(WIKI.SERVERPATH, 'worker.js'),
      {
        errorHandler: (err) => WIKI.logger.warn(err),
        exitHandler: () => WIKI.logger.debug('A worker has gone offline.'),
        onlineHandler: () => WIKI.logger.debug('New worker is online.')
      }
    )
    // Simple tasks run in-process; file name (kebab-case) maps to a
    // camelCase handler key.
    this.tasks = {}
    for (const f of await fs.readdir(path.join(WIKI.SERVERPATH, 'tasks/simple'))) {
      const taskName = camelCase(f.replace('.js', ''))
      this.tasks[taskName] = (await import(path.join(WIKI.SERVERPATH, 'tasks/simple', f))).task
    }
    return this
  },
  /**
   * Open the pub/sub connection, subscribe to scheduler events, and start
   * the cron-expansion and job-polling timers.
   */
  async start() {
    WIKI.logger.info('Starting Scheduler...')

    const connectionAppName = `Wiki.js - ${WIKI.INSTANCE_ID}:SCHEDULER`
    this.pubsubClient = await WIKI.dbManager.pool.connect()
    // NOTE(review): value is interpolated straight into SQL; INSTANCE_ID is
    // presumably server-generated, but confirm it can never contain a quote.
    await this.pubsubClient.query(`SET application_name = '${connectionAppName}'`)

    // -> Outbound events handling

    this.pubsubClient.query('LISTEN scheduler')
    this.pubsubClient.on('notification', async (msg) => {
      if (msg.channel !== 'scheduler') {
        return
      }
      try {
        const decoded = JSON.parse(msg.payload)
        switch (decoded?.event) {
          case 'newJob': {
            // Claim-and-run immediately if we have spare worker capacity.
            if (this.activeWorkers < this.maxWorkers) {
              this.activeWorkers++
              await this.processJob()
              this.activeWorkers--
            }
            break
          }
          case 'jobCompleted': {
            // Settle the deferred of a job that was added with { promise: true }.
            const jobPromise = this.completionPromises.find((p) => p.id === decoded.id)
            if (jobPromise) {
              if (decoded.state === 'success') {
                jobPromise.resolve()
              } else {
                jobPromise.reject(new Error(decoded.errorMessage))
              }
              // Prune on the next tick so awaiting callers settle first.
              // NOTE(review): entries are only removed when a completion event
              // arrives; deferreds for jobs that never complete accumulate.
              setTimeout(() => {
                remove(this.completionPromises, (p) => p.id === decoded.id)
              })
            }
            break
          }
        }
      } catch {} // malformed payloads are deliberately ignored
    })

    // -> Start scheduled jobs check
    this.scheduledRef = setInterval(async () => {
      this.addScheduled()
    }, WIKI.config.scheduler.scheduledCheck * 1000)

    // -> Add scheduled jobs on init
    await this.addScheduled()

    // -> Start job polling
    this.pollingRef = setInterval(async () => {
      this.processJob()
    }, WIKI.config.scheduler.pollingCheck * 1000)

    WIKI.logger.info('Scheduler: [ STARTED ]')
  },
  /**
   * Add a job to the scheduler
   * @param {Object} opts - Job options
   * @param {string} opts.task - The task name to execute.
   * @param {Object} [opts.payload={}] - An optional data object to pass to the job.
   * @param {Date} [opts.waitUntil] - An optional datetime after which the task is allowed to run.
   * @param {Number} [opts.maxRetries] - The number of times this job can be restarted upon failure. Uses server defaults if not provided.
   * @param {Boolean} [opts.isScheduled=false] - Whether this is a scheduled job.
   * @param {Boolean} [opts.notify=true] - Whether to notify all instances that a new job is available.
   * @param {Boolean} [opts.promise=false] - Whether to return a promise property that resolves when the job completes.
   * @returns {Promise}
   */
  async addJob({
    task,
    payload = {},
    waitUntil,
    maxRetries,
    isScheduled = false,
    notify = true,
    promise = false
  }) {
    try {
      const jobId = uuid()
      const jobDefer = createDeferred()
      if (promise) {
        this.completionPromises.push({
          id: jobId,
          added: DateTime.utc(),
          resolve: jobDefer.resolve,
          reject: jobDefer.reject
        })
      }
      await WIKI.db.insert(jobsTable).values({
        id: jobId,
        task,
        // Run in a worker thread unless an in-process handler exists.
        useWorker: !(typeof this.tasks[task] === 'function'),
        payload,
        maxRetries: maxRetries ?? WIKI.config.scheduler.maxRetries,
        isScheduled,
        waitUntil,
        createdBy: WIKI.INSTANCE_ID
      })
      if (notify) {
        // Fire-and-forget: wake up all instances listening on 'scheduler'.
        this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
          'scheduler',
          JSON.stringify({
            source: WIKI.INSTANCE_ID,
            event: 'newJob',
            id: jobId
          })
        ])
      }
      return {
        id: jobId,
        ...(promise && { promise: jobDefer.promise })
      }
    } catch (err) {
      // NOTE(review): failure is logged but swallowed — callers receive
      // undefined instead of { id }. Confirm this is intentional.
      WIKI.logger.warn(`Failed to add job to scheduler: ${err.message}`)
    }
  },
  /**
   * Claim up to (maxWorkers - activeWorkers) due jobs, run them, record the
   * outcome in jobHistory, and reschedule failed jobs with exponential
   * backoff. Claiming deletes the rows inside a transaction so concurrent
   * instances (via SKIP LOCKED) never double-process a job.
   */
  async processJob() {
    const jobIds = []
    try {
      const availableWorkers = this.maxWorkers - this.activeWorkers
      if (availableWorkers < 1) {
        WIKI.logger.debug('All workers are busy. Cannot process more jobs at the moment.')
        return
      }

      await WIKI.db.transaction(async (trx) => {
        // Claim due jobs by deleting them; SKIP LOCKED avoids contention
        // with other instances doing the same.
        const jobs = await trx
          .delete(jobsTable)
          .where(
            inArray(
              jobsTable.id,
              sql`(SELECT id FROM jobs WHERE ("waitUntil" IS NULL OR "waitUntil" <= NOW()) ORDER BY id FOR UPDATE SKIP LOCKED LIMIT ${availableWorkers})`
            )
          )
          .returning()
        if (jobs && jobs.length > 0) {
          for (const job of jobs) {
            WIKI.logger.info(`Processing new job ${job.id}: ${job.task}...`)
            // -> Add to Job History
            // NOTE(review): knex-style call outside the drizzle transaction —
            // history writes are not rolled back with the claim; confirm
            // WIKI.db.knex exists in this codebase.
            await WIKI.db
              .knex('jobHistory')
              .insert({
                id: job.id,
                task: job.task,
                state: 'active',
                useWorker: job.useWorker,
                wasScheduled: job.isScheduled,
                payload: job.payload,
                attempt: job.retries + 1,
                maxRetries: job.maxRetries,
                executedBy: WIKI.INSTANCE_ID,
                createdAt: job.createdAt
              })
              .onConflict('id')
              .merge({
                executedBy: WIKI.INSTANCE_ID,
                startedAt: new Date()
              })
            jobIds.push(job.id)

            // -> Start working on it
            try {
              if (job.useWorker) {
                await this.workerPool.execute({
                  ...job,
                  INSTANCE_ID: `${WIKI.INSTANCE_ID}:WKR`
                })
              } else {
                await this.tasks[job.task](job.payload)
              }
              // -> Update job history (success)
              await WIKI.db
                .knex('jobHistory')
                .where({
                  id: job.id
                })
                .update({
                  state: 'completed',
                  completedAt: new Date()
                })
              WIKI.logger.info(`Completed job ${job.id}: ${job.task}`)
              // Fire-and-forget completion broadcast (settles any deferred).
              this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
                'scheduler',
                JSON.stringify({
                  source: WIKI.INSTANCE_ID,
                  event: 'jobCompleted',
                  state: 'success',
                  id: job.id
                })
              ])
            } catch (err) {
              WIKI.logger.warn(`Failed to complete job ${job.id}: ${job.task} [ FAILED ]`)
              WIKI.logger.warn(err)
              // -> Update job history (fail)
              await WIKI.db
                .knex('jobHistory')
                .where({
                  id: job.id
                })
                .update({
                  attempt: job.retries + 1,
                  state: 'failed',
                  lastErrorMessage: err.message
                })
              this.pubsubClient.query(`SELECT pg_notify($1, $2)`, [
                'scheduler',
                JSON.stringify({
                  source: WIKI.INSTANCE_ID,
                  event: 'jobCompleted',
                  state: 'failed',
                  id: job.id,
                  errorMessage: err.message
                })
              ])
              // -> Reschedule for retry
              if (job.retries < job.maxRetries) {
                // Exponential backoff: retryBackoff * 2^retries seconds.
                const backoffDelay = 2 ** job.retries * WIKI.config.scheduler.retryBackoff
                // NOTE(review): `trx(...)` is the knex calling convention; a
                // drizzle transaction object is not callable. Confirm whether
                // this should be trx.insert(jobsTable).values(...).
                await trx('jobs').insert({
                  ...job,
                  retries: job.retries + 1,
                  waitUntil: DateTime.utc().plus({ seconds: backoffDelay }).toJSDate(),
                  updatedAt: new Date()
                })
                WIKI.logger.warn(`Rescheduling new attempt for job ${job.id}: ${job.task}...`)
              }
            }
          }
        }
      })
    } catch (err) {
      WIKI.logger.warn(err)
      // Mark any jobs we had already claimed as interrupted.
      // NOTE(review): this update is not awaited — it may not finish before
      // shutdown. Confirm intent.
      if (jobIds && jobIds.length > 0) {
        WIKI.db.knex('jobHistory').whereIn('id', jobIds).update({
          state: 'interrupted',
          lastErrorMessage: err.message
        })
      }
    }
  },
  /**
   * Expand cron schedules (jobSchedule table) into concrete future `jobs`
   * rows. A single-row advisory lock ('cron' key in jobLock, refreshed at
   * most every 5 minutes, claimed with SKIP LOCKED) ensures only one
   * instance performs the expansion at a time.
   */
  async addScheduled() {
    try {
      await WIKI.db.transaction(async (trx) => {
        // -> Acquire lock
        const jobLock = await trx
          .update(jobLockTable)
          .set({
            lastCheckedBy: WIKI.INSTANCE_ID,
            lastCheckedAt: DateTime.utc().toISO()
          })
          .where(
            eq(
              jobLockTable.key,
              sql`(SELECT "jobLock"."key" FROM "jobLock" WHERE "jobLock"."key" = 'cron' AND "jobLock"."lastCheckedAt" <= ${DateTime.utc().minus({ minutes: 5 }).toISO()} FOR UPDATE SKIP LOCKED LIMIT 1)`
            )
          )

        if (jobLock.rowCount > 0) {
          WIKI.logger.info('Scheduling future planned jobs...')
          const scheduledJobs = await WIKI.db.select().from(jobScheduleTable)
          if (scheduledJobs?.length > 0) {
            // -> Get existing scheduled jobs
            const existingJobs = await WIKI.db
              .select()
              .from(jobsTable)
              .where(eq(jobsTable.isScheduled, true))
            let totalAdded = 0
            for (const job of scheduledJobs) {
              // -> Get next planned iterations (next ~24h, plus slack)
              const plannedIterations = CronExpressionParser.parse(job.cron, {
                startDate: DateTime.utc().toISO(),
                endDate: DateTime.utc().plus({ days: 1, minutes: 5 }).toISO(),
                tz: 'UTC'
              })
              // -> Add a maximum of 10 future iterations for a single task
              let addedFutureJobs = 0
              while (true) {
                try {
                  const next = plannedIterations.next()
                  // NOTE(review): `next` is used both as an iterator result
                  // (`next.value`, `next.done`) and as a date
                  // (`next.toISOString()`). cron-parser's `.next()` returns a
                  // CronDate unless the `iterator` option is set, so one of
                  // these usages must be wrong — the dedupe check below would
                  // throw into the catch and break. Confirm against the
                  // cron-parser version in use.
                  // -> Ensure this iteration isn't already scheduled
                  if (
                    !existingJobs.some(
                      (j) => j.task === job.task && j.waitUntil.getTime() === next.value.getTime()
                    )
                  ) {
                    // Not awaited; notify: false because these jobs are in
                    // the future and will be picked up by polling.
                    this.addJob({
                      task: job.task,
                      useWorker: !(typeof this.tasks[job.task] === 'function'),
                      payload: job.payload,
                      isScheduled: true,
                      waitUntil: next.toISOString(),
                      notify: false
                    })
                    addedFutureJobs++
                    totalAdded++
                  }
                  // -> No more iterations for this period or max iterations count reached
                  if (next.done || addedFutureJobs >= 10) {
                    break
                  }
                } catch {
                  // End of the cron iteration window.
                  break
                }
              }
            }
            if (totalAdded > 0) {
              WIKI.logger.info(`Scheduled ${totalAdded} new future planned jobs: [ OK ]`)
            } else {
              WIKI.logger.info('No new future planned jobs to schedule: [ OK ]')
            }
          }
        }
      })
    } catch (err) {
      WIKI.logger.warn(err)
    }
  },
  /**
   * Stop the timers and destroy the worker pool. Does not close the
   * pub/sub connection (owned by the shared pool).
   */
  async stop() {
    WIKI.logger.info('Stopping Scheduler...')
    clearInterval(this.scheduledRef)
    clearInterval(this.pollingRef)
    await this.workerPool.destroy()
    WIKI.logger.info('Scheduler: [ STOPPED ]')
  }
}
|
||||
@ -1,5 +1,5 @@
|
||||
import { defineRelations } from 'drizzle-orm'
|
||||
import * as schema from './schema.mjs'
|
||||
import * as schema from './schema.js'
|
||||
|
||||
export const relations = defineRelations(schema, (r) => ({
|
||||
users: {
|
||||
@ -0,0 +1,429 @@
|
||||
import { sql } from 'drizzle-orm'
|
||||
import {
|
||||
bigint,
|
||||
boolean,
|
||||
bytea,
|
||||
customType,
|
||||
index,
|
||||
integer,
|
||||
jsonb,
|
||||
pgEnum,
|
||||
pgTable,
|
||||
primaryKey,
|
||||
text,
|
||||
timestamp,
|
||||
uniqueIndex,
|
||||
uuid,
|
||||
varchar
|
||||
} from 'drizzle-orm/pg-core'
|
||||
|
||||
// == CUSTOM TYPES =====================

// Postgres `ltree` hierarchical-path type (used for locale/tree paths).
const ltree = customType({
  dataType() {
    return 'ltree'
  }
})
// Postgres `tsvector` full-text-search type.
const tsvector = customType({
  dataType() {
    return 'tsvector'
  }
})

// == TABLES ===========================

// API KEYS ----------------------------
// Long-lived API access tokens; revocable and time-limited.
export const apiKeys = pgTable('apiKeys', {
  id: uuid().primaryKey().defaultRandom(),
  name: varchar({ length: 255 }).notNull(),
  key: text().notNull(),
  expiration: timestamp().notNull().defaultNow(),
  isRevoked: boolean().notNull().default(false),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})

// ASSETS ------------------------------
export const assetKindEnum = pgEnum('assetKind', ['document', 'image', 'other'])
// Uploaded files. Binary payload and optional preview are stored inline
// (bytea); `storageInfo` presumably describes an external storage target —
// confirm against the storage module.
export const assets = pgTable(
  'assets',
  {
    id: uuid().primaryKey().defaultRandom(),
    fileName: varchar({ length: 255 }).notNull(),
    fileExt: varchar({ length: 255 }).notNull(),
    isSystem: boolean().notNull().default(false),
    kind: assetKindEnum().notNull().default('other'),
    mimeType: varchar({ length: 255 }).notNull().default('application/octet-stream'),
    fileSize: bigint({ mode: 'number' }), // in bytes
    meta: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    data: bytea(),
    preview: bytea(),
    storageInfo: jsonb(),
    authorId: uuid()
      .notNull()
      .references(() => users.id),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [index('assets_siteId_idx').on(table.siteId)]
)

// AUTHENTICATION ----------------------
// Configured authentication strategy instances.
export const authentication = pgTable('authentication', {
  id: uuid().primaryKey().defaultRandom(),
  module: varchar({ length: 255 }).notNull(),
  isEnabled: boolean().notNull().default(false),
  displayName: varchar({ length: 255 }).notNull().default(''),
  config: jsonb().notNull().default({}),
  registration: boolean().notNull().default(false), // allow self-registration
  allowedEmailRegex: varchar({ length: 255 }).notNull().default(''),
  autoEnrollGroups: uuid().array().default([]) // group ids granted on signup
})

// BLOCKS ------------------------------
// Content blocks available to a site.
export const blocks = pgTable(
  'blocks',
  {
    id: uuid().primaryKey().defaultRandom(),
    block: varchar({ length: 255 }).notNull(),
    name: varchar({ length: 255 }).notNull(),
    description: varchar({ length: 255 }).notNull(),
    icon: varchar({ length: 255 }).notNull(),
    isEnabled: boolean().notNull().default(false),
    isCustom: boolean().notNull().default(false),
    config: jsonb().notNull().default({}),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [index('blocks_siteId_idx').on(table.siteId)]
)

// GROUPS ------------------------------
// User groups carrying permissions and page-access rules.
export const groups = pgTable('groups', {
  id: uuid().primaryKey().defaultRandom(),
  name: varchar({ length: 255 }).notNull(),
  permissions: jsonb().notNull(),
  rules: jsonb().notNull(),
  redirectOnLogin: varchar({ length: 255 }).notNull().default(''),
  redirectOnFirstLogin: varchar({ length: 255 }).notNull().default(''),
  redirectOnLogout: varchar({ length: 255 }).notNull().default(''),
  isSystem: boolean().notNull().default(false),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})
|
||||
|
||||
// JOB HISTORY -------------------------
export const jobHistoryStateEnum = pgEnum('jobHistoryState', [
  'active',
  'completed',
  'failed',
  'interrupted'
])
// Execution record for scheduler jobs. Rows are inserted in 'active' state
// when a job is claimed and updated to a terminal state afterwards (see the
// scheduler's processJob).
export const jobHistory = pgTable('jobHistory', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  state: jobHistoryStateEnum().notNull(),
  useWorker: boolean().notNull().default(false),
  wasScheduled: boolean().notNull().default(false),
  payload: jsonb(),
  attempt: integer().notNull().default(1),
  maxRetries: integer().notNull().default(0),
  lastErrorMessage: text(),
  executedBy: varchar({ length: 255 }),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow(),
  // FIX: was `timestamp().notNull()` with no default — the scheduler inserts
  // history rows in 'active' state before the job finishes (and failed /
  // interrupted jobs never get a completion time), which would violate the
  // NOT NULL constraint. Must be nullable.
  completedAt: timestamp()
})
|
||||
|
||||
// JOB SCHEDULE ------------------------
// Recurring job definitions; `cron` is expanded into concrete `jobs` rows
// by the scheduler (see addScheduled).
export const jobSchedule = pgTable('jobSchedule', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  cron: varchar({ length: 255 }).notNull(),
  type: varchar({ length: 255 }).notNull().default('system'),
  payload: jsonb(),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})

// JOB LOCK ----------------------------
// Single-row advisory lock (key 'cron') so only one instance expands the
// cron schedule at a time.
export const jobLock = pgTable('jobLock', {
  key: varchar({ length: 255 }).primaryKey(),
  lastCheckedBy: varchar({ length: 255 }),
  lastCheckedAt: timestamp().notNull().defaultNow()
})

// JOBS --------------------------------
// Pending job queue; rows are deleted when claimed by an instance.
export const jobs = pgTable('jobs', {
  id: uuid().primaryKey().defaultRandom(),
  task: varchar({ length: 255 }).notNull(),
  useWorker: boolean().notNull().default(false),
  payload: jsonb(),
  retries: integer().notNull().default(0), // attempts already made
  maxRetries: integer().notNull().default(0),
  waitUntil: timestamp(), // earliest allowed execution time (null = now)
  isScheduled: boolean().notNull().default(false), // originated from jobSchedule
  createdBy: varchar({ length: 255 }),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})

// LOCALES -----------------------------
// Installed UI locales and their translation strings.
export const locales = pgTable(
  'locales',
  {
    code: varchar({ length: 255 }).primaryKey(),
    name: varchar({ length: 255 }).notNull(),
    nativeName: varchar({ length: 255 }).notNull(),
    language: varchar({ length: 8 }).notNull(), // Unicode language subtag
    region: varchar({ length: 3 }).notNull(), // Unicode region subtag
    script: varchar({ length: 4 }).notNull(), // Unicode script subtag
    isRTL: boolean().notNull().default(false),
    strings: jsonb().notNull().default([]),
    completeness: integer().notNull().default(0), // presumably a percentage — confirm
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow()
  },
  (table) => [index('locales_language_idx').on(table.language)]
)

// NAVIGATION --------------------------
// Per-site navigation definitions, stored as a JSON item list.
export const navigation = pgTable(
  'navigation',
  {
    id: uuid().primaryKey().defaultRandom(),
    items: jsonb().notNull().default([]),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [index('navigation_siteId_idx').on(table.siteId)]
)
|
||||
|
||||
// PAGES ------------------------------
export const pagePublishStateEnum = pgEnum('pagePublishState', ['draft', 'published', 'scheduled'])
// Wiki pages: content, render cache, search vector and ownership references.
export const pages = pgTable(
  'pages',
  {
    id: uuid().primaryKey().defaultRandom(),
    locale: ltree('locale').notNull(),
    path: varchar({ length: 255 }).notNull(),
    hash: varchar({ length: 255 }).notNull(),
    alias: varchar({ length: 255 }),
    title: varchar({ length: 255 }).notNull(),
    description: varchar({ length: 255 }),
    icon: varchar({ length: 255 }),
    publishState: pagePublishStateEnum('publishState').notNull().default('draft'),
    publishStartDate: timestamp(),
    publishEndDate: timestamp(),
    config: jsonb().notNull().default({}),
    relations: jsonb().notNull().default([]),
    content: text(), // raw source
    render: text(), // cached rendered output
    searchContent: text(),
    ts: tsvector('ts'), // full-text search vector
    tags: text()
      .array()
      .notNull()
      .default(sql`ARRAY[]::text[]`),
    toc: jsonb(),
    editor: varchar({ length: 255 }).notNull(),
    contentType: varchar({ length: 255 }).notNull(),
    isBrowsable: boolean().notNull().default(true),
    isSearchable: boolean().notNull().default(true),
    // Derived flag so search only has to filter on a single indexed column.
    isSearchableComputed: boolean('isSearchableComputed').generatedAlwaysAs(
      () => sql`${pages.publishState} != 'draft' AND ${pages.isSearchable}`
    ),
    password: varchar({ length: 255 }),
    ratingScore: integer().notNull().default(0),
    // FIX: was `timestamp().notNull().defaultNow()` — a rating *count* is a
    // number, not a datetime (cf. ratingScore above and tags.usageCount).
    ratingCount: integer().notNull().default(0),
    scripts: jsonb().notNull().default({}),
    historyData: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    authorId: uuid()
      .notNull()
      .references(() => users.id),
    creatorId: uuid()
      .notNull()
      .references(() => users.id),
    ownerId: uuid()
      .notNull()
      .references(() => users.id),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('pages_authorId_idx').on(table.authorId),
    index('pages_creatorId_idx').on(table.creatorId),
    index('pages_ownerId_idx').on(table.ownerId),
    index('pages_siteId_idx').on(table.siteId),
    index('pages_ts_idx').using('gin', table.ts),
    index('pages_tags_idx').using('gin', table.tags),
    index('pages_isSearchableComputed_idx').on(table.isSearchableComputed)
  ]
)
|
||||
|
||||
// SETTINGS ----------------------------
// Global key/value configuration store.
export const settings = pgTable('settings', {
  key: varchar({ length: 255 }).notNull().primaryKey(),
  value: jsonb().notNull().default({})
})

// SESSIONS ----------------------------
// Server-side user sessions.
export const sessions = pgTable(
  'sessions',
  {
    id: varchar({ length: 255 }).primaryKey(),
    userId: uuid().references(() => users.id), // nullable — presumably anonymous sessions; confirm
    data: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow()
  },
  (table) => [index('sessions_userId_idx').on(table.userId)]
)

// SITES -------------------------------
// Hosted sites, keyed by unique hostname.
export const sites = pgTable('sites', {
  id: uuid().primaryKey().defaultRandom(),
  hostname: varchar({ length: 255 }).notNull().unique(),
  isEnabled: boolean().notNull().default(false),
  config: jsonb().notNull(),
  createdAt: timestamp().notNull().defaultNow()
})

// TAGS --------------------------------
// Page tags, unique per site, with a denormalized usage counter.
export const tags = pgTable(
  'tags',
  {
    id: uuid().primaryKey().defaultRandom(),
    tag: varchar({ length: 255 }).notNull(),
    usageCount: integer().notNull().default(0),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('tags_siteId_idx').on(table.siteId),
    uniqueIndex('tags_composite_idx').on(table.siteId, table.tag)
  ]
)
|
||||
|
||||
// TREE --------------------------------
export const treeTypeEnum = pgEnum('treeType', ['folder', 'page', 'asset'])
export const treeNavigationModeEnum = pgEnum('treeNavigationMode', [
  'inherit',
  'override',
  'overrideExact',
  'hide',
  'hideExact'
])
// Site content tree: one row per folder / page / asset node.
export const tree = pgTable(
  'tree',
  {
    id: uuid().primaryKey().defaultRandom(),
    folderPath: ltree('folderPath'), // parent path; nullable (root-level nodes)
    fileName: varchar({ length: 255 }).notNull(),
    hash: varchar({ length: 255 }).notNull(),
    // FIX: was `treeTypeEnum('tree')`, which names the DB column "tree" while
    // the key and the 'tree_type_idx' index both say `type`. Every other
    // explicitly-named column here ('locale', 'navigationMode') matches its
    // key — this was a copy-paste slip; name the column 'type'.
    type: treeTypeEnum('type').notNull(),
    locale: ltree('locale').notNull(),
    title: varchar({ length: 255 }).notNull(),
    navigationMode: treeNavigationModeEnum('navigationMode').notNull().default('inherit'),
    navigationId: uuid(),
    tags: text()
      .array()
      .notNull()
      .default(sql`ARRAY[]::text[]`),
    meta: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    siteId: uuid()
      .notNull()
      .references(() => sites.id)
  },
  (table) => [
    index('tree_folderpath_idx').on(table.folderPath),
    index('tree_folderpath_gist_idx').using('gist', table.folderPath),
    index('tree_fileName_idx').on(table.fileName),
    index('tree_hash_idx').on(table.hash),
    index('tree_type_idx').on(table.type),
    index('tree_locale_idx').using('gist', table.locale),
    index('tree_navigationMode_idx').on(table.navigationMode),
    index('tree_navigationId_idx').on(table.navigationId),
    index('tree_tags_idx').using('gin', table.tags),
    index('tree_siteId_idx').on(table.siteId)
  ]
)
|
||||
|
||||
// USER AVATARS ------------------------
|
||||
export const userAvatars = pgTable('userAvatars', {
|
||||
id: uuid().primaryKey(),
|
||||
data: bytea().notNull()
|
||||
})
|
||||
|
||||
// USER KEYS ---------------------------
|
||||
export const userKeys = pgTable(
|
||||
'userKeys',
|
||||
{
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
kind: varchar({ length: 255 }).notNull(),
|
||||
token: varchar({ length: 255 }).notNull(),
|
||||
meta: jsonb().notNull().default({}),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
validUntil: timestamp().notNull(),
|
||||
userId: uuid()
|
||||
.notNull()
|
||||
.references(() => users.id)
|
||||
},
|
||||
(table) => [index('userKeys_userId_idx').on(table.userId)]
|
||||
)
|
||||
|
||||
// USERS -------------------------------
|
||||
export const users = pgTable(
|
||||
'users',
|
||||
{
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
email: varchar({ length: 255 }).notNull().unique(),
|
||||
name: varchar({ length: 255 }).notNull(),
|
||||
auth: jsonb().notNull().default({}),
|
||||
meta: jsonb().notNull().default({}),
|
||||
passkeys: jsonb().notNull().default({}),
|
||||
prefs: jsonb().notNull().default({}),
|
||||
hasAvatar: boolean().notNull().default(false),
|
||||
isActive: boolean().notNull().default(false),
|
||||
isSystem: boolean().notNull().default(false),
|
||||
isVerified: boolean().notNull().default(false),
|
||||
lastLoginAt: timestamp(),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
},
|
||||
(table) => [index('users_lastLoginAt_idx').on(table.lastLoginAt)]
|
||||
)
|
||||
|
||||
// == RELATION TABLES ==================
|
||||
|
||||
// USER GROUPS -------------------------
|
||||
// Many-to-many join between users and groups. Rows cascade away when either
// side is deleted; the composite primary key prevents duplicate memberships.
export const userGroups = pgTable('userGroups', {
  userId: uuid().notNull().references(() => users.id, { onDelete: 'cascade' }),
  groupId: uuid().notNull().references(() => groups.id, { onDelete: 'cascade' })
}, (t) => [
  primaryKey({ columns: [t.userId, t.groupId] }),
  index('userGroups_userId_idx').on(t.userId),
  index('userGroups_groupId_idx').on(t.groupId),
  index('userGroups_composite_idx').on(t.userId, t.groupId)
])
|
||||
@ -1,338 +0,0 @@
|
||||
import { sql } from 'drizzle-orm'
|
||||
import { bigint, boolean, bytea, customType, index, integer, jsonb, pgEnum, pgTable, primaryKey, text, timestamp, uniqueIndex, uuid, varchar } from 'drizzle-orm/pg-core'
|
||||
|
||||
// == CUSTOM TYPES =====================
|
||||
|
||||
// Postgres extension types that drizzle does not ship out of the box.

// `ltree` — hierarchical label path (used for locale and folder paths).
const ltree = customType({
  dataType: () => 'ltree'
})

// `tsvector` — full-text search document vector.
const tsvector = customType({
  dataType: () => 'tsvector'
})
|
||||
|
||||
// == TABLES ===========================
|
||||
|
||||
// API KEYS ----------------------------
|
||||
// API access keys. Revocation is a soft flag (`isRevoked`) rather than a
// row delete, so the key history is preserved.
export const apiKeys = pgTable('apiKeys', {
  id: uuid().primaryKey().defaultRandom(),
  name: varchar({ length: 255 }).notNull(),
  key: text().notNull(),
  // NOTE(review): defaultNow() means an unset expiration is already expired —
  // presumably callers always supply a real expiry on insert; verify.
  expiration: timestamp().notNull().defaultNow(),
  isRevoked: boolean().notNull().default(false),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow()
})
|
||||
|
||||
// ASSETS ------------------------------
|
||||
// Coarse asset classification used for filtering in the UI.
export const assetKindEnum = pgEnum('assetKind', ['document', 'image', 'other'])

// Uploaded files. The binary payload (`data`) and optional `preview` live
// in-row as bytea; `storageInfo` is nullable metadata for external storage.
export const assets = pgTable(
  'assets',
  {
    id: uuid().primaryKey().defaultRandom(),
    fileName: varchar({ length: 255 }).notNull(),
    fileExt: varchar({ length: 255 }).notNull(),
    isSystem: boolean().notNull().default(false),
    kind: assetKindEnum().notNull().default('other'),
    mimeType: varchar({ length: 255 }).notNull().default('application/octet-stream'),
    fileSize: bigint({ mode: 'number' }), // in bytes
    meta: jsonb().notNull().default({}),
    createdAt: timestamp().notNull().defaultNow(),
    updatedAt: timestamp().notNull().defaultNow(),
    data: bytea(),
    preview: bytea(),
    storageInfo: jsonb(),
    authorId: uuid().notNull().references(() => users.id),
    siteId: uuid().notNull().references(() => sites.id)
  },
  (t) => [index('assets_siteId_idx').on(t.siteId)]
)
|
||||
|
||||
// AUTHENTICATION ----------------------
|
||||
export const authentication = pgTable('authentication', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
module: varchar({ length: 255 }).notNull(),
|
||||
isEnabled: boolean().notNull().default(false),
|
||||
displayName: varchar({ length: 255 }).notNull().default(''),
|
||||
config: jsonb().notNull().default({}),
|
||||
registration: boolean().notNull().default(false),
|
||||
allowedEmailRegex: varchar({ length: 255 }).notNull().default(''),
|
||||
autoEnrollGroups: uuid().array().default([])
|
||||
})
|
||||
|
||||
// BLOCKS ------------------------------
|
||||
export const blocks = pgTable('blocks', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
block: varchar({ length: 255 }).notNull(),
|
||||
name: varchar({ length: 255 }).notNull(),
|
||||
description: varchar({ length: 255 }).notNull(),
|
||||
icon: varchar({ length: 255 }).notNull(),
|
||||
isEnabled: boolean().notNull().default(false),
|
||||
isCustom: boolean().notNull().default(false),
|
||||
config: jsonb().notNull().default({}),
|
||||
siteId: uuid().notNull().references(() => sites.id),
|
||||
}, (table) => [
|
||||
index('blocks_siteId_idx').on(table.siteId)
|
||||
])
|
||||
|
||||
// GROUPS ------------------------------
|
||||
export const groups = pgTable('groups', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
name: varchar({ length: 255 }).notNull(),
|
||||
permissions: jsonb().notNull(),
|
||||
rules: jsonb().notNull(),
|
||||
redirectOnLogin: varchar({ length: 255 }).notNull().default(''),
|
||||
redirectOnFirstLogin: varchar({ length: 255 }).notNull().default(''),
|
||||
redirectOnLogout: varchar({ length: 255 }).notNull().default(''),
|
||||
isSystem: boolean().notNull().default(false),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
})
|
||||
|
||||
// JOB HISTORY -------------------------
|
||||
export const jobHistoryStateEnum = pgEnum('jobHistoryState', ['active', 'completed', 'failed', 'interrupted'])
|
||||
export const jobHistory = pgTable('jobHistory', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
task: varchar({ length: 255 }).notNull(),
|
||||
state: jobHistoryStateEnum().notNull(),
|
||||
useWorker: boolean().notNull().default(false),
|
||||
wasScheduled: boolean().notNull().default(false),
|
||||
payload: jsonb().notNull(),
|
||||
attempt: integer().notNull().default(1),
|
||||
maxRetries: integer().notNull().default(0),
|
||||
lastErrorMessage: text(),
|
||||
executedBy: varchar({ length: 255 }),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow(),
|
||||
completedAt: timestamp().notNull()
|
||||
})
|
||||
|
||||
// JOB SCHEDULE ------------------------
|
||||
export const jobSchedule = pgTable('jobSchedule', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
task: varchar({ length: 255 }).notNull(),
|
||||
cron: varchar({ length: 255 }).notNull(),
|
||||
type: varchar({ length: 255 }).notNull().default('system'),
|
||||
payload: jsonb().notNull(),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
})
|
||||
|
||||
// JOB LOCK ----------------------------
|
||||
export const jobLock = pgTable('jobLock', {
|
||||
key: varchar({ length: 255 }).primaryKey(),
|
||||
lastCheckedBy: varchar({ length: 255 }),
|
||||
lastCheckedAt: timestamp().notNull().defaultNow()
|
||||
})
|
||||
|
||||
// JOBS --------------------------------
|
||||
export const jobs = pgTable('jobs', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
task: varchar({ length: 255 }).notNull(),
|
||||
useWorker: boolean().notNull().default(false),
|
||||
payload: jsonb().notNull(),
|
||||
retries: integer().notNull().default(0),
|
||||
maxRetries: integer().notNull().default(0),
|
||||
waitUntil: timestamp(),
|
||||
isScheduled: boolean().notNull().default(false),
|
||||
createdBy: varchar({ length: 255 }),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
})
|
||||
|
||||
// LOCALES -----------------------------
|
||||
export const locales = pgTable('locales', {
|
||||
code: varchar({ length: 255 }).primaryKey(),
|
||||
name: varchar({ length: 255 }).notNull(),
|
||||
nativeName: varchar({ length: 255 }).notNull(),
|
||||
language: varchar({ length: 8 }).notNull(), // Unicode language subtag
|
||||
region: varchar({ length: 3 }).notNull(), // Unicode region subtag
|
||||
script: varchar({ length: 4 }).notNull(), // Unicode script subtag
|
||||
isRTL: boolean().notNull().default(false),
|
||||
strings: jsonb().notNull().default([]),
|
||||
completeness: integer().notNull().default(0),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
}, (table) => [
|
||||
index('locales_language_idx').on(table.language)
|
||||
])
|
||||
|
||||
// NAVIGATION --------------------------
|
||||
export const navigation = pgTable('navigation', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
items: jsonb().notNull().default([]),
|
||||
siteId: uuid().notNull().references(() => sites.id),
|
||||
}, (table) => [
|
||||
index('navigation_siteId_idx').on(table.siteId)
|
||||
])
|
||||
|
||||
// PAGES ------------------------------
|
||||
// Publish lifecycle for a page.
export const pagePublishStateEnum = pgEnum('pagePublishState', ['draft', 'published', 'scheduled'])

// Wiki pages. Content is stored three ways: raw (`content`), rendered HTML
// (`render`) and a plain-text copy for search (`searchContent` + `ts` vector).
export const pages = pgTable('pages', {
  id: uuid().primaryKey().defaultRandom(),
  locale: ltree('locale').notNull(),
  path: varchar({ length: 255 }).notNull(),
  hash: varchar({ length: 255 }).notNull(),
  alias: varchar({ length: 255 }),
  title: varchar({ length: 255 }).notNull(),
  description: varchar({ length: 255 }),
  icon: varchar({ length: 255 }),
  publishState: pagePublishStateEnum('publishState').notNull().default('draft'),
  publishStartDate: timestamp(),
  publishEndDate: timestamp(),
  config: jsonb().notNull().default({}),
  relations: jsonb().notNull().default([]),
  content: text(),
  render: text(),
  searchContent: text(),
  ts: tsvector('ts'),
  tags: text().array().notNull().default(sql`ARRAY[]::text[]`),
  toc: jsonb(),
  editor: varchar({ length: 255 }).notNull(),
  contentType: varchar({ length: 255 }).notNull(),
  isBrowsable: boolean().notNull().default(true),
  isSearchable: boolean().notNull().default(true),
  // Generated column: a page is only effectively searchable once it leaves draft state.
  isSearchableComputed: boolean('isSearchableComputed').generatedAlwaysAs(() => sql`${pages.publishState} != 'draft' AND ${pages.isSearchable}`),
  password: varchar({ length: 255 }),
  ratingScore: integer().notNull().default(0),
  // FIX: was `timestamp().notNull().defaultNow()` — a rating *count* is a
  // number (mirroring ratingScore above), not a date.
  ratingCount: integer().notNull().default(0),
  scripts: jsonb().notNull().default({}),
  historyData: jsonb().notNull().default({}),
  createdAt: timestamp().notNull().defaultNow(),
  updatedAt: timestamp().notNull().defaultNow(),
  authorId: uuid().notNull().references(() => users.id),
  creatorId: uuid().notNull().references(() => users.id),
  ownerId: uuid().notNull().references(() => users.id),
  siteId: uuid().notNull().references(() => sites.id)
}, (table) => [
  index('pages_authorId_idx').on(table.authorId),
  index('pages_creatorId_idx').on(table.creatorId),
  index('pages_ownerId_idx').on(table.ownerId),
  index('pages_siteId_idx').on(table.siteId),
  index('pages_ts_idx').using('gin', table.ts),
  index('pages_tags_idx').using('gin', table.tags),
  index('pages_isSearchableComputed_idx').on(table.isSearchableComputed)
])
|
||||
|
||||
// SETTINGS ----------------------------
|
||||
export const settings = pgTable('settings', {
|
||||
key: varchar({ length: 255 }).notNull().primaryKey(),
|
||||
value: jsonb().notNull().default({})
|
||||
})
|
||||
|
||||
// SESSIONS ----------------------------
|
||||
export const sessions = pgTable('sessions', {
|
||||
id: varchar({ length: 255 }).primaryKey(),
|
||||
userId: uuid().references(() => users.id),
|
||||
data: jsonb().notNull().default({}),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow(),
|
||||
}, (table) => [
|
||||
index('sessions_userId_idx').on(table.userId)
|
||||
])
|
||||
|
||||
// SITES -------------------------------
|
||||
export const sites = pgTable('sites', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
hostname: varchar({ length: 255 }).notNull().unique(),
|
||||
isEnabled: boolean().notNull().default(false),
|
||||
config: jsonb().notNull(),
|
||||
createdAt: timestamp().notNull().defaultNow()
|
||||
})
|
||||
|
||||
// TAGS --------------------------------
|
||||
export const tags = pgTable('tags', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
tag: varchar({ length: 255 }).notNull(),
|
||||
usageCount: integer().notNull().default(0),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow(),
|
||||
siteId: uuid().notNull().references(() => sites.id)
|
||||
}, (table) => [
|
||||
index('tags_siteId_idx').on(table.siteId),
|
||||
uniqueIndex('tags_composite_idx').on(table.siteId, table.tag)
|
||||
])
|
||||
|
||||
// TREE --------------------------------
|
||||
export const treeTypeEnum = pgEnum('treeType', ['folder', 'page', 'asset'])
|
||||
export const treeNavigationModeEnum = pgEnum('treeNavigationMode', ['inherit', 'override', 'overrideExact', 'hide', 'hideExact'])
|
||||
export const tree = pgTable('tree', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
folderPath: ltree('folderPath'),
|
||||
fileName: varchar({ length: 255 }).notNull(),
|
||||
hash: varchar({ length: 255 }).notNull(),
|
||||
type: treeTypeEnum('tree').notNull(),
|
||||
locale: ltree('locale').notNull(),
|
||||
title: varchar({ length: 255 }).notNull(),
|
||||
navigationMode: treeNavigationModeEnum('navigationMode').notNull().default('inherit'),
|
||||
navigationId: uuid(),
|
||||
tags: text().array().notNull().default(sql`ARRAY[]::text[]`),
|
||||
meta: jsonb().notNull().default({}),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow(),
|
||||
siteId: uuid().notNull().references(() => sites.id)
|
||||
}, (table) => [
|
||||
index('tree_folderpath_idx').on(table.folderPath),
|
||||
index('tree_folderpath_gist_idx').using('gist', table.folderPath),
|
||||
index('tree_fileName_idx').on(table.fileName),
|
||||
index('tree_hash_idx').on(table.hash),
|
||||
index('tree_type_idx').on(table.type),
|
||||
index('tree_locale_idx').using('gist', table.locale),
|
||||
index('tree_navigationMode_idx').on(table.navigationMode),
|
||||
index('tree_navigationId_idx').on(table.navigationId),
|
||||
index('tree_tags_idx').using('gin', table.tags),
|
||||
index('tree_siteId_idx').on(table.siteId)
|
||||
])
|
||||
|
||||
// USER AVATARS ------------------------
|
||||
export const userAvatars = pgTable('userAvatars', {
|
||||
id: uuid().primaryKey(),
|
||||
data: bytea().notNull()
|
||||
})
|
||||
|
||||
// USER KEYS ---------------------------
|
||||
export const userKeys = pgTable('userKeys', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
kind: varchar({ length: 255 }).notNull(),
|
||||
token: varchar({ length: 255 }).notNull(),
|
||||
meta: jsonb().notNull().default({}),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
validUntil: timestamp().notNull(),
|
||||
userId: uuid().notNull().references(() => users.id)
|
||||
}, (table) => [
|
||||
index('userKeys_userId_idx').on(table.userId)
|
||||
])
|
||||
|
||||
// USERS -------------------------------
|
||||
export const users = pgTable('users', {
|
||||
id: uuid().primaryKey().defaultRandom(),
|
||||
email: varchar({ length: 255 }).notNull().unique(),
|
||||
name: varchar({ length: 255 }).notNull(),
|
||||
auth: jsonb().notNull().default({}),
|
||||
meta: jsonb().notNull().default({}),
|
||||
passkeys: jsonb().notNull().default({}),
|
||||
prefs: jsonb().notNull().default({}),
|
||||
hasAvatar: boolean().notNull().default(false),
|
||||
isActive: boolean().notNull().default(false),
|
||||
isSystem: boolean().notNull().default(false),
|
||||
isVerified: boolean().notNull().default(false),
|
||||
lastLoginAt: timestamp(),
|
||||
createdAt: timestamp().notNull().defaultNow(),
|
||||
updatedAt: timestamp().notNull().defaultNow()
|
||||
}, (table) => [
|
||||
index('users_lastLoginAt_idx').on(table.lastLoginAt)
|
||||
])
|
||||
|
||||
// == RELATION TABLES ==================
|
||||
|
||||
// USER GROUPS -------------------------
|
||||
export const userGroups = pgTable('userGroups', {
|
||||
userId: uuid().notNull().references(() => users.id, { onDelete: 'cascade' }),
|
||||
groupId: uuid().notNull().references(() => groups.id, { onDelete: 'cascade' })
|
||||
}, (table) => [
|
||||
primaryKey({ columns: [table.userId, table.groupId] }),
|
||||
index('userGroups_userId_idx').on(table.userId),
|
||||
index('userGroups_groupId_idx').on(table.groupId),
|
||||
index('userGroups_composite_idx').on(table.userId, table.groupId)
|
||||
])
|
||||
@ -1,11 +1,11 @@
|
||||
import { v4 as uuid } from 'uuid'
|
||||
import { groups as groupsTable } from '../db/schema.mjs'
|
||||
import { groups as groupsTable } from '../db/schema.js'
|
||||
|
||||
/**
|
||||
* Groups model
|
||||
*/
|
||||
class Groups {
|
||||
async init (ids) {
|
||||
async init(ids) {
|
||||
WIKI.logger.info('Inserting default groups...')
|
||||
|
||||
await WIKI.db.insert(groupsTable).values([
|
||||
@ -0,0 +1,19 @@
|
||||
// Aggregates every data model into a single object — loaded once at startup
// and exposed to the rest of the app (e.g. as WIKI.models).
import { authentication } from './authentication.js'
import { groups } from './groups.js'
import { jobs } from './jobs.js'
import { locales } from './locales.js'
import { sessions } from './sessions.js'
import { settings } from './settings.js'
import { sites } from './sites.js'
import { users } from './users.js'

export default {
  authentication,
  groups,
  jobs,
  locales,
  sessions,
  settings,
  sites,
  users
}
|
||||
@ -1,17 +0,0 @@
|
||||
import { authentication } from './authentication.mjs'
|
||||
import { groups } from './groups.mjs'
|
||||
import { locales } from './locales.mjs'
|
||||
import { sessions } from './sessions.mjs'
|
||||
import { settings } from './settings.mjs'
|
||||
import { sites } from './sites.mjs'
|
||||
import { users } from './users.mjs'
|
||||
|
||||
export default {
|
||||
authentication,
|
||||
groups,
|
||||
locales,
|
||||
sessions,
|
||||
settings,
|
||||
sites,
|
||||
users
|
||||
}
|
||||
@ -0,0 +1,45 @@
|
||||
import { DateTime } from 'luxon'
|
||||
import { jobSchedule as jobScheduleTable, jobLock as jobLockTable } from '../db/schema.js'
|
||||
|
||||
/**
|
||||
* Jobs model
|
||||
*/
|
||||
class Jobs {
  /**
   * Seed the default scheduled jobs and the cron job lock.
   * Intended to run once, during initial setup.
   */
  async init() {
    WIKI.logger.info('Inserting scheduled jobs...')

    // Default system cron entries. refreshAutocomplete is intentionally
    // left disabled (commented out in the original schedule).
    const defaultSchedules = [
      { task: 'checkVersion', cron: '0 0 * * *', type: 'system' },
      { task: 'cleanJobHistory', cron: '5 0 * * *', type: 'system' },
      // { task: 'refreshAutocomplete', cron: '0 */6 * * *', type: 'system' },
      { task: 'updateLocales', cron: '0 0 * * *', type: 'system' }
    ]
    await WIKI.db.insert(jobScheduleTable).values(defaultSchedules)

    // Seed the cron lock one hour in the past so the first scheduler pass
    // can claim it immediately.
    await WIKI.db.insert(jobLockTable).values({
      key: 'cron',
      lastCheckedBy: 'init',
      lastCheckedAt: DateTime.utc().minus({ hours: 1 }).toISO()
    })
  }
}

export const jobs = new Jobs()
|
||||
@ -0,0 +1,129 @@
|
||||
import { stat, readFile } from 'node:fs/promises'
|
||||
import path from 'node:path'
|
||||
import { DateTime } from 'luxon'
|
||||
import { locales as localesTable } from '../db/schema.js'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
|
||||
/**
|
||||
* Locales model
|
||||
*/
|
||||
class Locales {
  /**
   * Sync locale strings from the JSON files on disk into the DB.
   * A locale is (re)loaded when it is missing from the DB, when its file on
   * disk is newer than the DB row, or when `force` is set.
   * @param {Object} [opts]
   * @param {boolean} [opts.force=false] - Reload every locale regardless of timestamps.
   * @returns {Promise<boolean|undefined>} `false` on failure, otherwise undefined.
   */
  async refreshFromDisk({ force = false } = {}) {
    try {
      const localesMeta = (await import('../locales/metadata.js')).default
      WIKI.logger.info(`Found ${localesMeta.languages.length} locales [ OK ]`)

      const dbLocales = await WIKI.db.select({
        code: localesTable.code,
        updatedAt: localesTable.updatedAt
      }).from(localesTable).orderBy(localesTable.code)

      let localFilesSkipped = 0
      for (const lang of localesMeta.languages) {
        // -> Locale code is built from language / region / script subtags
        const langFilenameParts = [lang.language]
        if (lang.region) {
          langFilenameParts.push(lang.region)
        }
        if (lang.script) {
          langFilenameParts.push(lang.script)
        }
        const langFilename = langFilenameParts.join('-')

        // -> Existing DB row for this locale, if any
        const dbLang = dbLocales.find(l => l.code === langFilename)

        // -> Compare against the strings file on disk
        const flPath = path.join(WIKI.SERVERPATH, `locales/${langFilename}.json`)
        try {
          const flStat = await stat(flPath)
          const flUpdatedAt = DateTime.fromJSDate(flStat.mtime)

          // -> Upsert strings when the DB copy is absent, stale, or forced
          if (!dbLang || DateTime.fromJSDate(dbLang.updatedAt) < flUpdatedAt || force) {
            WIKI.logger.info(`Loading locale ${langFilename} into DB...`)
            const flStrings = JSON.parse(await readFile(flPath, 'utf8'))
            await WIKI.db.insert(localesTable).values({
              code: langFilename,
              name: lang.name,
              nativeName: lang.localizedName,
              language: lang.language,
              region: lang.region,
              script: lang.script,
              isRTL: lang.isRtl,
              strings: flStrings
            }).onConflictDoUpdate({
              target: localesTable.code,
              set: { strings: flStrings, updatedAt: sql`now()` }
            })
            WIKI.logger.info(`Locale ${langFilename} loaded successfully. [ OK ]`)
          } else {
            WIKI.logger.info(`Locale ${langFilename} is newer in the DB. Skipping disk version. [ OK ]`)
          }
        } catch {
          // Strings file missing or unreadable — count and move on.
          localFilesSkipped++
          WIKI.logger.warn(`Locale ${langFilename} not found on disk. Missing strings file. [ SKIPPED ]`)
        }
      }
      if (localFilesSkipped > 0) {
        WIKI.logger.warn(`${localFilesSkipped} locales were defined in the metadata file but not found on disk. [ SKIPPED ]`)
      }
    } catch (err) {
      WIKI.logger.warn('Failed to load locales from disk: [ FAILED ]')
      WIKI.logger.warn(err)
      return false
    }
  }

  /**
   * Fetch the list of locales (without strings), cached under `locales`
   * plus one `locale:<code>` entry per locale.
   * @param {Object} [opts]
   * @param {boolean} [opts.cache=true] - When false, bypass and refresh the cache.
   * @returns {Promise<Array>} Locale descriptors.
   */
  async getLocales({ cache = true } = {}) {
    if (!WIKI.cache.has('locales') || !cache) {
      const locales = await WIKI.db.select({
        code: localesTable.code,
        isRTL: localesTable.isRTL,
        language: localesTable.language,
        name: localesTable.name,
        nativeName: localesTable.nativeName,
        createdAt: localesTable.createdAt,
        updatedAt: localesTable.updatedAt,
        completeness: localesTable.completeness
      }).from(localesTable).orderBy(localesTable.code)
      WIKI.cache.set('locales', locales)
      for (const locale of locales) {
        WIKI.cache.set(`locale:${locale.code}`, locale)
      }
    }
    return WIKI.cache.get('locales')
  }

  /**
   * Fetch the translation strings for a single locale code.
   * @param {string} locale - Locale code (e.g. `en` or `pt-BR`).
   * @returns {Promise<Object|Array>} Strings blob, or `[]` when not found.
   */
  async getStrings(locale) {
    const results = await WIKI.db.select({
      strings: localesTable.strings
    }).from(localesTable).where(eq(localesTable.code, locale)).limit(1)
    return results.length === 1 ? results[0].strings : []
  }

  /**
   * Force-refresh the locales cache from the DB.
   */
  async reloadCache() {
    WIKI.logger.info('Reloading locales cache...')
    const locales = await WIKI.models.locales.getLocales({ cache: false })
    WIKI.logger.info(`Loaded ${locales.length} locales into cache [ OK ]`)
  }
}

export const locales = new Locales()
|
||||
@ -1,107 +0,0 @@
|
||||
import { stat, readFile } from 'node:fs/promises'
|
||||
import path from 'node:path'
|
||||
import { DateTime } from 'luxon'
|
||||
import { locales as localesTable } from '../db/schema.mjs'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
|
||||
/**
|
||||
* Locales model
|
||||
*/
|
||||
class Locales {
|
||||
async refreshFromDisk ({ force = false } = {}) {
|
||||
try {
|
||||
const localesMeta = (await import('../locales/metadata.mjs')).default
|
||||
WIKI.logger.info(`Found ${localesMeta.languages.length} locales [ OK ]`)
|
||||
|
||||
const dbLocales = await WIKI.db.select({
|
||||
code: localesTable.code,
|
||||
updatedAt: localesTable.updatedAt
|
||||
}).from(localesTable).orderBy(localesTable.code)
|
||||
|
||||
let localFilesSkipped = 0
|
||||
for (const lang of localesMeta.languages) {
|
||||
// -> Build filename
|
||||
const langFilenameParts = [lang.language]
|
||||
if (lang.region) {
|
||||
langFilenameParts.push(lang.region)
|
||||
}
|
||||
if (lang.script) {
|
||||
langFilenameParts.push(lang.script)
|
||||
}
|
||||
const langFilename = langFilenameParts.join('-')
|
||||
|
||||
// -> Get DB version
|
||||
const dbLang = dbLocales.find(l => l.code === langFilename)
|
||||
|
||||
// -> Get File version
|
||||
const flPath = path.join(WIKI.SERVERPATH, `locales/${langFilename}.json`)
|
||||
try {
|
||||
const flStat = await stat(flPath)
|
||||
const flUpdatedAt = DateTime.fromJSDate(flStat.mtime)
|
||||
|
||||
// -> Load strings
|
||||
if (!dbLang || DateTime.fromJSDate(dbLang.updatedAt) < flUpdatedAt || force) {
|
||||
WIKI.logger.info(`Loading locale ${langFilename} into DB...`)
|
||||
const flStrings = JSON.parse(await readFile(flPath, 'utf8'))
|
||||
await WIKI.db.insert(localesTable).values({
|
||||
code: langFilename,
|
||||
name: lang.name,
|
||||
nativeName: lang.localizedName,
|
||||
language: lang.language,
|
||||
region: lang.region,
|
||||
script: lang.script,
|
||||
isRTL: lang.isRtl,
|
||||
strings: flStrings
|
||||
}).onConflictDoUpdate({ target: localesTable.code, set: { strings: flStrings, updatedAt: sql`now()` } })
|
||||
WIKI.logger.info(`Locale ${langFilename} loaded successfully. [ OK ]`)
|
||||
} else {
|
||||
WIKI.logger.info(`Locale ${langFilename} is newer in the DB. Skipping disk version. [ OK ]`)
|
||||
}
|
||||
} catch (err) {
|
||||
localFilesSkipped++
|
||||
WIKI.logger.warn(`Locale ${langFilename} not found on disk. Missing strings file. [ SKIPPED ]`)
|
||||
}
|
||||
}
|
||||
if (localFilesSkipped > 0) {
|
||||
WIKI.logger.warn(`${localFilesSkipped} locales were defined in the metadata file but not found on disk. [ SKIPPED ]`)
|
||||
}
|
||||
} catch (err) {
|
||||
WIKI.logger.warn('Failed to load locales from disk: [ FAILED ]')
|
||||
WIKI.logger.warn(err)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
async getLocales ({ cache = true } = {}) {
|
||||
if (!WIKI.cache.has('locales') || !cache) {
|
||||
const locales = await WIKI.db.select({
|
||||
code: localesTable.code,
|
||||
isRTL: localesTable.isRTL,
|
||||
language: localesTable.language,
|
||||
name: localesTable.name,
|
||||
nativeName: localesTable.nativeName,
|
||||
createdAt: localesTable.createdAt,
|
||||
updatedAt: localesTable.updatedAt,
|
||||
completeness: localesTable.completeness
|
||||
}).from(localesTable).orderBy(localesTable.code)
|
||||
WIKI.cache.set('locales', locales)
|
||||
for (const locale of locales) {
|
||||
WIKI.cache.set(`locale:${locale.code}`, locale)
|
||||
}
|
||||
}
|
||||
return WIKI.cache.get('locales')
|
||||
}
|
||||
|
||||
async getStrings (locale) {
|
||||
const results = await WIKI.db.select({ strings: localesTable.strings }).from(localesTable).where(eq(localesTable.code, locale)).limit(1)
|
||||
return results.length === 1 ? results[0].strings : []
|
||||
}
|
||||
|
||||
async reloadCache () {
|
||||
WIKI.logger.info('Reloading locales cache...')
|
||||
const locales = await WIKI.models.locales.getLocales({ cache: false })
|
||||
WIKI.logger.info(`Loaded ${locales.length} locales into cache [ OK ]`)
|
||||
}
|
||||
}
|
||||
|
||||
export const locales = new Locales()
|
||||
@ -1,5 +1,5 @@
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { sessions as sessionsTable } from '../db/schema.mjs'
|
||||
import { sessions as sessionsTable } from '../db/schema.js'
|
||||
|
||||
/**
|
||||
* Sessions model
|
||||
@ -1,5 +1,5 @@
|
||||
import bcrypt from 'bcryptjs'
|
||||
import { userGroups, users as usersTable, userKeys } from '../db/schema.mjs'
|
||||
import { userGroups, users as usersTable, userKeys } from '../db/schema.js'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { DateTime } from 'luxon'
|
||||
@ -0,0 +1,24 @@
|
||||
/**
 * Scheduled task: query GitHub for the latest Wiki.js release and persist
 * the result into the `update` config key.
 * @throws Re-throws any fetch/parse error after logging it, so the job
 *         runner can record the failure.
 */
export async function task() {
  WIKI.logger.info('Checking for latest version...')

  try {
    const resp = await fetch('https://api.github.com/repos/requarks/wiki/releases/latest').then(r => r.json())
    // Release tags are usually `vX.Y.Z` — strip the leading `v` if present.
    const strictVersion = resp.tag_name.startsWith('v') ? resp.tag_name.substring(1) : resp.tag_name
    WIKI.logger.info(`Latest version is ${resp.tag_name}.`)
    WIKI.config.update = {
      lastCheckedAt: new Date().toISOString(),
      version: strictVersion,
      versionDate: resp.published_at
    }
    await WIKI.configSvc.saveToDb(['update'])

    WIKI.logger.info('Checked for latest version: [ COMPLETED ]')
  } catch (err) {
    WIKI.logger.error('Checking for latest version: [ FAILED ]')
    WIKI.logger.error(err.message)
    throw err
  }
}
|
||||
@ -0,0 +1,27 @@
|
||||
import path from 'node:path'
|
||||
import fse from 'fs-extra'
|
||||
import { DateTime } from 'luxon'
|
||||
|
||||
/**
 * Scheduled task: delete temporary upload files older than 15 minutes from
 * the uploads staging directory.
 * @throws Re-throws any filesystem error after logging it.
 */
export async function task() {
  WIKI.logger.info('Purging orphaned upload files...')

  try {
    const uplTempPath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'uploads')
    await fse.ensureDir(uplTempPath)
    const ls = await fse.readdir(uplTempPath)
    // FIX: compare milliseconds explicitly instead of relying on implicit
    // valueOf() coercion between a JS Date (stat.ctime) and a luxon DateTime.
    const cutoffMs = DateTime.now().minus({ minutes: 15 }).toMillis()

    for (const f of ls) {
      const fPath = path.join(uplTempPath, f)
      const fStat = await fse.stat(fPath) // renamed from `stat` to avoid shadowing the common fs name
      if (fStat.isFile() && fStat.ctimeMs < cutoffMs) {
        await fse.unlink(fPath)
      }
    }

    WIKI.logger.info('Purging orphaned upload files: [ COMPLETED ]')
  } catch (err) {
    WIKI.logger.error('Purging orphaned upload files: [ FAILED ]')
    WIKI.logger.error(err.message)
    throw err
  }
}
|
||||
@ -0,0 +1,56 @@
|
||||
import { ThreadWorker } from 'poolifier'
|
||||
import { kebabCase } from 'es-toolkit/string'
|
||||
import path from 'node:path'
|
||||
import configSvc from './core/config.js'
|
||||
import logger from './core/logger.js'
|
||||
import db from './core/db.js'
|
||||
|
||||
// ----------------------------------------
|
||||
// Init Minimal Core
|
||||
// ----------------------------------------
|
||||
|
||||
// Minimal WIKI global for worker threads: config service, logger and lazy DB
// access only — no HTTP servers, scheduler or extensions.
const WIKI = {
  IS_DEBUG: process.env.NODE_ENV === 'development',
  ROOTPATH: process.cwd(),
  INSTANCE_ID: 'worker', // overwritten per-job below
  SERVERPATH: path.join(process.cwd(), 'backend'),
  configSvc,
  // Lazily connect to the DB on first use; subsequent calls are no-ops.
  ensureDb: async () => {
    if (WIKI.db) {
      return true
    }

    WIKI.db = await db.init(true)

    try {
      await WIKI.configSvc.loadFromDb()
      await WIKI.configSvc.applyFlags()
    } catch (err) {
      WIKI.logger.error('Database Initialization Error: ' + err.message)
      if (WIKI.IS_DEBUG) {
        WIKI.logger.error(err)
      }
      // A worker without DB-backed config is unusable — abort the process.
      process.exit(1)
    }
  }
}
global.WIKI = WIKI

// Load base config synchronously before anything else uses WIKI.config.
await WIKI.configSvc.init(true)

// ----------------------------------------
// Init Logger
// ----------------------------------------

WIKI.logger = logger.init()

// ----------------------------------------
// Execute Task
// ----------------------------------------

// Poolifier thread worker entry point: resolves the task module by its
// kebab-cased name and runs it with the job payload.
export default new ThreadWorker(async (job) => {
  WIKI.INSTANCE_ID = job.INSTANCE_ID
  const task = (await import(`./tasks/workers/${kebabCase(job.task)}.js`)).task
  await task(job)
  return true
})
|
||||
Loading…
Reference in new issue