diff --git a/.eslintrc.yml b/.eslintrc.yml
index 56f0f735..593a73c7 100644
--- a/.eslintrc.yml
+++ b/.eslintrc.yml
@@ -13,3 +13,4 @@ globals:
   document: false
   navigator: false
   window: false
+  WIKI: true
diff --git a/config.sample.yml b/config.sample.yml
index 5819d46d..7e42a76f 100644
--- a/config.sample.yml
+++ b/config.sample.yml
@@ -124,9 +124,11 @@ dataPath: ./data
 bodyParserLimit: 5mb
 
 # ---------------------------------------------------------------------
-# Workers Limit
+# Scheduler
 # ---------------------------------------------------------------------
-# Maximum number of workers that can run background cpu-intensive jobs.
-# Leave to 'auto' to use CPU cores count as maximum.
-workers: auto
+scheduler:
+  # Maximum number of workers to run background cpu-intensive jobs.
+  # Make sure your PostgreSQL server can handle an extra connection
+  # for each worker!
+  workers: 3
 
diff --git a/package.json b/package.json
index 8a909603..4b9be4e4 100644
--- a/package.json
+++ b/package.json
@@ -81,6 +81,7 @@
     "filesize": "6.1.0",
     "fs-extra": "9.0.1",
     "getos": "3.2.1",
+    "graphile-worker": "0.13.0",
     "graphql": "16.3.0",
     "graphql-list-fields": "2.0.2",
     "graphql-rate-limit-directive": "2.0.2",
@@ -148,7 +149,6 @@
     "passport-twitch-strategy": "2.2.0",
     "pem-jwk": "2.0.0",
     "pg": "8.8.0",
-    "pg-boss": "8.0.0",
     "pg-hstore": "2.3.4",
     "pg-pubsub": "0.8.0",
     "pg-query-stream": "4.2.4",
diff --git a/server/app/data.yml b/server/app/data.yml
index d3a80374..33c33806 100644
--- a/server/app/data.yml
+++ b/server/app/data.yml
@@ -29,7 +29,8 @@ defaults:
   offline: false
   dataPath: ./data
   bodyParserLimit: 5mb
-  workers: auto
+  scheduler:
+    workers: 3
   # DB defaults
   api:
     isEnabled: false
@@ -78,27 +79,23 @@ defaults:
     maxHits: 100
   maintainerEmail: security@requarks.io
   jobs:
-    purgeUploads:
-      onInit: true
-      schedule: '*/15 * * * *'
-      offlineSkip: false
-      repeat: true
-    syncGraphLocales:
-      onInit: true
-      schedule: '0 0 * * *'
-      offlineSkip: true
-      repeat: true
-    syncGraphUpdates:
-      onInit: true
-      schedule: '0 0 * * *'
-      offlineSkip: true
-      repeat: true
-    rebuildTree:
-      onInit: true
-      offlineSkip: false
-      repeat: false
-      immediate: true
-      worker: true
+    - task: background
+      pattern: '*/15 * * * *'
+      payload:
+        name: purgeUploads
+        data: {}
+    # - task: simple
+    #   identifier: letest
+    #   pattern: '* * * * *'
+    #   payload:
+    #     name: bob
+    #     data: {}
+    # - task: simple
+    #   identifier: letest2
+    #   pattern: '* * * * *'
+    #   payload:
+    #     name: bob
+    #     data: {}
   groups:
     defaultPermissions:
       - 'read:pages'
diff --git a/server/core/db.js b/server/core/db.js
index b7946d96..1dadbbe8 100644
--- a/server/core/db.js
+++ b/server/core/db.js
@@ -18,6 +18,7 @@ module.exports = {
   Objection,
   knex: null,
   listener: null,
+  config: null,
   /**
    * Initialize DB
    */
@@ -28,7 +29,7 @@ module.exports = {
 
     // Fetch DB Config
 
-    const dbConfig = (!_.isEmpty(process.env.DATABASE_URL)) ? process.env.DATABASE_URL : {
+    this.config = (!_.isEmpty(process.env.DATABASE_URL)) ? process.env.DATABASE_URL : {
       host: WIKI.config.db.host.toString(),
       user: WIKI.config.db.user.toString(),
       password: WIKI.config.db.pass.toString(),
@@ -40,7 +41,7 @@ module.exports = {
 
     let dbUseSSL = (WIKI.config.db.ssl === true || WIKI.config.db.ssl === 'true' || WIKI.config.db.ssl === 1 || WIKI.config.db.ssl === '1')
     let sslOptions = null
-    if (dbUseSSL && _.isPlainObject(dbConfig) && _.get(WIKI.config.db, 'sslOptions.auto', null) === false) {
+    if (dbUseSSL && _.isPlainObject(this.config) && _.get(WIKI.config.db, 'sslOptions.auto', null) === false) {
       sslOptions = WIKI.config.db.sslOptions
       sslOptions.rejectUnauthorized = sslOptions.rejectUnauthorized !== false
       if (sslOptions.ca && sslOptions.ca.indexOf('-----') !== 0) {
@@ -73,8 +74,8 @@ module.exports = {
       }
     }
 
-    if (dbUseSSL && _.isPlainObject(dbConfig)) {
-      dbConfig.ssl = (sslOptions === true) ? { rejectUnauthorized: true } : sslOptions
+    if (dbUseSSL && _.isPlainObject(this.config)) {
+      this.config.ssl = (sslOptions === true) ? { rejectUnauthorized: true } : sslOptions
     }
 
     // Initialize Knex
@@ -82,7 +83,7 @@ module.exports = {
       client: 'pg',
       useNullAsDefault: true,
       asyncStackTraces: WIKI.IS_DEBUG,
-      connection: dbConfig,
+      connection: this.config,
       searchPath: [WIKI.config.db.schemas.wiki],
       pool: {
         ...WIKI.config.pool,
diff --git a/server/core/kernel.js b/server/core/kernel.js
index 557dc140..fa8629ea 100644
--- a/server/core/kernel.js
+++ b/server/core/kernel.js
@@ -77,7 +77,6 @@ module.exports = {
     await WIKI.models.sites.reloadCache()
     await WIKI.models.storage.initTargets()
     await WIKI.scheduler.start()
-    await WIKI.scheduler.registerScheduledJobs()
     await WIKI.models.subscribeToNotifications()
   },
 
diff --git a/server/core/scheduler.js b/server/core/scheduler.js
index f3c66ed1..6960ec0f 100644
--- a/server/core/scheduler.js
+++ b/server/core/scheduler.js
@@ -1,39 +1,16 @@
-const PgBoss = require('pg-boss')
+const { run, parseCronItems, Logger } = require('graphile-worker')
+const { Pool } = require('pg')
 const { DynamicThreadPool } = require('poolifier')
+const { v4: uuid } = require('uuid')
 const os = require('node:os')
-
-/* global WIKI */
+const path = require('node:path')
 
 module.exports = {
   pool: null,
-  boss: null,
+  runner: null,
   maxWorkers: 1,
   async init () {
-    WIKI.logger.info('Initializing Scheduler...')
-    this.boss = new PgBoss({
-      db: {
-        close: () => Promise.resolve('ok'),
-        executeSql: async (text, values) => {
-          try {
-            const resource = await WIKI.models.knex.client.pool.acquire().promise
-            const res = await resource.query(text, values)
-            WIKI.models.knex.client.pool.release(resource)
-            return res
-          } catch (err) {
-            WIKI.logger.error('Failed to acquire DB connection during scheduler query execution.')
-            WIKI.logger.error(err)
-          }
-        }
-      },
-      // ...WIKI.models.knex.client.connectionSettings,
-      application_name: 'Wiki.js Scheduler',
-      schema: WIKI.config.db.schemas.scheduler,
-      uuid: 'v4',
-      archiveCompletedAfterSeconds: 120,
-      deleteAfterHours: 24
-    })
-
-    this.maxWorkers = WIKI.config.workers === 'auto' ? os.cpus().length : WIKI.config.workers
+    this.maxWorkers = WIKI.config.scheduler.workers === 'auto' ? os.cpus().length : WIKI.config.scheduler.workers
     WIKI.logger.info(`Initializing Worker Pool (Limit: ${this.maxWorkers})...`)
     this.pool = new DynamicThreadPool(1, this.maxWorkers, './server/worker.js', {
       errorHandler: (err) => WIKI.logger.warn(err),
@@ -44,40 +21,49 @@
   },
   async start () {
     WIKI.logger.info('Starting Scheduler...')
-    await this.boss.start()
-    this.boss.work('wk-*', {
-      teamSize: this.maxWorkers,
-      teamConcurrency: this.maxWorkers
-    }, async job => {
-      WIKI.logger.debug(`Starting job ${job.name}:${job.id}...`)
-      try {
-        const result = await this.pool.execute({
-          id: job.id,
-          name: job.name,
-          data: job.data
-        })
-        WIKI.logger.debug(`Completed job ${job.name}:${job.id}.`)
-        job.done(null, result)
-      } catch (err) {
-        WIKI.logger.warn(`Failed job ${job.name}:${job.id}): ${err.message}`)
-        job.done(err)
+    this.runner = await run({
+      pgPool: new Pool({
+        ...(typeof WIKI.models.config === 'string') ? {
+          connectionString: WIKI.models.config
+        } : WIKI.models.config,
+        max: this.maxWorkers + 2
+      }),
+      schema: WIKI.config.db.schemas.scheduler,
+      concurrency: this.maxWorkers,
+      noHandleSignals: true,
+      logger: new Logger(scope => {
+        return (level, message, meta) => {
+          const prefix = (scope?.workerId) ? `[${scope.workerId}] ` : ''
+          WIKI.logger[level](`${prefix}${message}`, meta)
+        }
+      }),
+      parsedCronItems: parseCronItems(WIKI.data.jobs.map(j => ({
+        ...j,
+        identifier: uuid()
+      }))),
+      taskList: {
+        simple: async (payload, helpers) => {
+          // TODO: Handle task
+        },
+        background: async (payload, helpers) => {
+          try {
+            await this.pool.execute({
+              id: helpers.job.id,
+              name: payload.name,
+              data: payload.data
+            })
+          } catch (err) {
+            helpers.logger.warn(`Failed job: ${err.message}`)
+            throw err
+          }
+        }
       }
-      this.boss.complete(job.id)
     })
     WIKI.logger.info('Scheduler: [ STARTED ]')
   },
   async stop () {
     WIKI.logger.info('Stopping Scheduler...')
-    await this.boss.stop({ timeout: 5000 })
-    await this.pool.destroy()
+    await this.runner.stop()
     WIKI.logger.info('Scheduler: [ STOPPED ]')
-  },
-  async registerScheduledJobs () {
-    for (const [key, job] of Object.entries(WIKI.data.jobs)) {
-      if (job.schedule) {
-        WIKI.logger.debug(`Scheduling regular job ${key}...`)
-        await this.boss.schedule(`wk-${key}`, job.schedule)
-      }
-    }
   }
 }
diff --git a/server/jobs/fetch-graph-locale.js b/server/jobs/fetch-graph-locale.js
deleted file mode 100644
index 7a38cfe9..00000000
--- a/server/jobs/fetch-graph-locale.js
+++ /dev/null
@@ -1,66 +0,0 @@
-const _ = require('lodash')
-const { createApolloFetch } = require('apollo-fetch')
-
-/* global WIKI */
-
-module.exports = async (localeCode) => {
-  WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint...`)
-
-  try {
-    const apollo = createApolloFetch({
-      uri: WIKI.config.graphEndpoint
-    })
-
-    const respStrings = await apollo({
-      query: `query ($code: String!) 
{ - localization { - strings(code: $code) { - key - value - } - } - }`, - variables: { - code: localeCode - } - }) - const strings = _.get(respStrings, 'data.localization.strings', []) - let lcObj = {} - _.forEach(strings, row => { - if (_.includes(row.key, '::')) { return } - if (_.isEmpty(row.value)) { - row.value = row.key - } - _.set(lcObj, row.key.replace(':', '.'), row.value) - }) - - const locales = await WIKI.cache.get('locales') - if (locales) { - const currentLocale = _.find(locales, ['code', localeCode]) || {} - const existingLocale = await WIKI.models.locales.query().where('code', localeCode).first() - if (existingLocale) { - await WIKI.models.locales.query().patch({ - strings: lcObj - }).where('code', localeCode) - } else { - await WIKI.models.locales.query().insert({ - code: localeCode, - strings: lcObj, - isRTL: currentLocale.isRTL, - name: currentLocale.name, - nativeName: currentLocale.nativeName, - availability: currentLocale.availability - }) - } - } else { - throw new Error('Failed to fetch cached locales list! Restart server to resolve this issue.') - } - - await WIKI.lang.refreshNamespaces() - - WIKI.logger.info(`Fetching locale ${localeCode} from Graph endpoint: [ COMPLETED ]`) - } catch (err) { - WIKI.logger.error(`Fetching locale ${localeCode} from Graph endpoint: [ FAILED ]`) - WIKI.logger.error(err.message) - } -} diff --git a/server/jobs/purge-uploads.js b/server/jobs/purge-uploads.js deleted file mode 100644 index 7b370e86..00000000 --- a/server/jobs/purge-uploads.js +++ /dev/null @@ -1,32 +0,0 @@ -/* global WIKI */ - -const Promise = require('bluebird') -const fs = require('fs-extra') -const moment = require('moment') -const path = require('path') - -module.exports = async () => { - WIKI.logger.info('Purging orphaned upload files...') - - try { - const uplTempPath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'uploads') - await fs.ensureDir(uplTempPath) - const ls = await fs.readdir(uplTempPath) - const fifteenAgo = moment().subtract(15, 'minutes') - - await Promise.map(ls, (f) => { - return fs.stat(path.join(uplTempPath, f)).then((s) => { return { filename: f, stat: s } }) - }).filter((s) => { return s.stat.isFile() }).then((arrFiles) => { - return Promise.map(arrFiles, (f) => { - if (moment(f.stat.ctime).isBefore(fifteenAgo, 'minute')) { - return fs.unlink(path.join(uplTempPath, f.filename)) - } - }) - }) - - WIKI.logger.info('Purging orphaned upload files: [ COMPLETED ]') - } catch (err) { - WIKI.logger.error('Purging orphaned upload files: [ FAILED ]') - WIKI.logger.error(err.message) - } -} diff --git a/server/jobs/rebuild-tree.js b/server/jobs/rebuild-tree.js deleted file mode 100644 index c2fc3728..00000000 --- a/server/jobs/rebuild-tree.js +++ /dev/null @@ -1,80 +0,0 @@ -const _ = require('lodash') - -/* global WIKI */ - -module.exports = async (pageId) => { - WIKI.logger.info(`Rebuilding page tree...`) - - try { - WIKI.models = require('../core/db').init() - await WIKI.configSvc.loadFromDb() - await WIKI.configSvc.applyFlags() - - const pages = await WIKI.models.pages.query().select('id', 'path', 'localeCode', 'title', 'isPrivate', 'privateNS').orderBy(['localeCode', 'path']) - let tree = [] - let pik = 0 - - for (const page of pages) { - const pagePaths = page.path.split('/') - let currentPath = '' - let depth = 0 - let parentId = null - let ancestors = [] - for (const part of pagePaths) { - depth++ - const isFolder = (depth < pagePaths.length) - currentPath = currentPath ? 
`${currentPath}/${part}` : part - const found = _.find(tree, { - localeCode: page.localeCode, - path: currentPath - }) - if (!found) { - pik++ - tree.push({ - id: pik, - localeCode: page.localeCode, - path: currentPath, - depth: depth, - title: isFolder ? part : page.title, - isFolder: isFolder, - isPrivate: !isFolder && page.isPrivate, - privateNS: !isFolder ? page.privateNS : null, - parent: parentId, - pageId: isFolder ? null : page.id, - ancestors: JSON.stringify(ancestors) - }) - parentId = pik - } else if (isFolder && !found.isFolder) { - found.isFolder = true - parentId = found.id - } else { - parentId = found.id - } - ancestors.push(parentId) - } - } - - await WIKI.models.knex.table('pageTree').truncate() - if (tree.length > 0) { - // -> Save in chunks, because of per query max parameters (35k Postgres, 2k MSSQL, 1k for SQLite) - if ((WIKI.config.db.type !== 'sqlite')) { - for (const chunk of _.chunk(tree, 100)) { - await WIKI.models.knex.table('pageTree').insert(chunk) - } - } else { - for (const chunk of _.chunk(tree, 60)) { - await WIKI.models.knex.table('pageTree').insert(chunk) - } - } - } - - await WIKI.models.knex.destroy() - - WIKI.logger.info(`Rebuilding page tree: [ COMPLETED ]`) - } catch (err) { - WIKI.logger.error(`Rebuilding page tree: [ FAILED ]`) - WIKI.logger.error(err.message) - // exit process with error code - throw err - } -} diff --git a/server/jobs/render-page.js b/server/jobs/render-page.js deleted file mode 100644 index 3a88b375..00000000 --- a/server/jobs/render-page.js +++ /dev/null @@ -1,96 +0,0 @@ -const _ = require('lodash') -const cheerio = require('cheerio') - -/* global WIKI */ - -module.exports = async (pageId) => { - WIKI.logger.info(`Rendering page ID ${pageId}...`) - - try { - WIKI.models = require('../core/db').init() - await WIKI.configSvc.loadFromDb() - await WIKI.configSvc.applyFlags() - - const page = await WIKI.models.pages.getPageFromDb(pageId) - if (!page) { - throw new Error('Invalid Page Id') - } - - await WIKI.models.renderers.fetchDefinitions() - const pipeline = await WIKI.models.renderers.getRenderingPipeline(page.contentType) - - let output = page.content - - if (_.isEmpty(page.content)) { - await WIKI.models.knex.destroy() - WIKI.logger.warn(`Failed to render page ID ${pageId} because content was empty: [ FAILED ]`) - } - - for (let core of pipeline) { - const renderer = require(`../modules/rendering/${_.kebabCase(core.key)}/renderer.js`) - output = await renderer.render.call({ - config: core.config, - children: core.children, - page: page, - input: output - }) - } - - // Parse TOC - const $ = cheerio.load(output) - let isStrict = $('h1').length > 0 // <- Allows for documents using H2 as top level - let toc = { root: [] } - - $('h1,h2,h3,h4,h5,h6').each((idx, el) => { - const depth = _.toSafeInteger(el.name.substring(1)) - (isStrict ? 
1 : 2) - let leafPathError = false - - const leafPath = _.reduce(_.times(depth), (curPath, curIdx) => { - if (_.has(toc, curPath)) { - const lastLeafIdx = _.get(toc, curPath).length - 1 - if (lastLeafIdx >= 0) { - curPath = `${curPath}[${lastLeafIdx}].children` - } else { - leafPathError = true - } - } - return curPath - }, 'root') - - if (leafPathError) { return } - - const leafSlug = $('.toc-anchor', el).first().attr('href') - $('.toc-anchor', el).remove() - - _.get(toc, leafPath).push({ - title: _.trim($(el).text()), - anchor: leafSlug, - children: [] - }) - }) - - // Save to DB - await WIKI.models.pages.query() - .patch({ - render: output, - toc: JSON.stringify(toc.root) - }) - .where('id', pageId) - - // Save to cache - await WIKI.models.pages.savePageToCache({ - ...page, - render: output, - toc: JSON.stringify(toc.root) - }) - - await WIKI.models.knex.destroy() - - WIKI.logger.info(`Rendering page ID ${pageId}: [ COMPLETED ]`) - } catch (err) { - WIKI.logger.error(`Rendering page ID ${pageId}: [ FAILED ]`) - WIKI.logger.error(err.message) - // exit process with error code - throw err - } -} diff --git a/server/jobs/sanitize-svg.js b/server/jobs/sanitize-svg.js deleted file mode 100644 index 117c20e4..00000000 --- a/server/jobs/sanitize-svg.js +++ /dev/null @@ -1,25 +0,0 @@ -const fs = require('fs-extra') -const { JSDOM } = require('jsdom') -const createDOMPurify = require('dompurify') - -/* global WIKI */ - -module.exports = async (svgPath) => { - WIKI.logger.info(`Sanitizing SVG file upload...`) - - try { - let svgContents = await fs.readFile(svgPath, 'utf8') - - const window = new JSDOM('').window - const DOMPurify = createDOMPurify(window) - - svgContents = DOMPurify.sanitize(svgContents) - - await fs.writeFile(svgPath, svgContents) - WIKI.logger.info(`Sanitized SVG file upload: [ COMPLETED ]`) - } catch (err) { - WIKI.logger.error(`Failed to sanitize SVG file upload: [ FAILED ]`) - WIKI.logger.error(err.message) - throw err - } -} diff --git a/server/jobs/sync-graph-locales.js b/server/jobs/sync-graph-locales.js deleted file mode 100644 index c534a0d9..00000000 --- a/server/jobs/sync-graph-locales.js +++ /dev/null @@ -1,84 +0,0 @@ -const _ = require('lodash') -const { createApolloFetch } = require('apollo-fetch') - -/* global WIKI */ - -module.exports = async () => { - WIKI.logger.info('Syncing locales with Graph endpoint...') - - try { - const apollo = createApolloFetch({ - uri: WIKI.config.graphEndpoint - }) - - // -> Fetch locales list - - const respList = await apollo({ - query: `{ - localization { - locales { - availability - code - name - nativeName - isRTL - createdAt - updatedAt - } - } - }` - }) - const locales = _.sortBy(_.get(respList, 'data.localization.locales', []), 'name').map(lc => ({...lc, isInstalled: (lc.code === 'en')})) - WIKI.cache.set('locales', locales) - - // -> Download locale strings - - if (WIKI.config.lang.autoUpdate) { - const activeLocales = WIKI.config.lang.namespacing ? WIKI.config.lang.namespaces : [WIKI.config.lang.code] - for (const currentLocale of activeLocales) { - const localeInfo = _.find(locales, ['code', currentLocale]) - - const respStrings = await apollo({ - query: `query ($code: String!) 
{ - localization { - strings(code: $code) { - key - value - } - } - }`, - variables: { - code: currentLocale - } - }) - const strings = _.get(respStrings, 'data.localization.strings', []) - let lcObj = {} - _.forEach(strings, row => { - if (_.includes(row.key, '::')) { return } - if (_.isEmpty(row.value)) { - row.value = row.key - } - _.set(lcObj, row.key.replace(':', '.'), row.value) - }) - - await WIKI.models.locales.query().update({ - code: currentLocale, - strings: lcObj, - isRTL: localeInfo.isRTL, - name: localeInfo.name, - nativeName: localeInfo.nativeName, - availability: localeInfo.availability - }).where('code', currentLocale) - - WIKI.logger.info(`Pulled latest locale updates for ${localeInfo.name} from Graph endpoint: [ COMPLETED ]`) - } - } - - await WIKI.lang.refreshNamespaces() - - WIKI.logger.info('Syncing locales with Graph endpoint: [ COMPLETED ]') - } catch (err) { - WIKI.logger.error('Syncing locales with Graph endpoint: [ FAILED ]') - WIKI.logger.error(err.message) - } -} diff --git a/server/jobs/sync-graph-updates.js b/server/jobs/sync-graph-updates.js deleted file mode 100644 index 79b6cc1f..00000000 --- a/server/jobs/sync-graph-updates.js +++ /dev/null @@ -1,41 +0,0 @@ -const _ = require('lodash') -const { createApolloFetch } = require('apollo-fetch') - -/* global WIKI */ - -module.exports = async () => { - WIKI.logger.info(`Fetching latest updates from Graph endpoint...`) - - try { - const apollo = createApolloFetch({ - uri: WIKI.config.graphEndpoint - }) - - const resp = await apollo({ - query: `query ($channel: ReleaseChannel!, $version: String!) { - releases { - checkForUpdates(channel: $channel, version: $version) { - channel - version - releaseDate - minimumVersionRequired - minimumNodeRequired - } - } - }`, - variables: { - channel: WIKI.config.channel, - version: WIKI.version - } - }) - const info = _.get(resp, 'data.releases.checkForUpdates', false) - if (info) { - WIKI.system.updates = info - } - - WIKI.logger.info(`Fetching latest updates from Graph endpoint: [ COMPLETED ]`) - } catch (err) { - WIKI.logger.error(`Fetching latest updates from Graph endpoint: [ FAILED ]`) - WIKI.logger.error(err.message) - } -} diff --git a/server/jobs/sync-storage.js b/server/jobs/sync-storage.js deleted file mode 100644 index 9208f2c4..00000000 --- a/server/jobs/sync-storage.js +++ /dev/null @@ -1,35 +0,0 @@ -const _ = require('lodash') - -/* global WIKI */ - -module.exports = async (targetKey) => { - WIKI.logger.info(`Syncing with storage target ${targetKey}...`) - - try { - const target = _.find(WIKI.models.storage.targets, ['key', targetKey]) - if (target) { - await target.fn.sync() - WIKI.logger.info(`Syncing with storage target ${targetKey}: [ COMPLETED ]`) - - await WIKI.models.storage.query().patch({ - state: { - status: 'operational', - message: '', - lastAttempt: new Date().toISOString() - } - }).where('key', targetKey) - } else { - throw new Error('Invalid storage target. 
Unable to perform sync.')
-    }
-  } catch (err) {
-    WIKI.logger.error(`Syncing with storage target ${targetKey}: [ FAILED ]`)
-    WIKI.logger.error(err.message)
-    await WIKI.models.storage.query().patch({
-      state: {
-        status: 'error',
-        message: err.message,
-        lastAttempt: new Date().toISOString()
-      }
-    }).where('key', targetKey)
-  }
-}
diff --git a/server/tasks/background/purge-uploads.js b/server/tasks/background/purge-uploads.js
new file mode 100644
index 00000000..6b4ce9e2
--- /dev/null
+++ b/server/tasks/background/purge-uploads.js
@@ -0,0 +1,26 @@
+const path = require('node:path')
+const fs = require('fs-extra')
+const { DateTime } = require('luxon')
+
+module.exports = async (payload, helpers) => {
+  helpers.logger.info('Purging orphaned upload files...')
+
+  try {
+    const uplTempPath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, 'uploads')
+    await fs.ensureDir(uplTempPath)
+    const ls = await fs.readdir(uplTempPath)
+    const fifteenAgo = DateTime.now().minus({ minutes: 15 })
+
+    for (const f of ls) {
+      const stat = await fs.stat(path.join(uplTempPath, f))
+      if (stat.isFile() && DateTime.fromJSDate(stat.ctime) < fifteenAgo) {
+        await fs.unlink(path.join(uplTempPath, f))
+      }
+    }
+
+    helpers.logger.info('Purging orphaned upload files: [ COMPLETED ]')
+  } catch (err) {
+    helpers.logger.error('Purging orphaned upload files: [ FAILED ]')
+    helpers.logger.error(err.message)
+  }
+}
diff --git a/server/worker.js b/server/worker.js
index 18d4ed54..2309354d 100644
--- a/server/worker.js
+++ b/server/worker.js
@@ -1,5 +1,6 @@
 const { ThreadWorker } = require('poolifier')
 
 module.exports = new ThreadWorker(async (job) => {
+  // TODO: Call external task file
   return { ok: true }
 }, { async: true })
diff --git a/yarn.lock b/yarn.lock
index c34db04e..6a0f1a32 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2490,6 +2490,11 @@
   dependencies:
     passport-oauth2 "^1.4.0"
 
+"@graphile/logger@^0.2.0":
+  version "0.2.0"
+  resolved "https://registry.yarnpkg.com/@graphile/logger/-/logger-0.2.0.tgz#e484ec420162157c6e6f0cfb080fa29ef3a714ba"
+  integrity sha512-jjcWBokl9eb1gVJ85QmoaQ73CQ52xAaOCF29ukRbYNl6lY+ts0ErTaDYOBlejcbUs2OpaiqYLO5uDhyLFzWw4w==
+
 "@graphql-tools/merge@8.2.7":
   version "8.2.7"
   resolved "https://registry.yarnpkg.com/@graphql-tools/merge/-/merge-8.2.7.tgz#add05bcc47df6b7390f31acbcadd986e160d58f9"
@@ -3086,6 +3091,13 @@
   resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.12.tgz#6b2c510a7ad7039e98e7b8d3d6598f4359e5c080"
   integrity sha512-vt+kDhq/M2ayberEtJcIN/hxXy1Pk+59g2FV/ZQceeaTyCtCucjL2Q7FXlFjtWn4n15KCr1NE2lNNFhp0lEThw==
 
+"@types/debug@^4.1.2":
+  version "4.1.7"
+  resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82"
+  integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==
+  dependencies:
+    "@types/ms" "*"
+
 "@types/debug@^4.1.5":
   version "4.1.5"
   resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd"
@@ -3224,6 +3236,11 @@
   resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.3.tgz#3dca0e3f33b200fc7d1139c0cd96c1268cadfd9d"
   integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==
 
+"@types/ms@*":
+  version "0.7.31"
+  resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197"
+  integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==
+
 "@types/node-fetch@^2.5.0":
   version "2.5.4"
   resolved 
"https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.4.tgz#5245b6d8841fc3a6208b82291119bc11c4e0ce44" @@ -3251,6 +3268,20 @@ resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== +"@types/parse-json@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" + integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + +"@types/pg@>=6 <9": + version "8.6.5" + resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.5.tgz#2dce9cb468a6a5e0f1296a59aea3ac75dd27b702" + integrity sha512-tOkGtAqRVkHa/PVZicq67zuujI4Oorfglsr2IbKofDwBSysnaqSx7W1mDqFqdkGE6Fbgh+PZAl0r/BWON/mozw== + dependencies: + "@types/node" "*" + pg-protocol "*" + pg-types "^2.2.0" + "@types/prettier@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.0.0.tgz#dc85454b953178cc6043df5208b9e949b54a3bc4" @@ -3812,14 +3843,6 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -aggregate-error@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-4.0.1.tgz#25091fe1573b9e0be892aeda15c7c66a545f758e" - integrity sha512-0poP0T7el6Vq3rstR8Mn4V/IQrpBLO6POkUSrN7RhyY+GF/InCFShQzsQ39T25gkHhLgSLByyAz+Kjb+c2L98w== - dependencies: - clean-stack "^4.0.0" - indent-string "^5.0.0" - ajv-errors@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/ajv-errors/-/ajv-errors-1.0.1.tgz#f35986aceb91afadec4102fbd85014950cefa64d" @@ -5728,13 +5751,6 @@ clean-stack@^2.0.0: resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -clean-stack@^4.0.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-4.2.0.tgz#c464e4cde4ac789f4e0735c5d75beb49d7b30b31" - integrity sha512-LYv6XPxoyODi36Dp976riBtSY27VmFo+MKqEU9QCCWyTrdEPDog+RWA7xQWHi6Vbp61j5c4cdzzX1NidnwtUWg== - dependencies: - escape-string-regexp "5.0.0" - clean-webpack-plugin@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/clean-webpack-plugin/-/clean-webpack-plugin-3.0.0.tgz#a99d8ec34c1c628a4541567aa7b457446460c62b" @@ -6347,6 +6363,17 @@ cosmiconfig@^5.0.0: js-yaml "^3.13.1" parse-json "^4.0.0" +cosmiconfig@^7.0.0: + version "7.0.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.0.1.tgz#714d756522cace867867ccb4474c5d01bbae5d6d" + integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + cpu-features@0.0.2: version "0.0.2" resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.2.tgz#9f636156f1155fd04bdbaa028bb3c2fbef3cea7a" @@ -6393,7 +6420,7 @@ create-hmac@^1.1.0, create-hmac@^1.1.2, create-hmac@^1.1.4: safe-buffer "^5.0.1" sha.js "^2.4.8" -cron-parser@4.6.0, cron-parser@^4.0.0: +cron-parser@4.6.0: version "4.6.0" resolved "https://registry.yarnpkg.com/cron-parser/-/cron-parser-4.6.0.tgz#404c3fdbff10ae80eef6b709555d577ef2fd2e0d" integrity 
sha512-guZNLMGUgg6z4+eGhmHGw7ft+v6OQeuHzd1gcLxCo9Yg/qoxmG3nindp2/uwGCLizEisf2H0ptqeVXeoCpP6FA== @@ -7500,11 +7527,6 @@ delaunator@4: resolved "https://registry.yarnpkg.com/delaunator/-/delaunator-4.0.1.tgz#3d779687f57919a7a418f8ab947d3bddb6846957" integrity sha512-WNPWi1IRKZfCt/qIDMfERkDp93+iZEmOxN2yy4Jg+Xhv8SLk2UTqqbe1sfiipn0and9QrE914/ihdx82Y/Giag== -delay@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" - integrity sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw== - delayed-stream@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" @@ -8185,11 +8207,6 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= -escape-string-regexp@5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" - integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== - escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" @@ -9574,6 +9591,21 @@ graceful-fs@^4.2.4: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== +graphile-worker@0.13.0: + version "0.13.0" + resolved "https://registry.yarnpkg.com/graphile-worker/-/graphile-worker-0.13.0.tgz#8cf2ef75d1d58f2a634c4dcb7fe700c4fda9a77f" + integrity sha512-8Hl5XV6hkabZRhYzvbUfvjJfPFR5EPxYRVWlzQC2rqYHrjULTLBgBYZna5R9ukbnsbWSvn4vVrzOBIOgIC1jjw== + dependencies: + "@graphile/logger" "^0.2.0" + "@types/debug" "^4.1.2" + "@types/pg" ">=6 <9" + chokidar "^3.4.0" + cosmiconfig "^7.0.0" + json5 "^2.1.3" + pg ">=6.5 <9" + tslib "^2.1.0" + yargs "^16.2.0" + graphlib@^2.1.7, graphlib@^2.1.8: version "2.1.8" resolved "https://registry.yarnpkg.com/graphlib/-/graphlib-2.1.8.tgz#5761d414737870084c92ec7b5dbcb0592c9d35da" @@ -10242,11 +10274,6 @@ indent-string@^4.0.0: resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -indent-string@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-5.0.0.tgz#4fd2980fccaf8622d14c64d694f4cf33c81951a5" - integrity sha512-m6FAo/spmsW2Ab2fU35JTYwtOKa2yAwXSwgjSv1TJzh4Mh7mC3lzAOVLBprb72XsTrgkEIsl7YrFNAiDiRhIGg== - indexes-of@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" @@ -11497,6 +11524,11 @@ json5@^2.1.2: dependencies: minimist "^1.2.5" +json5@^2.1.3: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + jsonfile@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" @@ -11901,11 +11933,6 @@ lodash.clonedeep@^4.5.0: resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" integrity sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ== -lodash.debounce@^4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af" - integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== - lodash.includes@^4.3.0: version "4.3.0" resolved "https://registry.yarnpkg.com/lodash.includes/-/lodash.includes-4.3.0.tgz#60bb98a87cb923c68ca1e51325483314849f553f" @@ -13494,13 +13521,6 @@ p-map@^4.0.0: dependencies: aggregate-error "^3.0.0" -p-map@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-5.5.0.tgz#054ca8ca778dfa4cf3f8db6638ccb5b937266715" - integrity sha512-VFqfGDHlx87K66yZrNdI4YGtD70IRyd+zSvgks6mzHPRNkoKy+9EKP4SFC77/vTTQYmRmti7dvqC+m5jBrBAcg== - dependencies: - aggregate-error "^4.0.0" - p-try@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" @@ -13991,19 +14011,6 @@ persistgraphql@^0.3.11: "@types/graphql" "^0.9.0" "@types/isomorphic-fetch" "0.0.34" -pg-boss@8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/pg-boss/-/pg-boss-8.0.0.tgz#a32cd2c6f09c894b9e3ace027ecdecc06929edf6" - integrity sha512-WTchkRcTS9/AFuXhNzCQ9KlHgvi9VI3YpPk2EqGDhf2Od+/5Ug1e8b+NB+A81swe8LusAoQ6ka6n4pBkpkgrkw== - dependencies: - cron-parser "^4.0.0" - delay "^5.0.0" - lodash.debounce "^4.0.8" - p-map "^5.3.0" - pg "^8.5.1" - serialize-error "^11.0.0" - uuid "^8.3.2" - pg-connection-string@2.5.0, pg-connection-string@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" @@ -14036,7 +14043,7 @@ pg-pool@^3.5.2: resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.5.2.tgz#ed1bed1fb8d79f1c6fd5fb1c99e990fbf9ddf178" integrity sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w== -pg-protocol@^1.5.0: +pg-protocol@*, pg-protocol@^1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.5.0.tgz#b5dd452257314565e2d54ab3c132adc46565a6a0" integrity sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ== @@ -14063,7 +14070,7 @@ pg-tsquery@8.4.0: resolved "https://registry.yarnpkg.com/pg-tsquery/-/pg-tsquery-8.4.0.tgz#411293cce23ca1eeb8c29109af9fadf28f20a7d9" integrity sha512-m0jIxUVwLKSdmOAlqtlbo6K+EFIOZ/hyOMnoe8DmYFqEmOmvafIjGQFmcPP+z5MWd/p7ExxoKNIL31gmM+CwxQ== -pg-types@^2.1.0: +pg-types@^2.1.0, pg-types@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== @@ -14074,7 +14081,7 @@ pg-types@^2.1.0: postgres-date "~1.0.4" postgres-interval "^1.1.0" -pg@8.8.0, pg@^8.5.1, pg@^8.7.3: +pg@8.8.0, "pg@>=6.5 <9", pg@^8.7.3: version "8.8.0" resolved "https://registry.yarnpkg.com/pg/-/pg-8.8.0.tgz#a77f41f9d9ede7009abfca54667c775a240da686" integrity sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw== @@ 
-16784,13 +16791,6 @@ send@0.18.0: range-parser "~1.2.1" statuses "2.0.1" -serialize-error@^11.0.0: - version "11.0.0" - resolved "https://registry.yarnpkg.com/serialize-error/-/serialize-error-11.0.0.tgz#0129f2b07b19b09bc7a5f2d850ffe9cd2d561582" - integrity sha512-YKrURWDqcT3VGX/s/pCwaWtpfJEEaEw5Y4gAnQDku92b/HjVj4r4UhA5QrMVMFotymK2wIWs5xthny5SMFu7Vw== - dependencies: - type-fest "^2.12.2" - serialize-javascript@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-2.1.2.tgz#ecec53b0e0317bdc95ef76ab7074b7384785fa61" @@ -18157,11 +18157,6 @@ type-fest@^0.8.1: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== -type-fest@^2.12.2: - version "2.19.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-2.19.0.tgz#88068015bb33036a598b952e55e9311a60fd3a9b" - integrity sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA== - type-is@^1.6.4, type-is@~1.6.17, type-is@~1.6.18: version "1.6.18" resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" @@ -18510,7 +18505,7 @@ uuid@8.0.0: resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.0.0.tgz#bc6ccf91b5ff0ac07bbcdbf1c7c4e150db4dbb6c" integrity sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw== -uuid@8.3.2, uuid@^8.3.2: +uuid@8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== @@ -19378,6 +19373,11 @@ y18n@^5.0.2: resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + yallist@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52" @@ -19393,6 +19393,11 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yaml@^1.10.0: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + yargs-parser@^13.1.2: version "13.1.2" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" @@ -19492,6 +19497,19 @@ yargs@^15.4.1: y18n "^4.0.0" yargs-parser "^18.1.2" +yargs@^16.2.0: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + 
y18n "^5.0.5" + yargs-parser "^20.2.2" + yargs@^7.1.0: version "7.1.0" resolved "https://registry.yarnpkg.com/yargs/-/yargs-7.1.0.tgz#6ba318eb16961727f5d284f8ea003e8d6154d0c8"