feat: tree model improvements + site uploads config

pull/6078/head
Nicolas Giard 2 years ago
parent 714aa1eb0f
commit ce766ce3e9

@ -43,6 +43,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
switch (req.params.resource) {
case 'logo': {
if (site.config.assets.logo) {
// TODO: Fetch from db if not in disk cache
res.sendFile(path.join(siteAssetsPath, `logo-${site.id}.${site.config.assets.logoExt}`))
} else {
res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
@ -51,6 +52,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
}
case 'favicon': {
if (site.config.assets.favicon) {
// TODO: Fetch from db if not in disk cache
res.sendFile(path.join(siteAssetsPath, `favicon-${site.id}.${site.config.assets.faviconExt}`))
} else {
res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/logo-wikijs.svg'))
@ -59,6 +61,7 @@ router.get('/_site/:siteId?/:resource', async (req, res, next) => {
}
case 'loginbg': {
if (site.config.assets.loginBg) {
// TODO: Fetch from db if not in disk cache
res.sendFile(path.join(siteAssetsPath, `loginbg-${site.id}.jpg`))
} else {
res.sendFile(path.join(WIKI.ROOTPATH, 'assets/_assets/bg/login.jpg'))

@ -45,20 +45,17 @@ exports.up = async knex => {
.createTable('assets', table => {
table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
table.string('filename').notNullable()
table.string('hash').notNullable().index()
table.string('ext').notNullable()
table.enum('kind', ['binary', 'image']).notNullable().defaultTo('binary')
table.boolean('isSystem').notNullable().defaultTo(false)
table.enum('kind', ['document', 'image', 'other']).notNullable().defaultTo('other')
table.string('mime').notNullable().defaultTo('application/octet-stream')
table.integer('fileSize').unsigned().comment('In kilobytes')
table.jsonb('metadata')
table.jsonb('metadata').notNullable().defaultTo('{}')
table.timestamp('createdAt').notNullable().defaultTo(knex.fn.now())
table.timestamp('updatedAt').notNullable().defaultTo(knex.fn.now())
})
// ASSET DATA --------------------------
.createTable('assetData', table => {
table.uuid('id').notNullable().primary()
table.binary('data').notNullable()
table.binary('preview')
table.enum('previewState', ['none', 'pending', 'ready', 'failed']).notNullable().defaultTo('none')
})
// AUTHENTICATION ----------------------
.createTable('authentication', table => {
@ -284,6 +281,7 @@ exports.up = async knex => {
table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
table.specificType('folderPath', 'ltree').index().index('tree_folderpath_gist_index', { indexType: 'GIST' })
table.string('fileName').notNullable().index()
table.string('hash').notNullable().index()
table.enu('type', ['folder', 'page', 'asset']).notNullable().index()
table.string('localeCode', 5).notNullable().defaultTo('en').index()
table.string('title').notNullable()
@ -588,6 +586,10 @@ exports.up = async knex => {
showPrintBtn: true,
baseFont: 'roboto',
contentFont: 'roboto'
},
uploads: {
conflictBehavior: 'overwrite',
normalizeFilename: true
}
}
})
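
The tree table pairs a GIST-indexed ltree `folderPath` with the newly indexed `hash` column, which covers the two lookup patterns the resolvers need: whole-subtree scans and exact path hits. A minimal sketch of both from knex, assuming PostgreSQL and placeholder paths (docs/guides, docs/guides/install):

const crypto = require('node:crypto')

async function treeLookups (knex) {
  // Subtree scan: folderPath is stored in encoded form ('/' -> '.', '-' -> '_'),
  // so everything under docs/guides matches the ltree descendant operator against 'docs.guides'.
  const descendants = await knex('tree').whereRaw('?? <@ ?::ltree', ['folderPath', 'docs.guides'])

  // Exact hit: hash is a SHA-1 of the decoded full path (see the tree model changes below),
  // so a single indexed equality lookup resolves a path without splitting it.
  const hash = crypto.createHash('sha1').update('docs/guides/install').digest('hex')
  const page = await knex('tree').where({ hash }).first()

  return { descendants, page }
}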

@ -1,71 +1,17 @@
const _ = require('lodash')
const sanitize = require('sanitize-filename')
const graphHelper = require('../../helpers/graph')
const assetHelper = require('../../helpers/asset')
const path = require('node:path')
const fs = require('fs-extra')
const { v4: uuid } = require('uuid')
module.exports = {
Query: {
async assets(obj, args, context) {
let cond = {
folderId: args.folderId === 0 ? null : args.folderId
}
if (args.kind !== 'ALL') {
cond.kind = args.kind.toLowerCase()
}
const folderHierarchy = await WIKI.db.assetFolders.getHierarchy(args.folderId)
const folderPath = folderHierarchy.map(h => h.slug).join('/')
const results = await WIKI.db.assets.query().where(cond)
return _.filter(results, r => {
const path = folderPath ? `${folderPath}/${r.filename}` : r.filename
return WIKI.auth.checkAccess(context.req.user, ['read:assets'], { path })
}).map(a => ({
...a,
kind: a.kind.toUpperCase()
}))
},
async assetsFolders(obj, args, context) {
const results = await WIKI.db.assetFolders.query().where({
parentId: args.parentFolderId === 0 ? null : args.parentFolderId
})
const parentHierarchy = await WIKI.db.assetFolders.getHierarchy(args.parentFolderId)
const parentPath = parentHierarchy.map(h => h.slug).join('/')
return _.filter(results, r => {
const path = parentPath ? `${parentPath}/${r.slug}` : r.slug
return WIKI.auth.checkAccess(context.req.user, ['read:assets'], { path })
})
async assetById(obj, args, context) {
return null
}
},
Mutation: {
/**
* Create New Asset Folder
*/
async createAssetsFolder(obj, args, context) {
try {
const folderSlug = sanitize(args.slug).toLowerCase()
const parentFolderId = args.parentFolderId === 0 ? null : args.parentFolderId
const result = await WIKI.db.assetFolders.query().where({
parentId: parentFolderId,
slug: folderSlug
}).first()
if (!result) {
await WIKI.db.assetFolders.query().insert({
slug: folderSlug,
name: folderSlug,
parentId: parentFolderId
})
return {
responseResult: graphHelper.generateSuccess('Asset Folder has been created successfully.')
}
} else {
throw new WIKI.Error.AssetFolderExists()
}
} catch (err) {
return graphHelper.generateError(err)
}
},
/**
* Rename an Asset
*/
@ -113,7 +59,7 @@ module.exports = {
}
// Update filename + hash
const fileHash = assetHelper.generateHash(assetTargetPath)
const fileHash = '' // assetHelper.generateHash(assetTargetPath)
await WIKI.db.assets.query().patch({
filename: filename,
hash: fileHash
@ -189,41 +135,62 @@ module.exports = {
*/
async uploadAssets(obj, args, context) {
try {
// -> Get Folder
const folder = await WIKI.db.tree.query().findById(args.folderId)
if (!folder) {
throw new Error('ERR_INVALID_FOLDER_ID')
}
// -> Get Site
const site = await WIKI.db.sites.query().findById(folder.siteId)
if (!site) {
throw new Error('ERR_INVALID_SITE_ID')
}
const results = await Promise.allSettled(args.files.map(async fl => {
const { filename, mimetype, createReadStream } = await fl
WIKI.logger.debug(`Processing asset upload ${filename} of type ${mimetype}...`)
// Format filename
const formattedFilename = ''
if (!WIKI.extensions.ext.sharp.isInstalled) {
throw new Error('This feature requires the Sharp extension but it is not installed.')
}
if (!['.png', '.jpg', 'webp', '.gif'].some(s => filename.endsWith(s))) {
throw new Error('Invalid File Extension. Must be svg, png, jpg, webp or gif.')
}
const destFormat = mimetype.startsWith('image/svg') ? 'svg' : 'png'
const destFolder = path.resolve(
process.cwd(),
WIKI.config.dataPath,
`assets`
)
const destPath = path.join(destFolder, `logo-${args.id}.${destFormat}`)
await fs.ensureDir(destFolder)
// -> Resize
await WIKI.extensions.ext.sharp.resize({
format: destFormat,
inputStream: createReadStream(),
outputPath: destPath,
height: 72
// Save asset to DB
const asset = await WIKI.db.knex('assets').insert({
}).returning('id')
// Add to tree
await WIKI.db.knex('tree').insert({
id: asset.id,
folderPath: folder.folderPath ? `${folder.folderPath}.${folder.fileName}` : folder.fileName,
fileName: formattedFilename,
type: 'asset',
localeCode: ''
})
// -> Save logo meta to DB
const site = await WIKI.db.sites.query().findById(args.id)
if (!site.config.assets.logo) {
site.config.assets.logo = uuid()
// Create thumbnail
if (!['.png', '.jpg', 'webp', '.gif'].some(s => filename.endsWith(s))) {
if (!WIKI.extensions.ext.sharp.isInstalled) {
WIKI.logger.warn('Cannot generate asset thumbnail because the Sharp extension is not installed.')
} else {
const destFormat = mimetype.startsWith('image/svg') ? 'svg' : 'png'
const destFolder = path.resolve(
process.cwd(),
WIKI.config.dataPath,
`assets`
)
const destPath = path.join(destFolder, `asset-${site.id}-${hash}.${destFormat}`)
await fs.ensureDir(destFolder)
// -> Resize
await WIKI.extensions.ext.sharp.resize({
format: destFormat,
inputStream: createReadStream(),
outputPath: destPath,
height: 72
})
}
}
site.config.assets.logoExt = destFormat
await WIKI.db.sites.query().findById(args.id).patch({ config: site.config })
await WIKI.db.sites.reloadCache()
// -> Save image data to DB
const imgBuffer = await fs.readFile(destPath)
await WIKI.db.knex('assetData').insert({
@ -254,9 +221,4 @@ module.exports = {
}
}
}
// File: {
// folder(fl) {
// return fl.getFolder()
// }
// }
}

@ -147,7 +147,7 @@ module.exports = {
/**
* UPLOAD LOGO
*/
async uploadSiteLogo (obj, args) {
async uploadSiteLogo (obj, args, context) {
try {
const { filename, mimetype, createReadStream } = await args.image
WIKI.logger.info(`Processing site logo ${filename} of type ${mimetype}...`)
@ -182,9 +182,18 @@ module.exports = {
await WIKI.db.sites.reloadCache()
// -> Save image data to DB
const imgBuffer = await fs.readFile(destPath)
await WIKI.db.knex('assetData').insert({
await WIKI.db.knex('assets').insert({
id: site.config.assets.logo,
data: imgBuffer
filename: `_logo.${destFormat}`,
hash: '_logo',
ext: `.${destFormat}`,
isSystem: true,
kind: 'image',
mime: (destFormat === 'svg') ? 'image/svg' : 'image/png',
fileSize: Math.ceil(imgBuffer.byteLength / 1024),
data: imgBuffer,
authorId: context.req.user.id,
siteId: site.id
}).onConflict('id').merge()
WIKI.logger.info('New site logo processed successfully.')
return {
@ -198,7 +207,7 @@ module.exports = {
/**
* UPLOAD FAVICON
*/
async uploadSiteFavicon (obj, args) {
async uploadSiteFavicon (obj, args, context) {
try {
const { filename, mimetype, createReadStream } = await args.image
WIKI.logger.info(`Processing site favicon ${filename} of type ${mimetype}...`)
@ -234,9 +243,18 @@ module.exports = {
await WIKI.db.sites.reloadCache()
// -> Save image data to DB
const imgBuffer = await fs.readFile(destPath)
await WIKI.db.knex('assetData').insert({
await WIKI.db.knex('assets').insert({
id: site.config.assets.favicon,
data: imgBuffer
filename: `_favicon.${destFormat}`,
hash: '_favicon',
ext: `.${destFormat}`,
isSystem: true,
kind: 'image',
mime: (destFormat === 'svg') ? 'image/svg' : 'image/png',
fileSize: Math.ceil(imgBuffer.byteLength / 1024),
data: imgBuffer,
authorId: context.req.user.id,
siteId: site.id
}).onConflict('id').merge()
WIKI.logger.info('New site favicon processed successfully.')
return {
@ -250,7 +268,7 @@ module.exports = {
/**
* UPLOAD LOGIN BG
*/
async uploadSiteLoginBg (obj, args) {
async uploadSiteLoginBg (obj, args, context) {
try {
const { filename, mimetype, createReadStream } = await args.image
WIKI.logger.info(`Processing site login bg ${filename} of type ${mimetype}...`)
@ -283,9 +301,18 @@ module.exports = {
}
// -> Save image data to DB
const imgBuffer = await fs.readFile(destPath)
await WIKI.db.knex('assetData').insert({
await WIKI.db.knex('assets').insert({
id: site.config.assets.loginBg,
data: imgBuffer
filename: '_loginbg.jpg',
hash: '_loginbg',
ext: '.jpg',
isSystem: true,
kind: 'image',
mime: 'image/jpg',
fileSize: Math.ceil(imgBuffer.byteLength / 1024),
data: imgBuffer,
authorId: context.req.user.id,
siteId: site.id
}).onConflict('id').merge()
WIKI.logger.info('New site login bg processed successfully.')
return {

@ -68,7 +68,7 @@ module.exports = {
}
}
// -> Include root items
if (args.includeRootItems) {
if (args.includeRootFolders) {
builder.orWhere({
folderPath: '',
type: 'folder'
@ -98,7 +98,8 @@ module.exports = {
createdAt: item.createdAt,
updatedAt: item.updatedAt,
...(item.type === 'folder') && {
childrenCount: item.meta?.children || 0
childrenCount: item.meta?.children || 0,
isAncestor: item.folderPath.length < parentPath.length
}
}))
},
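
The new `isAncestor` flag marks the parent chain returned by `includeAncestors`: any folder whose stored path is shorter than the requested parent path is an ancestor, and the file manager later skips those when building the file list (see the `!item.isAncestor` check in the component diff below). Roughly, for a request on docs/guides the folder entries would come back as in this sketch (selected fields only):

// tree(parentPath: "docs/guides", includeAncestors: true, ...) — folder entries, other fields omitted
[
  { __typename: 'TreeItemFolder', fileName: 'docs', isAncestor: true },    // parent-chain entry: its folderPath is shorter than 'docs.guides'
  { __typename: 'TreeItemFolder', fileName: 'install', isAncestor: false } // a direct child of docs/guides
]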

@ -3,34 +3,30 @@
# ===============================================
extend type Query {
assets(
folderId: Int!
kind: AssetKind!
assetById(
id: UUID!
): [AssetItem]
assetsFolders(
parentFolderId: Int!
): [AssetFolder]
}
extend type Mutation {
createAssetsFolder(
parentFolderId: Int!
slug: String!
name: String
): DefaultResponse
renameAsset(
id: Int!
id: UUID!
filename: String!
): DefaultResponse
deleteAsset(
id: Int!
id: UUID!
): DefaultResponse
"""
Upload one or more assets.
Must provide either `folderId` or a combination of `folderPath`, `locale` and `siteId`.
"""
uploadAssets(
siteId: UUID!
folderId: UUID
folderPath: String
locale: String
siteId: UUID
files: [Upload!]!
): DefaultResponse
@ -42,27 +38,20 @@ extend type Mutation {
# -----------------------------------------------
type AssetItem {
id: Int!
filename: String!
ext: String!
kind: AssetKind!
mime: String!
fileSize: Int!
metadata: String
createdAt: Date!
updatedAt: Date!
folder: AssetFolder
id: UUID
filename: String
ext: String
kind: AssetKind
mime: String
fileSize: Int
metadata: JSON
createdAt: Date
updatedAt: Date
author: User
}
type AssetFolder {
id: Int!
slug: String!
name: String
}
enum AssetKind {
IMAGE
BINARY
ALL
document
image
other
}
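
For reference, a client call against the reworked `uploadAssets` signature could look like the sketch below, using the `folderPath` + `locale` + `siteId` combination described in the schema comment. The Apollo client instance and the selected `DefaultResponse` fields are assumptions here, not part of this commit:

import gql from 'graphql-tag'

const UPLOAD_ASSETS = gql`
  mutation uploadAssets ($siteId: UUID!, $folderPath: String, $locale: String, $files: [Upload!]!) {
    uploadAssets (siteId: $siteId, folderPath: $folderPath, locale: $locale, files: $files) {
      operation { succeeded message } # assumed DefaultResponse shape
    }
  }
`

// files: File objects picked up by the GraphQL Upload scalar; apolloClient: the app's existing client
async function uploadToFolder (apolloClient, siteId, files) {
  return apolloClient.mutate({
    mutation: UPLOAD_ASSETS,
    variables: {
      siteId,
      folderPath: 'docs/images', // alternatively pass folderId (UUID) and omit folderPath/locale
      locale: 'en',
      files
    }
  })
}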

@ -65,6 +65,7 @@ type Site {
robots: SiteRobots
features: SiteFeatures
defaults: SiteDefaults
uploads: SiteUploads
locale: String
localeNamespaces: [String]
localeNamespacing: Boolean
@ -93,6 +94,11 @@ type SiteDefaults {
tocDepth: PageTocDepth
}
type SiteUploads {
conflictBehavior: SiteUploadConflictBehavior
normalizeFilename: Boolean
}
type SiteLocale {
locale: String
autoUpdate: Boolean
@ -142,6 +148,12 @@ enum SiteReasonForChangeMode {
required
}
enum SiteUploadConflictBehavior {
overwrite
reject
new
}
type SiteCreateResponse {
operation: Operation
site: Site
@ -161,6 +173,7 @@ input SiteUpdateInput {
robots: SiteRobotsInput
features: SiteFeaturesInput
defaults: SiteDefaultsInput
uploads: SiteUploadsInput
theme: SiteThemeInput
}
@ -204,3 +217,8 @@ input SiteThemeInput {
baseFont: String
contentFont: String
}
input SiteUploadsInput {
conflictBehavior: SiteUploadConflictBehavior
normalizeFilename: Boolean
}
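
The `SiteUploads` settings only carry configuration; the sketch below shows the semantics implied by the admin locale strings further down (overwrite keeps the name, reject refuses the upload, new appends a timestamp, normalizeFilename slugifies the name). `resolveUploadFilename` and `existsInFolder` are hypothetical helpers, not part of this commit:

const path = require('node:path')

// Hypothetical helper: pick the final filename for an upload based on the site's uploads config.
async function resolveUploadFilename (filename, uploads, existsInFolder) {
  let finalName = filename
  if (uploads.normalizeFilename) {
    // "standard URL-friendly format": lowercase, non-alphanumerics collapsed to dashes
    const parsed = path.parse(filename)
    finalName = parsed.name.toLowerCase().replace(/[^a-z0-9]+/g, '-').replace(/(^-|-$)/g, '') + parsed.ext.toLowerCase()
  }
  if (await existsInFolder(finalName)) {
    switch (uploads.conflictBehavior) {
      case 'reject':
        throw new Error('ERR_ASSET_ALREADY_EXISTS') // placeholder error code
      case 'new': {
        // "Append Time to Filename"
        const { name, ext } = path.parse(finalName)
        finalName = `${name}-${Date.now()}${ext}`
        break
      }
      case 'overwrite':
      default:
        break // keep the name; the existing asset gets replaced
    }
  }
  return finalName
}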

@ -3,18 +3,32 @@
# ===============================================
extend type Query {
"""
Browse the tree.
Must provide either `parentId` or a combination of `parentPath` and `locale`.
"""
tree(
siteId: UUID!
parentId: UUID
parentPath: String
locale: String
types: [TreeItemType]
limit: Int
offset: Int
orderBy: TreeOrderBy
orderByDirection: OrderByDirection
"""
How many levels of children to include. Defaults to 1.
"""
depth: Int
"""
Include all parent folders up to root
"""
includeAncestors: Boolean
includeRootItems: Boolean
"""
Include all folders at root level
"""
includeRootFolders: Boolean
): [TreeItem]
folderById(
id: UUID!
@ -72,16 +86,24 @@ enum TreeOrderBy {
updatedAt
}
type TreeItemFolder {
interface TreeItem {
id: UUID
folderPath: String
fileName: String
title: String
}
type TreeItemFolder implements TreeItem {
id: UUID
childrenCount: Int
depth: Int
fileName: String
folderPath: String
title: String
isAncestor: Boolean
}
type TreeItemPage {
type TreeItemPage implements TreeItem {
id: UUID
createdAt: Date
depth: Int
@ -93,7 +115,7 @@ type TreeItemPage {
updatedAt: Date
}
type TreeItemAsset {
type TreeItemAsset implements TreeItem {
id: UUID
createdAt: Date
depth: Int
@ -105,5 +127,3 @@ type TreeItemAsset {
title: String
updatedAt: Date
}
union TreeItem = TreeItemFolder | TreeItemPage | TreeItemAsset
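
With `TreeItem` promoted from a union to an interface, the shared fields can now be selected once at the top level; only type-specific fields still need inline fragments. A sketch of browsing by path with the new arguments (the values are placeholders, and depth handling is whatever the resolver implements):

import gql from 'graphql-tag'

const BROWSE_TREE = gql`
  query browseTree ($siteId: UUID!) {
    tree (
      siteId: $siteId
      parentPath: "docs/guides"
      locale: "en"
      depth: 2
      includeAncestors: true
    ) {
      __typename
      id
      folderPath
      fileName
      title
      ... on TreeItemFolder { childrenCount isAncestor }
      ... on TreeItemPage { updatedAt }
      ... on TreeItemAsset { fileSize }
    }
  }
`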

@ -1,15 +0,0 @@
const crypto = require('crypto')
const path = require('path')
module.exports = {
/**
* Generate unique hash from page
*/
generateHash(assetPath) {
return crypto.createHash('sha1').update(assetPath).digest('hex')
},
getPathInfo(assetPath) {
return path.parse(assetPath.toLowerCase())
}
}

@ -1,4 +1,5 @@
const _ = require('lodash')
const crypto = require('node:crypto')
module.exports = {
/* eslint-disable promise/param-names */
@ -29,6 +30,33 @@ module.exports = {
})
}
},
/**
* Decode a tree path
*
* @param {string} str String to decode
* @returns Decoded tree path
*/
decodeTreePath (str) {
return str.replaceAll('_', '-').replaceAll('.', '/')
},
/**
* Encode a tree path
*
* @param {string} str String to encode
* @returns Encoded tree path
*/
encodeTreePath (str) {
return str?.toLowerCase()?.replaceAll('-', '_')?.replaceAll('/', '.') || ''
},
/**
* Generate SHA-1 Hash of a string
*
* @param {string} str String to hash
* @returns Hashed string
*/
generateHash (str) {
return crypto.createHash('sha1').update(str).digest('hex')
},
/**
* Get default value of type
*
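
Taken together, the two path helpers map the public slash-and-dash form onto the underscore-and-dot form the ltree column accepts, and `generateHash` produces the indexed lookup key used by the tree model. A quick illustration (the require path matches how the models import this helper; note the round trip is lossy for the original casing):

const commonHelper = require('../helpers/common')

commonHelper.encodeTreePath('Getting-Started/Install-Guide')
// -> 'getting_started.install_guide'   (lowercased, '-' -> '_', '/' -> '.')

commonHelper.decodeTreePath('getting_started.install_guide')
// -> 'getting-started/install-guide'   (reverse mapping; the original casing is not recoverable)

commonHelper.generateHash('getting-started/install-guide')
// -> SHA-1 hex digest of the decoded full path, stored in the new tree.hash column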

@ -1,76 +0,0 @@
const Model = require('objection').Model
const _ = require('lodash')
/**
* Users model
*/
module.exports = class AssetFolder extends Model {
static get tableName() { return 'assetFolders' }
static get jsonSchema () {
return {
type: 'object',
properties: {
id: {type: 'integer'},
name: {type: 'string'},
slug: {type: 'string'}
}
}
}
static get relationMappings() {
return {
parent: {
relation: Model.BelongsToOneRelation,
modelClass: AssetFolder,
join: {
from: 'assetFolders.folderId',
to: 'assetFolders.id'
}
}
}
}
/**
* Get full folder hierarchy starting from specified folder to root
*
* @param {Number} folderId Id of the folder
*/
static async getHierarchy (folderId) {
let hier
if (WIKI.config.db.type === 'mssql') {
hier = await WIKI.db.knex.with('ancestors', qb => {
qb.select('id', 'name', 'slug', 'parentId').from('assetFolders').where('id', folderId).unionAll(sqb => {
sqb.select('a.id', 'a.name', 'a.slug', 'a.parentId').from('assetFolders AS a').join('ancestors', 'ancestors.parentId', 'a.id')
})
}).select('*').from('ancestors')
} else {
hier = await WIKI.db.knex.withRecursive('ancestors', qb => {
qb.select('id', 'name', 'slug', 'parentId').from('assetFolders').where('id', folderId).union(sqb => {
sqb.select('a.id', 'a.name', 'a.slug', 'a.parentId').from('assetFolders AS a').join('ancestors', 'ancestors.parentId', 'a.id')
})
}).select('*').from('ancestors')
}
// The ancestors are from children to grandparents, must reverse for correct path order.
return _.reverse(hier)
}
/**
* Get full folder paths
*/
static async getAllPaths () {
const all = await WIKI.db.assetFolders.query()
let folders = {}
all.forEach(fld => {
_.set(folders, fld.id, fld.slug)
let parentId = fld.parentId
while (parentId !== null || parentId > 0) {
const parent = _.find(all, ['id', parentId])
_.set(folders, fld.id, `${parent.slug}/${_.get(folders, fld.id)}`)
parentId = parent.parentId
}
})
return folders
}
}

@ -3,7 +3,6 @@ const moment = require('moment')
const path = require('path')
const fs = require('fs-extra')
const _ = require('lodash')
const assetHelper = require('../helpers/asset')
/**
* Users model
@ -16,7 +15,7 @@ module.exports = class Asset extends Model {
type: 'object',
properties: {
id: {type: 'integer'},
id: {type: 'string'},
filename: {type: 'string'},
hash: {type: 'string'},
ext: {type: 'string'},
@ -77,18 +76,16 @@ module.exports = class Asset extends Model {
static async upload(opts) {
const fileInfo = path.parse(opts.originalname)
const fileHash = assetHelper.generateHash(opts.assetPath)
// Check for existing asset
let asset = await WIKI.db.assets.query().where({
hash: fileHash,
// hash: fileHash,
folderId: opts.folderId
}).first()
// Build Object
let assetRow = {
filename: opts.originalname,
hash: fileHash,
ext: fileInfo.ext,
kind: _.startsWith(opts.mimetype, 'image/') ? 'image' : 'binary',
mime: opts.mimetype,
@ -138,11 +135,11 @@ module.exports = class Asset extends Model {
}
// Move temp upload to cache
if (opts.mode === 'upload') {
await fs.move(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
} else {
await fs.copy(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
}
// if (opts.mode === 'upload') {
// await fs.move(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
// } else {
// await fs.copy(opts.path, path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`), { overwrite: true })
// }
// Add to Storage
if (!opts.skipStorage) {
@ -165,8 +162,8 @@ module.exports = class Asset extends Model {
static async getAsset(assetPath, res) {
try {
const fileInfo = assetHelper.getPathInfo(assetPath)
const fileHash = assetHelper.generateHash(assetPath)
const fileInfo = '' // assetHelper.getPathInfo(assetPath)
const fileHash = '' // assetHelper.generateHash(assetPath)
const cachePath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${fileHash}.dat`)
// Force unsafe extensions to download

@ -341,12 +341,11 @@ module.exports = class Page extends Model {
// -> Add to tree
const pathParts = page.path.split('/')
await WIKI.db.knex('tree').insert({
await WIKI.db.tree.addPage({
id: page.id,
folderPath: _.initial(pathParts).join('/'),
parentPath: _.initial(pathParts).join('/'),
fileName: _.last(pathParts),
type: 'page',
localeCode: page.localeCode,
locale: page.localeCode,
title: page.title,
meta: {
authorId: page.authorId,

@ -1,6 +1,8 @@
const Model = require('objection').Model
const _ = require('lodash')
const commonHelper = require('../helpers/common')
const rePathName = /^[a-z0-9-]+$/
const reTitle = /^[^<>"]+$/
@ -60,6 +62,95 @@ module.exports = class Tree extends Model {
this.updatedAt = new Date().toISOString()
}
/**
* Get a Folder
*
* @param {Object} args - Fetch Properties
* @param {string} [args.id] - UUID of the folder
* @param {string} [args.path] - Path of the folder
* @param {string} [args.locale] - Locale code of the folder (when using path)
* @param {string} [args.siteId] - UUID of the site in which the folder is (when using path)
* @param {boolean} [args.createIfMissing] - Create the folder and its ancestors if missing (when using path)
*/
static async getFolder ({ id, path, locale, siteId, createIfMissing = false }) {
// Get by ID
if (id) {
const parent = await WIKI.db.knex('tree').where('id', id).first()
if (!parent) {
throw new Error('ERR_NONEXISTING_FOLDER_ID')
}
return parent
} else {
// Get by path
const parentPath = commonHelper.encodeTreePath(path)
const parentPathParts = parentPath.split('.')
const parentFilter = {
folderPath: _.dropRight(parentPathParts).join('.'),
fileName: _.last(parentPathParts)
}
const parent = await WIKI.db.knex('tree').where({
...parentFilter,
localeCode: locale,
siteId
}).first()
if (parent) {
return parent
} else if (createIfMissing) {
return WIKI.db.tree.createFolder({
parentPath: parentFilter.folderPath,
pathName: parentFilter.fileName,
title: parentFilter.fileName,
locale,
siteId
})
} else {
throw new Error('ERR_NONEXISTING_FOLDER_PATH')
}
}
}
/**
* Add Page Entry
*
* @param {Object} args - New Page Properties
* @param {string} [args.parentId] - UUID of the parent folder
* @param {string} [args.parentPath] - Path of the parent folder
* @param {string} args.fileName - Filename of the page to add
* @param {string} args.title - Title of the page to add
* @param {string} args.locale - Locale code of the page to add
* @param {string} args.siteId - UUID of the site in which the page will be added
*/
static async addPage ({ id, parentId, parentPath, fileName, title, locale, siteId, meta = {} }) {
const folder = (parentId || parentPath) ? await WIKI.db.tree.getFolder({
id: parentId,
path: parentPath,
locale,
siteId,
createIfMissing: true
}) : {
folderPath: '',
fileName: ''
}
const folderPath = folder.folderPath ? `${folder.folderPath}.${folder.fileName}` : folder.fileName
const fullPath = folderPath ? `${commonHelper.decodeTreePath(folderPath)}/${fileName}` : fileName
WIKI.logger.debug(`Adding page ${fullPath} to tree...`)
const pageEntry = await WIKI.db.knex('tree').insert({
id,
folderPath,
fileName,
type: 'page',
title: title,
hash: commonHelper.generateHash(fullPath),
localeCode: locale,
siteId,
meta
}).returning('*')
return pageEntry[0]
}
/**
* Create New Folder
*
@ -82,8 +173,8 @@ module.exports = class Tree extends Model {
throw new Error('ERR_INVALID_TITLE')
}
parentPath = commonHelper.encodeTreePath(parentPath)
WIKI.logger.debug(`Creating new folder ${pathName}...`)
parentPath = parentPath?.replaceAll('/', '.')?.replaceAll('-', '_') || ''
const parentPathParts = parentPath.split('.')
const parentFilter = {
folderPath: _.dropRight(parentPathParts).join('.'),
@ -134,10 +225,12 @@ module.exports = class Tree extends Model {
})
for (const ancestor of _.differenceWith(expectedAncestors, existingAncestors, (expAnc, exsAnc) => expAnc.folderPath === exsAnc.folderPath && expAnc.fileName === exsAnc.fileName)) {
WIKI.logger.debug(`Creating missing parent folder ${ancestor.fileName} at path /${ancestor.folderPath}...`)
const newAncestorFullPath = ancestor.folderPath ? `${commonHelper.decodeTreePath(ancestor.folderPath)}/${ancestor.fileName}` : ancestor.fileName
const newAncestor = await WIKI.db.knex('tree').insert({
...ancestor,
type: 'folder',
title: ancestor.fileName,
hash: commonHelper.generateHash(newAncestorFullPath),
localeCode: locale,
siteId: siteId,
meta: {
@ -147,24 +240,25 @@ module.exports = class Tree extends Model {
// Parent didn't exist until now, assign it
if (!parent && ancestor.folderPath === parentFilter.folderPath && ancestor.fileName === parentFilter.fileName) {
parent = newAncestor
parent = newAncestor[0]
}
}
}
// Create folder
WIKI.logger.debug(`Creating new folder ${pathName} at path /${parentPath}...`)
await WIKI.db.knex('tree').insert({
const fullPath = parentPath ? `${commonHelper.decodeTreePath(parentPath)}/${pathName}` : pathName
const folder = await WIKI.db.knex('tree').insert({
folderPath: parentPath,
fileName: pathName,
type: 'folder',
title: title,
hash: commonHelper.generateHash(fullPath),
localeCode: locale,
siteId: siteId,
meta: {
children: 0
}
})
}).returning('*')
// Update parent ancestor count
if (parent) {
@ -175,6 +269,10 @@ module.exports = class Tree extends Model {
}
})
}
WIKI.logger.debug(`Created folder ${folder[0].id} successfully.`)
return folder[0]
}
/**
@ -231,9 +329,11 @@ module.exports = class Tree extends Model {
})
// Rename the folder itself
const fullPath = folder.folderPath ? `${commonHelper.decodeTreePath(folder.folderPath)}/${pathName}` : pathName
await WIKI.db.knex('tree').where('id', folder.id).update({
fileName: pathName,
title: title
title: title,
hash: commonHelper.generateHash(fullPath)
})
} else {
// Update the folder title only
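
A worked example of what `WIKI.db.tree.addPage` writes for a page saved at docs/install, mirroring the call shape used in the models/pages.js change above (`page` and `site` are assumed to be in scope, as they are in createPage):

const entry = await WIKI.db.tree.addPage({
  id: page.id,                 // reuse the page UUID as the tree entry id
  parentPath: 'docs',          // missing ancestor folders are created via getFolder(..., createIfMissing)
  fileName: 'install',
  locale: page.localeCode,
  siteId: site.id,
  title: page.title,
  meta: { authorId: page.authorId }
})

// Resulting tree row (selected columns):
//   folderPath: 'docs'               -> encoded ltree path of the parent folder
//   fileName:   'install'
//   type:       'page'
//   hash:       sha1('docs/install') -> enables direct lookups by full path
//   localeCode: page.localeCode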

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 40 40" width="80px" height="80px"><path fill="#fff" d="M13.5 38.5L13.5 26.5 1.5 26.5 1.5 1.5 26.5 1.5 26.5 13.5 38.5 13.5 38.5 38.5z"/><path fill="#4788c7" d="M26,2v11v1h1h11v24H14V27v-1h-1H2V2H26 M27,1H1v26h12v12h26V13H27V1L27,1z"/><path fill="#4788c7" d="M33 31L28 26 31 23 23 23 23 31 26 28 31 33zM14 12L9 7 7 9 12 14 9 17 17 17 17 9zM20.849 19.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C21.043 19.346 21.043 19.664 20.849 19.859zM17.849 22.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C18.043 22.346 18.043 22.664 17.849 22.859zM14.849 25.859l-.99.99c-.194.194-.513.194-.707 0h0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0h0C15.043 25.346 15.043 25.664 14.849 25.859zM23.849 16.859l-.99.99c-.194.194-.513.194-.707 0l0 0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0l0 0C24.043 16.346 24.043 16.664 23.849 16.859zM26.849 13.859l-.99.99c-.194.194-.513.194-.707 0v0c-.194-.194-.194-.513 0-.707l.99-.99c.194-.194.513-.194.707 0v0C27.043 13.346 27.043 13.664 26.849 13.859z"/></svg>


@ -512,35 +512,34 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
query loadTree (
$siteId: UUID!
$parentId: UUID
$parentPath: String
$types: [TreeItemType]
$includeAncestors: Boolean
$includeRootFolders: Boolean
) {
tree (
siteId: $siteId
parentId: $parentId
parentPath: $parentPath
types: $types
includeAncestors: $includeAncestors
includeRootFolders: $includeRootFolders
) {
__typename
id
folderPath
fileName
title
... on TreeItemFolder {
id
folderPath
fileName
title
childrenCount
isAncestor
}
... on TreeItemPage {
id
folderPath
fileName
title
createdAt
updatedAt
editor
}
... on TreeItemAsset {
id
folderPath
fileName
title
createdAt
updatedAt
fileSize
@ -551,7 +550,10 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
variables: {
siteId: siteStore.id,
parentId,
types
parentPath,
types,
includeAncestors: initLoad,
includeRootFolders: initLoad
},
fetchPolicy: 'network-only'
})
@ -579,7 +581,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
}
// -> File List
if (parentId === state.currentFolderId) {
if (parentId === state.currentFolderId && !item.isAncestor) {
state.fileList.push({
id: item.id,
type: 'folder',
@ -682,7 +684,7 @@ function renameFolder (folderId) {
}
}).onOk(() => {
treeComp.value.resetLoaded()
loadTree({ parentId: folderId })
loadTree({ parentId: folderId, initLoad: true })
})
}

@ -251,7 +251,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
$parentPath: String
$types: [TreeItemType]
$includeAncestors: Boolean
$includeRootItems: Boolean
$includeRootFolders: Boolean
) {
tree (
siteId: $siteId
@ -259,7 +259,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
parentPath: $parentPath
types: $types
includeAncestors: $includeAncestors
includeRootItems: $includeRootItems
includeRootFolders: $includeRootFolders
) {
__typename
... on TreeItemFolder {
@ -287,7 +287,7 @@ async function loadTree ({ parentId = null, parentPath = null, types, initLoad =
parentPath,
types,
includeAncestors: initLoad,
includeRootItems: initLoad
includeRootFolders: initLoad
},
fetchPolicy: 'network-only'
})

@ -1614,5 +1614,13 @@
"fileman.copyURLSuccess": "URL has been copied to the clipboard.",
"fileman.folderRename": "Rename Folder",
"fileman.renameFolderInvalidData": "One or more fields are invalid.",
"fileman.renameFolderSuccess": "Folder renamed successfully."
"fileman.renameFolderSuccess": "Folder renamed successfully.",
"admin.general.uploads": "Uploads",
"admin.general.uploadConflictBehavior": "Upload Conflict Behavior",
"admin.general.uploadConflictBehaviorHint": "How should uploads for a file that already exists be handled?",
"admin.general.uploadConflictBehaviorOverwrite": "Overwrite",
"admin.general.uploadConflictBehaviorReject": "Reject",
"admin.general.uploadConflictBehaviorNew": "Append Time to Filename",
"admin.general.uploadNormalizeFilename": "Normalize Filenames",
"admin.general.uploadNormalizeFilenameHint": "Automatically transform filenames to a standard URL-friendly format."
}

@ -406,6 +406,46 @@ q-page.admin-general
markers
)
//- -----------------------
//- Uploads
//- -----------------------
q-card.shadow-1.q-pb-sm.q-mt-md(v-if='state.config.uploads')
q-card-section
.text-subtitle1 {{t('admin.general.uploads')}}
q-item
blueprint-icon(icon='merge-files')
q-item-section
q-item-label {{t(`admin.general.uploadConflictBehavior`)}}
q-item-label(caption) {{t(`admin.general.uploadConflictBehaviorHint`)}}
q-item-section
q-select(
outlined
v-model='state.config.uploads.conflictBehavior'
:options='uploadConflictBehaviors'
option-value='value'
option-label='label'
emit-value
map-options
dense
options-dense
:virtual-scroll-slice-size='1000'
:aria-label='t(`admin.general.uploadConflictBehavior`)'
)
q-separator.q-my-sm(inset)
q-item(tag='label')
blueprint-icon(icon='rename')
q-item-section
q-item-label {{t(`admin.general.uploadNormalizeFilename`)}}
q-item-label(caption) {{t(`admin.general.uploadNormalizeFilenameHint`)}}
q-item-section(avatar)
q-toggle(
v-model='state.config.uploads.normalizeFilename'
color='primary'
checked-icon='las la-check'
unchecked-icon='las la-times'
:aria-label='t(`admin.general.uploadNormalizeFilename`)'
)
//- -----------------------
//- SEO
//- -----------------------
@ -561,6 +601,11 @@ const timeFormats = [
{ value: '12h', label: t('admin.general.defaultTimeFormat12h') },
{ value: '24h', label: t('admin.general.defaultTimeFormat24h') }
]
const uploadConflictBehaviors = [
{ value: 'overwrite', label: t('admin.general.uploadConflictBehaviorOverwrite') },
{ value: 'reject', label: t('admin.general.uploadConflictBehaviorReject') },
{ value: 'new', label: t('admin.general.uploadConflictBehaviorNew') }
]
const timezones = Intl.supportedValuesOf('timeZone')
@ -601,6 +646,10 @@ async function load () {
pageExtensions
logoText
sitemap
uploads {
conflictBehavior
normalizeFilename
}
robots {
index
follow
@ -669,6 +718,10 @@ async function save () {
pageExtensions: state.config.pageExtensions ?? '',
logoText: state.config.logoText ?? false,
sitemap: state.config.sitemap ?? false,
uploads: {
conflictBehavior: state.config.uploads?.conflictBehavior ?? 'overwrite',
normalizeFilename: state.config.uploads?.normalizeFilename ?? false
},
robots: {
index: state.config.robots?.index ?? false,
follow: state.config.robots?.follow ?? false
