Просмотр исходного кода

refactor: fix models + storage modules

NGPixel 3 года назад
Родитель
Commit
edb529378e
53 измененных файлов с 1772 добавлено и 1193 удалено
  1. 5 3
      dev.code-workspace
  2. 19 8
      package.json
  3. 6 6
      server/core/auth.js
  4. 1 2
      server/core/kernel.js
  5. 6 0
      server/core/servers.js
  6. 55 39
      server/db/migrations/3.0.0.js
  7. 5 1
      server/index.js
  8. 1 1
      server/master.js
  9. 1 2
      server/models/analytics.js
  10. 13 69
      server/models/authentication.js
  11. 12 50
      server/models/commentProviders.js
  12. 0 65
      server/models/editors.js
  13. 19 36
      server/models/pages.js
  14. 23 65
      server/models/renderers.js
  15. 108 0
      server/models/sites.js
  16. 97 120
      server/models/storage.js
  17. 1 5
      server/models/tags.js
  18. 3 3
      server/models/users.js
  19. 0 6
      server/modules/editor/api/definition.yml
  20. 0 6
      server/modules/editor/ckeditor/definition.yml
  21. 0 6
      server/modules/editor/code/definition.yml
  22. 0 6
      server/modules/editor/markdown/definition.yml
  23. 0 6
      server/modules/editor/redirect/definition.yml
  24. 0 6
      server/modules/editor/wysiwyg/definition.yml
  25. 27 15
      server/modules/storage/azure/definition.yml
  26. 0 10
      server/modules/storage/box/definition.yml
  27. 0 26
      server/modules/storage/box/storage.js
  28. 25 0
      server/modules/storage/db/definition.yml
  29. 14 0
      server/modules/storage/db/storage.js
  30. 0 45
      server/modules/storage/digitalocean/definition.yml
  31. 0 3
      server/modules/storage/digitalocean/storage.js
  32. 22 10
      server/modules/storage/disk/definition.yml
  33. 1 4
      server/modules/storage/disk/storage.js
  34. 0 9
      server/modules/storage/dropbox/definition.yml
  35. 0 26
      server/modules/storage/dropbox/storage.js
  36. 65 0
      server/modules/storage/gcs/definition.yml
  37. 164 0
      server/modules/storage/gcs/storage.js
  38. 0 9
      server/modules/storage/gdrive/definition.yml
  39. 0 26
      server/modules/storage/gdrive/storage.js
  40. 70 27
      server/modules/storage/git/definition.yml
  41. 8 45
      server/modules/storage/git/storage.js
  42. 49 0
      server/modules/storage/github/definition.yml
  43. 211 0
      server/modules/storage/github/storage.js
  44. 0 9
      server/modules/storage/onedrive/definition.yml
  45. 0 26
      server/modules/storage/onedrive/storage.js
  46. 0 168
      server/modules/storage/s3/common.js
  47. 139 17
      server/modules/storage/s3/definition.yml
  48. 165 2
      server/modules/storage/s3/storage.js
  49. 0 57
      server/modules/storage/s3generic/definition.yml
  50. 0 3
      server/modules/storage/s3generic/storage.js
  51. 40 17
      server/modules/storage/sftp/definition.yml
  52. 1 6
      server/modules/storage/sftp/storage.js
  53. 396 122
      yarn.lock

+ 5 - 3
dev.code-workspace

@@ -1,10 +1,12 @@
 {
 	"folders": [
 		{
-			"path": "ux"
+			"name": "server",
+			"path": "server"
 		},
 		{
-			"path": "server"
+			"name": "ux",
+			"path": "ux"
 		}
 	],
 	"settings": {
@@ -13,4 +15,4 @@
 			"src/i18n/locales"
 		]
 	}
-}
+}

+ 19 - 8
package.json

@@ -7,7 +7,8 @@
   "dev": true,
   "scripts": {
     "start": "node server",
-    "dev": "node dev",
+    "dev": "nodemon server",
+    "dev-legacy": "node dev",
     "test": "eslint --format codeframe --ext .js,.vue . && pug-lint server/views && jest",
     "cypress:open": "cypress open"
   },
@@ -119,12 +120,12 @@
     "moment-timezone": "0.5.31",
     "ms": "2.1.3",
     "multer": "1.4.4",
-    "nanoid": "3.2.0",
+    "nanoid": "3.3.2",
     "node-2fa": "1.1.2",
     "node-cache": "5.1.2",
     "nodemailer": "6.7.3",
     "objection": "3.0.1",
-    "passport": "0.4.1",
+    "passport": "0.5.2",
     "passport-auth0": "1.4.2",
     "passport-azure-ad": "4.3.1",
     "passport-cas": "0.1.1",
@@ -135,13 +136,13 @@
     "passport-gitlab2": "5.0.0",
     "passport-google-oauth20": "2.0.0",
     "passport-jwt": "4.0.0",
-    "passport-ldapauth": "2.1.4",
+    "passport-ldapauth": "3.0.1",
     "passport-local": "1.0.0",
     "passport-microsoft": "0.1.0",
     "passport-oauth2": "1.6.1",
     "passport-okta-oauth": "0.0.1",
-    "passport-openidconnect": "0.0.2",
-    "passport-saml": "1.3.5",
+    "passport-openidconnect": "0.1.1",
+    "passport-saml": "3.2.1",
     "passport-slack-oauth2": "1.1.1",
     "passport-twitch-oauth": "1.0.0",
     "pem-jwk": "2.0.0",
@@ -241,7 +242,7 @@
     "filepond-plugin-file-validate-type": "1.2.6",
     "filesize.js": "2.0.0",
     "graphql-persisted-document-loader": "2.0.0",
-    "graphql-tag": "2.11.0",
+    "graphql-tag": "2.12.6",
     "hammerjs": "2.0.8",
     "html-webpack-plugin": "4.5.0",
     "html-webpack-pug-plugin": "2.0.0",
@@ -256,6 +257,7 @@
     "mini-css-extract-plugin": "0.11.3",
     "moment-duration-format": "2.3.2",
     "moment-timezone-data-webpack-plugin": "1.3.0",
+    "nodemon": "2.0.15",
     "offline-plugin": "5.0.7",
     "optimize-css-assets-webpack-plugin": "5.0.4",
     "pako": "1.0.11",
@@ -265,7 +267,7 @@
     "postcss-import": "12.0.1",
     "postcss-loader": "3.0.0",
     "postcss-preset-env": "6.7.0",
-    "postcss-selector-parser": "6.0.9",
+    "postcss-selector-parser": "6.0.10",
     "prismjs": "1.22.0",
     "pug-lint": "2.6.0",
     "pug-loader": "2.4.0",
@@ -371,5 +373,14 @@
     "type": "opencollective",
     "url": "https://opencollective.com/wikijs",
     "logo": "https://opencollective.com/opencollective/logo.txt"
+  },
+  "nodemonConfig": {
+    "env": {
+      "NODE_ENV": "development"
+    },
+    "ext": "js,json,graphql,gql",
+    "watch": [
+      "server/"
+    ]
   }
 }

+ 6 - 6
server/core/auth.js

@@ -66,7 +66,7 @@ module.exports = {
       // Load JWT
       passport.use('jwt', new passportJWT.Strategy({
         jwtFromRequest: securityHelper.extractJWT,
-        secretOrKey: WIKI.config.certs.public,
+        secretOrKey: WIKI.config.auth.certs.public,
         audience: WIKI.config.auth.audience,
         issuer: 'urn:wiki.js',
         algorithms: ['RS256']
@@ -76,13 +76,13 @@ module.exports = {
 
       // Load enabled strategies
       const enabledStrategies = await WIKI.models.authentication.getStrategies()
-      for (let idx in enabledStrategies) {
+      for (const idx in enabledStrategies) {
         const stg = enabledStrategies[idx]
         try {
-          const strategy = require(`../modules/authentication/${stg.strategyKey}/authentication.js`)
+          const strategy = require(`../modules/authentication/${stg.module}/authentication.js`)
 
-          stg.config.callbackURL = `${WIKI.config.host}/login/${stg.key}/callback`
-          stg.config.key = stg.key;
+          stg.config.callbackURL = `${WIKI.config.host}/login/${stg.id}/callback`
+          stg.config.key = stg.id
           strategy.init(passport, stg.config)
           strategy.config = stg.config
 
@@ -92,7 +92,7 @@ module.exports = {
           }
           WIKI.logger.info(`Authentication Strategy ${stg.displayName}: [ OK ]`)
         } catch (err) {
-          WIKI.logger.error(`Authentication Strategy ${stg.displayName} (${stg.key}): [ FAILED ]`)
+          WIKI.logger.error(`Authentication Strategy ${stg.displayName} (${stg.id}): [ FAILED ]`)
           WIKI.logger.error(err)
         }
       }

+ 1 - 2
server/core/kernel.js

@@ -67,7 +67,6 @@ module.exports = {
     await WIKI.models.analytics.refreshProvidersFromDisk()
     await WIKI.models.authentication.refreshStrategiesFromDisk()
     await WIKI.models.commentProviders.refreshProvidersFromDisk()
-    await WIKI.models.editors.refreshEditorsFromDisk()
     await WIKI.models.renderers.refreshRenderersFromDisk()
     await WIKI.models.storage.refreshTargetsFromDisk()
 
@@ -76,7 +75,7 @@ module.exports = {
     await WIKI.auth.activateStrategies()
     await WIKI.models.commentProviders.initProvider()
     await WIKI.models.storage.initTargets()
-    WIKI.scheduler.start()
+    // WIKI.scheduler.start()
 
     await WIKI.models.subscribeToNotifications()
   },

+ 6 - 0
server/core/servers.js

@@ -4,6 +4,7 @@ const https = require('https')
 const { ApolloServer } = require('apollo-server-express')
 const Promise = require('bluebird')
 const _ = require('lodash')
+const { ApolloServerPluginLandingPageGraphQLPlayground, ApolloServerPluginLandingPageProductionDefault } = require('apollo-server-core')
 
 /* global WIKI */
 
@@ -123,6 +124,11 @@ module.exports = {
       uploads: false,
       context: ({ req, res }) => ({ req, res }),
       plugins: [
+        process.env.NODE_ENV === 'development' ? ApolloServerPluginLandingPageGraphQLPlayground({
+          footer: false
+        }) : ApolloServerPluginLandingPageProductionDefault({
+          footer: false
+        })
         // ApolloServerPluginDrainHttpServer({ httpServer: this.servers.http })
         // ...(this.servers.https && ApolloServerPluginDrainHttpServer({ httpServer: this.servers.https }))
       ]

+ 55 - 39
server/db/migrations/3.0.0.js

@@ -71,6 +71,12 @@ exports.up = async knex => {
       table.jsonb('autoEnrollGroups').notNullable().defaultTo('[]')
       table.jsonb('hideOnSites').notNullable().defaultTo('[]')
     })
+    .createTable('commentProviders', table => {
+      table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
+      table.string('module').notNullable()
+      table.boolean('isEnabled').notNullable().defaultTo(false)
+      table.json('config').notNullable()
+    })
     // COMMENTS ----------------------------
     .createTable('comments', table => {
       table.uuid('id').notNullable().primary().defaultTo(knex.raw('gen_random_uuid()'))
@@ -140,6 +146,7 @@ exports.up = async knex => {
       table.timestamp('publishEndDate')
       table.string('action').defaultTo('updated')
       table.text('content')
+      table.string('editor').notNullable()
       table.string('contentType').notNullable()
       table.jsonb('extra').notNullable().defaultTo('{}')
       table.jsonb('tags').defaultTo('[]')
@@ -166,6 +173,7 @@ exports.up = async knex => {
       table.text('content')
       table.text('render')
       table.jsonb('toc')
+      table.string('editor').notNullable()
       table.string('contentType').notNullable()
       table.jsonb('extra').notNullable().defaultTo('{}')
       table.timestamp('createdAt').notNullable().defaultTo(knex.fn.now())
@@ -279,6 +287,9 @@ exports.up = async knex => {
     .table('assetFolders', table => {
       table.uuid('parentId').references('id').inTable('assetFolders').index()
     })
+    .table('commentProviders', table => {
+      table.uuid('siteId').notNullable().references('id').inTable('sites')
+    })
     .table('comments', table => {
       table.uuid('pageId').notNullable().references('id').inTable('pages').index()
       table.uuid('authorId').notNullable().references('id').inTable('users').index()
@@ -306,6 +317,9 @@ exports.up = async knex => {
       table.uuid('pageId').notNullable().references('id').inTable('pages').onDelete('CASCADE')
       table.string('localeCode', 5).references('code').inTable('locales')
     })
+    .table('renderers', table => {
+      table.uuid('siteId').notNullable().references('id').inTable('sites')
+    })
     .table('storage', table => {
       table.uuid('siteId').notNullable().references('id').inTable('sites')
     })
@@ -324,9 +338,50 @@ exports.up = async knex => {
   // DEFAULT DATA
   // =====================================
 
+  // -> GENERATE IDS
+
+  const groupAdminId = uuid()
+  const groupGuestId = '10000000-0000-4000-0000-000000000001'
+  const siteId = uuid()
+  const authModuleId = uuid()
+  const userAdminId = uuid()
+  const userGuestId = uuid()
+
   // -> SYSTEM CONFIG
 
+  WIKI.logger.info('Generating certificates...')
+  const secret = crypto.randomBytes(32).toString('hex')
+  const certs = crypto.generateKeyPairSync('rsa', {
+    modulusLength: 2048,
+    publicKeyEncoding: {
+      type: 'pkcs1',
+      format: 'pem'
+    },
+    privateKeyEncoding: {
+      type: 'pkcs1',
+      format: 'pem',
+      cipher: 'aes-256-cbc',
+      passphrase: secret
+    }
+  })
+
   await knex('settings').insert([
+    {
+      key: 'auth',
+      value: {
+        audience: 'urn:wiki.js',
+        tokenExpiration: '30m',
+        tokenRenewal: '14d',
+        certs: {
+          jwk: pem2jwk(certs.publicKey),
+          public: certs.publicKey,
+          private: certs.privateKey
+        },
+        secret,
+        rootAdminUserId: userAdminId,
+        guestUserId: userGuestId
+      }
+    },
     {
       key: 'mail',
       value: {
@@ -367,12 +422,6 @@ exports.up = async knex => {
         uploadScanSVG: true
       }
     },
-    {
-      key: 'system',
-      value: {
-        sessionSecret: crypto.randomBytes(32).toString('hex')
-      }
-    },
     {
       key: 'update',
       value: {
@@ -393,39 +442,11 @@ exports.up = async knex => {
 
   // -> DEFAULT SITE
 
-  WIKI.logger.info('Generating certificates...')
-  const secret = crypto.randomBytes(32).toString('hex')
-  const certs = crypto.generateKeyPairSync('rsa', {
-    modulusLength: 2048,
-    publicKeyEncoding: {
-      type: 'pkcs1',
-      format: 'pem'
-    },
-    privateKeyEncoding: {
-      type: 'pkcs1',
-      format: 'pem',
-      cipher: 'aes-256-cbc',
-      passphrase: secret
-    }
-  })
-
-  const siteId = uuid()
   await knex('sites').insert({
     id: siteId,
     hostname: '*',
     isEnabled: true,
     config: {
-      auth: {
-        audience: 'urn:wiki.js',
-        tokenExpiration: '30m',
-        tokenRenewal: '14d',
-        certs: {
-          jwk: pem2jwk(certs.publicKey),
-          public: certs.publicKey,
-          private: certs.privateKey
-        },
-        secret
-      },
       title: 'My Wiki Site',
       description: '',
       company: '',
@@ -471,8 +492,6 @@ exports.up = async knex => {
 
   // -> DEFAULT GROUPS
 
-  const groupAdminId = uuid()
-  const groupGuestId = '10000000-0000-4000-0000-000000000001'
   await knex('groups').insert([
     {
       id: groupAdminId,
@@ -503,7 +522,6 @@ exports.up = async knex => {
 
   // -> AUTHENTICATION MODULE
 
-  const authModuleId = uuid()
   await knex('authentication').insert({
     id: authModuleId,
     module: 'local',
@@ -513,8 +531,6 @@ exports.up = async knex => {
 
   // -> USERS
 
-  const userAdminId = uuid()
-  const userGuestId = uuid()
   await knex('users').insert([
     {
       id: userAdminId,

+ 5 - 1
server/index.js

@@ -22,7 +22,11 @@ let WIKI = {
   Error: require('./helpers/error'),
   configSvc: require('./core/config'),
   kernel: require('./core/kernel'),
-  startedAt: DateTime.utc()
+  startedAt: DateTime.utc(),
+  storage: {
+    defs: [],
+    modules: []
+  }
 }
 global.WIKI = WIKI
 

+ 1 - 1
server/master.js

@@ -77,7 +77,7 @@ module.exports = async () => {
 
   app.use(cookieParser())
   app.use(session({
-    secret: WIKI.config.system.sessionSecret,
+    secret: WIKI.config.auth.secret,
     resave: false,
     saveUninitialized: false,
     store: new KnexSessionStore({

+ 1 - 2
server/models/analytics.js

@@ -12,7 +12,6 @@ const commonHelper = require('../helpers/common')
  */
 module.exports = class Analytics extends Model {
   static get tableName() { return 'analytics' }
-  static get idColumn() { return 'key' }
 
   static get jsonSchema () {
     return {
@@ -52,7 +51,7 @@ module.exports = class Analytics extends Model {
 
       WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} analytics module definitions: [ OK ]`)
     } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new analytics providers: [ FAILED ]`)
+      WIKI.logger.error(`Failed to scan or load analytics providers: [ FAILED ]`)
       WIKI.logger.error(err)
     }
   }

+ 13 - 69
server/models/authentication.js

@@ -12,15 +12,15 @@ const commonHelper = require('../helpers/common')
  */
 module.exports = class Authentication extends Model {
   static get tableName() { return 'authentication' }
-  static get idColumn() { return 'key' }
 
   static get jsonSchema () {
     return {
       type: 'object',
-      required: ['key'],
+      required: ['module'],
 
       properties: {
-        key: {type: 'string'},
+        id: { type: 'string' },
+        module: { type: 'string' },
         selfRegistration: {type: 'boolean'}
       }
     }
@@ -43,79 +43,23 @@ module.exports = class Authentication extends Model {
     }))
   }
 
-  static async getStrategiesForLegacyClient() {
-    const strategies = await WIKI.models.authentication.query().select('key', 'selfRegistration')
-    let formStrategies = []
-    let socialStrategies = []
-
-    for (let stg of strategies) {
-      const stgInfo = _.find(WIKI.data.authentication, ['key', stg.key]) || {}
-      if (stgInfo.useForm) {
-        formStrategies.push({
-          key: stg.key,
-          title: stgInfo.title
-        })
-      } else {
-        socialStrategies.push({
-          ...stgInfo,
-          ...stg,
-          icon: await fs.readFile(path.join(WIKI.ROOTPATH, `assets/svg/auth-icon-${stg.key}.svg`), 'utf8').catch(err => {
-            if (err.code === 'ENOENT') {
-              return null
-            }
-            throw err
-          })
-        })
-      }
-    }
-
-    return {
-      formStrategies,
-      socialStrategies
-    }
-  }
-
   static async refreshStrategiesFromDisk() {
     try {
-      const dbStrategies = await WIKI.models.authentication.query()
-
       // -> Fetch definitions from disk
-      const authDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/authentication'))
+      const authenticationDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/authentication'))
       WIKI.data.authentication = []
-      for (let dir of authDirs) {
-        const defRaw = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/authentication', dir, 'definition.yml'), 'utf8')
-        const def = yaml.safeLoad(defRaw)
-        WIKI.data.authentication.push({
-          ...def,
-          props: commonHelper.parseModuleProps(def.props)
-        })
-      }
-
-      for (const strategy of dbStrategies) {
-        const strategyDef = _.find(WIKI.data.authentication, ['key', strategy.strategyKey])
-        if (!strategyDef) {
-          await WIKI.models.authentication.query().delete().where('key', strategy.key)
-          WIKI.logger.info(`Authentication strategy ${strategy.strategyKey} was removed from disk: [ REMOVED ]`)
-          continue
-        }
-        strategy.config = _.transform(strategyDef.props, (result, value, key) => {
-          if (!_.has(result, key)) {
-            _.set(result, key, value.default)
-          }
-          return result
-        }, strategy.config)
-
-        // Fix pre-2.5 strategies displayName
-        if (!strategy.displayName) {
-          await WIKI.models.authentication.query().patch({
-            displayName: strategyDef.title
-          }).where('key', strategy.key)
-        }
+      for (const dir of authenticationDirs) {
+        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/authentication', dir, 'definition.yml'), 'utf8')
+        const defParsed = yaml.load(def)
+        defParsed.key = dir
+        defParsed.props = commonHelper.parseModuleProps(defParsed.props)
+        WIKI.data.analytics.push(defParsed)
+        WIKI.logger.debug(`Loaded authentication module definition ${dir}: [ OK ]`)
       }
 
-      WIKI.logger.info(`Loaded ${WIKI.data.authentication.length} authentication strategies: [ OK ]`)
+      WIKI.logger.info(`Loaded ${WIKI.data.analytics.length} authentication module definitions: [ OK ]`)
     } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new authentication providers: [ FAILED ]`)
+      WIKI.logger.error(`Failed to scan or load authentication providers: [ FAILED ]`)
       WIKI.logger.error(err)
     }
   }

+ 12 - 50
server/models/commentProviders.js

@@ -36,65 +36,27 @@ module.exports = class CommentProvider extends Model {
 
   static async getProviders(isEnabled) {
     const providers = await WIKI.models.commentProviders.query().where(_.isBoolean(isEnabled) ? { isEnabled } : {})
-    return _.sortBy(providers, ['key'])
+    return _.sortBy(providers, ['module'])
   }
 
   static async refreshProvidersFromDisk() {
-    let trx
     try {
-      const dbProviders = await WIKI.models.commentProviders.query()
-
       // -> Fetch definitions from disk
-      const commentDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/comments'))
-      let diskProviders = []
-      for (let dir of commentDirs) {
+      const commentsDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/comments'))
+      WIKI.data.commentProviders = []
+      for (const dir of commentsDirs) {
         const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/comments', dir, 'definition.yml'), 'utf8')
-        diskProviders.push(yaml.safeLoad(def))
+        const defParsed = yaml.load(def)
+        defParsed.key = dir
+        defParsed.props = commonHelper.parseModuleProps(defParsed.props)
+        WIKI.data.commentProviders.push(defParsed)
+        WIKI.logger.debug(`Loaded comments provider module definition ${dir}: [ OK ]`)
       }
-      WIKI.data.commentProviders = diskProviders.map(provider => ({
-        ...provider,
-        props: commonHelper.parseModuleProps(provider.props)
-      }))
 
-      let newProviders = []
-      for (let provider of WIKI.data.commentProviders) {
-        if (!_.some(dbProviders, ['key', provider.key])) {
-          newProviders.push({
-            key: provider.key,
-            isEnabled: provider.key === 'default',
-            config: _.transform(provider.props, (result, value, key) => {
-              _.set(result, key, value.default)
-              return result
-            }, {})
-          })
-        } else {
-          const providerConfig = _.get(_.find(dbProviders, ['key', provider.key]), 'config', {})
-          await WIKI.models.commentProviders.query().patch({
-            config: _.transform(provider.props, (result, value, key) => {
-              if (!_.has(result, key)) {
-                _.set(result, key, value.default)
-              }
-              return result
-            }, providerConfig)
-          }).where('key', provider.key)
-        }
-      }
-      if (newProviders.length > 0) {
-        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
-        for (let provider of newProviders) {
-          await WIKI.models.commentProviders.query(trx).insert(provider)
-        }
-        await trx.commit()
-        WIKI.logger.info(`Loaded ${newProviders.length} new comment providers: [ OK ]`)
-      } else {
-        WIKI.logger.info(`No new comment providers found: [ SKIPPED ]`)
-      }
+      WIKI.logger.info(`Loaded ${WIKI.data.commentProviders.length} comments providers module definitions: [ OK ]`)
     } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new comment providers: [ FAILED ]`)
+      WIKI.logger.error(`Failed to scan or load comments providers: [ FAILED ]`)
       WIKI.logger.error(err)
-      if (trx) {
-        trx.rollback()
-      }
     }
   }
 
@@ -102,7 +64,7 @@ module.exports = class CommentProvider extends Model {
     const commentProvider = await WIKI.models.commentProviders.query().findOne('isEnabled', true)
     if (commentProvider) {
       WIKI.data.commentProvider = {
-        ..._.find(WIKI.data.commentProviders, ['key', commentProvider.key]),
+        ..._.find(WIKI.data.commentProviders, ['key', commentProvider.module]),
         head: '',
         bodyStart: '',
         bodyEnd: '',

+ 0 - 65
server/models/editors.js

@@ -1,9 +1,4 @@
 const Model = require('objection').Model
-const fs = require('fs-extra')
-const path = require('path')
-const _ = require('lodash')
-const yaml = require('js-yaml')
-const commonHelper = require('../helpers/common')
 
 /* global WIKI */
 
@@ -34,66 +29,6 @@ module.exports = class Editor extends Model {
     return WIKI.models.editors.query()
   }
 
-  static async refreshEditorsFromDisk() {
-    let trx
-    try {
-      const dbEditors = await WIKI.models.editors.query()
-
-      // -> Fetch definitions from disk
-      const editorDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/editor'))
-      let diskEditors = []
-      for (let dir of editorDirs) {
-        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/editor', dir, 'definition.yml'), 'utf8')
-        diskEditors.push(yaml.safeLoad(def))
-      }
-      WIKI.data.editors = diskEditors.map(editor => ({
-        ...editor,
-        props: commonHelper.parseModuleProps(editor.props)
-      }))
-
-      // -> Insert new editors
-      let newEditors = []
-      for (let editor of WIKI.data.editors) {
-        if (!_.some(dbEditors, ['key', editor.key])) {
-          newEditors.push({
-            key: editor.key,
-            isEnabled: false,
-            config: _.transform(editor.props, (result, value, key) => {
-              _.set(result, key, value.default)
-              return result
-            }, {})
-          })
-        } else {
-          const editorConfig = _.get(_.find(dbEditors, ['key', editor.key]), 'config', {})
-          await WIKI.models.editors.query().patch({
-            config: _.transform(editor.props, (result, value, key) => {
-              if (!_.has(result, key)) {
-                _.set(result, key, value.default)
-              }
-              return result
-            }, editorConfig)
-          }).where('key', editor.key)
-        }
-      }
-      if (newEditors.length > 0) {
-        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
-        for (let editor of newEditors) {
-          await WIKI.models.editors.query(trx).insert(editor)
-        }
-        await trx.commit()
-        WIKI.logger.info(`Loaded ${newEditors.length} new editors: [ OK ]`)
-      } else {
-        WIKI.logger.info(`No new editors found: [ SKIPPED ]`)
-      }
-    } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new editors: [ FAILED ]`)
-      WIKI.logger.error(err)
-      if (trx) {
-        trx.rollback()
-      }
-    }
-  }
-
   static async getDefaultEditor(contentType) {
     // TODO - hardcoded for now
     switch (contentType) {

+ 19 - 36
server/models/pages.js

@@ -41,7 +41,7 @@ module.exports = class Page extends Model {
         hash: {type: 'string'},
         title: {type: 'string'},
         description: {type: 'string'},
-        isPublished: {type: 'boolean'},
+        publishState: {type: 'string'},
         privateNS: {type: 'string'},
         publishStartDate: {type: 'string'},
         publishEndDate: {type: 'string'},
@@ -96,14 +96,6 @@ module.exports = class Page extends Model {
           to: 'users.id'
         }
       },
-      editor: {
-        relation: Model.BelongsToOneRelation,
-        modelClass: require('./editors'),
-        join: {
-          from: 'pages.editorKey',
-          to: 'editors.key'
-        }
-      },
       locale: {
         relation: Model.BelongsToOneRelation,
         modelClass: require('./locales'),
@@ -143,16 +135,14 @@ module.exports = class Page extends Model {
       creatorId: 'uint',
       creatorName: 'string',
       description: 'string',
-      editorKey: 'string',
-      isPrivate: 'boolean',
-      isPublished: 'boolean',
+      editor: 'string',
+      publishState: 'string',
       publishEndDate: 'string',
       publishStartDate: 'string',
       render: 'string',
       tags: [
         {
-          tag: 'string',
-          title: 'string'
+          tag: 'string'
         }
       ],
       extra: {
@@ -301,10 +291,9 @@ module.exports = class Page extends Model {
       creatorId: opts.user.id,
       contentType: _.get(_.find(WIKI.data.editors, ['key', opts.editor]), `contentType`, 'text'),
       description: opts.description,
-      editorKey: opts.editor,
-      hash: pageHelper.generateHash({ path: opts.path, locale: opts.locale, privateNS: opts.isPrivate ? 'TODO' : '' }),
-      isPrivate: opts.isPrivate,
-      isPublished: opts.isPublished,
+      editor: opts.editor,
+      hash: pageHelper.generateHash({ path: opts.path, locale: opts.locale }),
+      publishState: opts.publishState,
       localeCode: opts.locale,
       path: opts.path,
       publishEndDate: opts.publishEndDate || '',
@@ -319,8 +308,7 @@ module.exports = class Page extends Model {
     const page = await WIKI.models.pages.getPageFromDb({
       path: opts.path,
       locale: opts.locale,
-      userId: opts.user.id,
-      isPrivate: opts.isPrivate
+      userId: opts.user.id
     })
 
     // -> Save Tags
@@ -389,7 +377,6 @@ module.exports = class Page extends Model {
     // -> Create version snapshot
     await WIKI.models.pageHistory.addVersion({
       ...ogPage,
-      isPublished: ogPage.isPublished === true || ogPage.isPublished === 1,
       action: opts.action ? opts.action : 'updated',
       versionDate: ogPage.updatedAt
     })
@@ -426,7 +413,7 @@ module.exports = class Page extends Model {
       authorId: opts.user.id,
       content: opts.content,
       description: opts.description,
-      isPublished: opts.isPublished === true || opts.isPublished === 1,
+      publishState: opts.publishState,
       publishEndDate: opts.publishEndDate || '',
       publishStartDate: opts.publishStartDate || '',
       title: opts.title,
@@ -500,7 +487,7 @@ module.exports = class Page extends Model {
       throw new Error('Invalid Page Id')
     }
 
-    if (ogPage.editorKey === opts.editor) {
+    if (ogPage.editor === opts.editor) {
       throw new Error('Page is already using this editor. Nothing to convert.')
     }
 
@@ -631,7 +618,6 @@ module.exports = class Page extends Model {
     if (shouldConvert) {
       await WIKI.models.pageHistory.addVersion({
         ...ogPage,
-        isPublished: ogPage.isPublished === true || ogPage.isPublished === 1,
         action: 'updated',
         versionDate: ogPage.updatedAt
       })
@@ -640,7 +626,7 @@ module.exports = class Page extends Model {
     // -> Update page
     await WIKI.models.pages.query().patch({
       contentType: targetContentType,
-      editorKey: opts.editor,
+      editor: opts.editor,
       ...(convertedContent ? { content: convertedContent } : {})
     }).where('id', ogPage.id)
     const page = await WIKI.models.pages.getPageFromDb(ogPage.id)
@@ -721,7 +707,7 @@ module.exports = class Page extends Model {
       versionDate: page.updatedAt
     })
 
-    const destinationHash = pageHelper.generateHash({ path: opts.destinationPath, locale: opts.destinationLocale, privateNS: opts.isPrivate ? 'TODO' : '' })
+    const destinationHash = pageHelper.generateHash({ path: opts.destinationPath, locale: opts.destinationLocale })
 
     // -> Move page
     const destinationTitle = (page.title === page.path ? opts.destinationPath : page.title)
@@ -991,9 +977,7 @@ module.exports = class Page extends Model {
           'pages.hash',
           'pages.title',
           'pages.description',
-          'pages.isPrivate',
-          'pages.isPublished',
-          'pages.privateNS',
+          'pages.publishState',
           'pages.publishStartDate',
           'pages.publishEndDate',
           'pages.content',
@@ -1002,7 +986,7 @@ module.exports = class Page extends Model {
           'pages.contentType',
           'pages.createdAt',
           'pages.updatedAt',
-          'pages.editorKey',
+          'pages.editor',
           'pages.localeCode',
           'pages.authorId',
           'pages.creatorId',
@@ -1018,7 +1002,7 @@ module.exports = class Page extends Model {
         .joinRelated('creator')
         .withGraphJoined('tags')
         .modifyGraph('tags', builder => {
-          builder.select('tag', 'title')
+          builder.select('tag')
         })
         .where(queryModeID ? {
           'pages.id': opts
@@ -1066,17 +1050,16 @@ module.exports = class Page extends Model {
       creatorId: page.creatorId,
       creatorName: page.creatorName,
       description: page.description,
-      editorKey: page.editorKey,
+      editor: page.editor,
       extra: {
         css: _.get(page, 'extra.css', ''),
         js: _.get(page, 'extra.js', '')
       },
-      isPrivate: page.isPrivate === 1 || page.isPrivate === true,
-      isPublished: page.isPublished === 1 || page.isPublished === true,
+      publishState: page.publishState,
       publishEndDate: page.publishEndDate,
       publishStartDate: page.publishStartDate,
       render: page.render,
-      tags: page.tags.map(t => _.pick(t, ['tag', 'title'])),
+      tags: page.tags.map(t => _.pick(t, ['tag'])),
       title: page.title,
       toc: _.isString(page.toc) ? page.toc : JSON.stringify(page.toc),
       updatedAt: page.updatedAt
@@ -1090,7 +1073,7 @@ module.exports = class Page extends Model {
    * @returns {Promise} Promise of the Page Model Instance
    */
   static async getPageFromCache(opts) {
-    const pageHash = pageHelper.generateHash({ path: opts.path, locale: opts.locale, privateNS: opts.isPrivate ? 'TODO' : '' })
+    const pageHash = pageHelper.generateHash({ path: opts.path, locale: opts.locale })
     const cachePath = path.resolve(WIKI.ROOTPATH, WIKI.config.dataPath, `cache/${pageHash}.bin`)
 
     try {

+ 23 - 65
server/models/renderers.js

@@ -13,15 +13,15 @@ const commonHelper = require('../helpers/common')
  */
 module.exports = class Renderer extends Model {
   static get tableName() { return 'renderers' }
-  static get idColumn() { return 'key' }
 
   static get jsonSchema () {
     return {
       type: 'object',
-      required: ['key', 'isEnabled'],
+      required: ['module', 'isEnabled'],
 
       properties: {
-        key: {type: 'string'},
+        id: {type: 'string'},
+        module: {type: 'string'},
         isEnabled: {type: 'boolean'}
       }
     }
@@ -36,77 +36,35 @@ module.exports = class Renderer extends Model {
   }
 
   static async fetchDefinitions() {
-    const rendererDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
-    let diskRenderers = []
-    for (let dir of rendererDirs) {
-      const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
-      diskRenderers.push(yaml.safeLoad(def))
-    }
-    WIKI.data.renderers = diskRenderers.map(renderer => ({
-      ...renderer,
-      props: commonHelper.parseModuleProps(renderer.props)
-    }))
-  }
-
-  static async refreshRenderersFromDisk() {
-    let trx
     try {
-      const dbRenderers = await WIKI.models.renderers.query()
-
       // -> Fetch definitions from disk
-      await WIKI.models.renderers.fetchDefinitions()
-
-      // -> Insert new Renderers
-      let newRenderers = []
-      for (let renderer of WIKI.data.renderers) {
-        if (!_.some(dbRenderers, ['key', renderer.key])) {
-          newRenderers.push({
-            key: renderer.key,
-            isEnabled: _.get(renderer, 'enabledDefault', true),
-            config: _.transform(renderer.props, (result, value, key) => {
-              _.set(result, key, value.default)
-              return result
-            }, {})
-          })
-        } else {
-          const rendererConfig = _.get(_.find(dbRenderers, ['key', renderer.key]), 'config', {})
-          await WIKI.models.renderers.query().patch({
-            config: _.transform(renderer.props, (result, value, key) => {
-              if (!_.has(result, key)) {
-                _.set(result, key, value.default)
-              }
-              return result
-            }, rendererConfig)
-          }).where('key', renderer.key)
-        }
-      }
-      if (newRenderers.length > 0) {
-        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
-        for (let renderer of newRenderers) {
-          await WIKI.models.renderers.query(trx).insert(renderer)
-        }
-        await trx.commit()
-        WIKI.logger.info(`Loaded ${newRenderers.length} new renderers: [ OK ]`)
-      } else {
-        WIKI.logger.info(`No new renderers found: [ SKIPPED ]`)
+      const renderersDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/rendering'))
+      WIKI.data.renderers = []
+      for (const dir of renderersDirs) {
+        const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/rendering', dir, 'definition.yml'), 'utf8')
+        const defParsed = yaml.load(def)
+        defParsed.key = dir
+        defParsed.props = commonHelper.parseModuleProps(defParsed.props)
+        WIKI.data.renderers.push(defParsed)
+        WIKI.logger.debug(`Loaded renderers module definition ${dir}: [ OK ]`)
       }
 
-      // -> Delete removed Renderers
-      for (const renderer of dbRenderers) {
-        if (!_.some(WIKI.data.renderers, ['key', renderer.key])) {
-          await WIKI.models.renderers.query().where('key', renderer.key).del()
-          WIKI.logger.info(`Removed renderer ${renderer.key} because it is no longer present in the modules folder: [ OK ]`)
-        }
-      }
+      WIKI.logger.info(`Loaded ${WIKI.data.renderers.length} renderers module definitions: [ OK ]`)
     } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new renderers: [ FAILED ]`)
+      WIKI.logger.error(`Failed to scan or load renderer definitions: [ FAILED ]`)
       WIKI.logger.error(err)
-      if (trx) {
-        trx.rollback()
-      }
     }
   }
 
+  static async refreshRenderersFromDisk() {
+    // const dbRenderers = await WIKI.models.renderers.query()
+
+    // -> Fetch definitions from disk
+    await WIKI.models.renderers.fetchDefinitions()
+
+    // TODO: Merge existing configs with updated modules
+  }
+
   static async getRenderingPipeline(contentType) {
     const renderersDb = await WIKI.models.renderers.query().where('isEnabled', true)
     if (renderersDb && renderersDb.length > 0) {

+ 108 - 0
server/models/sites.js

@@ -0,0 +1,108 @@
+const Model = require('objection').Model
+const crypto = require('crypto')
+const pem2jwk = require('pem-jwk').pem2jwk
+const _ = require('lodash')
+
+/* global WIKI */
+
+/**
+ * Site model
+ */
+module.exports = class Site extends Model {
+  static get tableName () { return 'sites' }
+
+  static get jsonSchema () {
+    return {
+      type: 'object',
+      required: ['hostname'],
+
+      properties: {
+        id: { type: 'string' },
+        hostname: { type: 'string' },
+        isEnabled: { type: 'boolean', default: false }
+      }
+    }
+  }
+
+  static get jsonAttributes () {
+    return ['config']
+  }
+
+  static async createSite (hostname, config) {
+    const newSite = await WIKI.models.sites.query().insertAndFetch({
+      hostname,
+      isEnabled: true,
+      config: _.defaultsDeep(config, {
+        title: 'My Wiki Site',
+        description: '',
+        company: '',
+        contentLicense: '',
+        defaults: {
+          timezone: 'America/New_York',
+          dateFormat: 'YYYY-MM-DD',
+          timeFormat: '12h'
+        },
+        features: {
+          ratings: false,
+          ratingsMode: 'off',
+          comments: false,
+          contributions: false,
+          profile: true,
+          search: true
+        },
+        logoUrl: '',
+        logoText: true,
+        robots: {
+          index: true,
+          follow: true
+        },
+        locale: 'en',
+        localeNamespacing: false,
+        localeNamespaces: [],
+        theme: {
+          dark: false,
+          colorPrimary: '#1976d2',
+          colorSecondary: '#02c39a',
+          colorAccent: '#f03a47',
+          colorHeader: '#000000',
+          colorSidebar: '#1976d2',
+          injectCSS: '',
+          injectHead: '',
+          injectBody: '',
+          sidebarPosition: 'left',
+          tocPosition: 'right',
+          showSharingMenu: true,
+          showPrintBtn: true
+        }
+      })
+    })
+
+    await WIKI.models.storage.query().insert({
+      module: 'db',
+      siteId: newSite.id,
+      isEnabled: true,
+      contentTypes: {
+        activeTypes: ['pages', 'images', 'documents', 'others', 'large'],
+        largeThreshold: '5MB'
+      },
+      assetDelivery: {
+        streaming: true,
+        directAccess: false
+      },
+      state: {
+        current: 'ok'
+      }
+    })
+
+    return newSite
+  }
+
+  static async updateSite (id, patch) {
+    return WIKI.models.sites.query().findById(id).patch(patch)
+  }
+
+  static async deleteSite (id) {
+    await WIKI.models.storage.query().delete().where('siteId', id)
+    return WIKI.models.sites.query().deleteById(id)
+  }
+}

+ 97 - 120
server/models/storage.js

@@ -17,93 +17,45 @@ module.exports = class Storage extends Model {
   static get jsonSchema () {
     return {
       type: 'object',
-      required: ['key', 'isEnabled'],
+      required: ['module', 'isEnabled', 'siteId'],
 
       properties: {
-        key: {type: 'string'},
+        module: {type: 'string'},
         isEnabled: {type: 'boolean'},
-        mode: {type: 'string'}
+        siteId: {type: 'string'}
       }
     }
   }
 
   static get jsonAttributes() {
-    return ['config', 'state']
+    return ['contentTypes', 'assetDelivery', 'versioning', 'schedule', 'config', 'state']
   }
 
-  static async getTargets() {
-    return WIKI.models.storage.query()
+  static async getTargets ({ siteId }) {
+    return WIKI.models.storage.query().where(builder => {
+      if (siteId) {
+        builder.where('siteId', siteId)
+      }
+    })
   }
 
-  static async refreshTargetsFromDisk() {
+  static async refreshTargetsFromDisk () {
     let trx
     try {
-      const dbTargets = await WIKI.models.storage.query()
-
       // -> Fetch definitions from disk
       const storageDirs = await fs.readdir(path.join(WIKI.SERVERPATH, 'modules/storage'))
-      let diskTargets = []
-      for (let dir of storageDirs) {
+      WIKI.storage.defs = []
+      for (const dir of storageDirs) {
         const def = await fs.readFile(path.join(WIKI.SERVERPATH, 'modules/storage', dir, 'definition.yml'), 'utf8')
-        diskTargets.push(yaml.safeLoad(def))
-      }
-      WIKI.data.storage = diskTargets.map(target => ({
-        ...target,
-        isAvailable: _.get(target, 'isAvailable', false),
-        props: commonHelper.parseModuleProps(target.props)
-      }))
-
-      // -> Insert new targets
-      let newTargets = []
-      for (let target of WIKI.data.storage) {
-        if (!_.some(dbTargets, ['key', target.key])) {
-          newTargets.push({
-            key: target.key,
-            isEnabled: false,
-            mode: target.defaultMode || 'push',
-            syncInterval: target.schedule || 'P0D',
-            config: _.transform(target.props, (result, value, key) => {
-              _.set(result, key, value.default)
-              return result
-            }, {}),
-            state: {
-              status: 'pending',
-              message: '',
-              lastAttempt: null
-            }
-          })
-        } else {
-          const targetConfig = _.get(_.find(dbTargets, ['key', target.key]), 'config', {})
-          await WIKI.models.storage.query().patch({
-            config: _.transform(target.props, (result, value, key) => {
-              if (!_.has(result, key)) {
-                _.set(result, key, value.default)
-              }
-              return result
-            }, targetConfig)
-          }).where('key', target.key)
-        }
-      }
-      if (newTargets.length > 0) {
-        trx = await WIKI.models.Objection.transaction.start(WIKI.models.knex)
-        for (let target of newTargets) {
-          await WIKI.models.storage.query(trx).insert(target)
-        }
-        await trx.commit()
-        WIKI.logger.info(`Loaded ${newTargets.length} new storage targets: [ OK ]`)
-      } else {
-        WIKI.logger.info(`No new storage targets found: [ SKIPPED ]`)
-      }
-
-      // -> Delete removed targets
-      for (const target of dbTargets) {
-        if (!_.some(WIKI.data.storage, ['key', target.key])) {
-          await WIKI.models.storage.query().where('key', target.key).del()
-          WIKI.logger.info(`Removed target ${target.key} because it is no longer present in the modules folder: [ OK ]`)
-        }
+        const defParsed = yaml.load(def)
+        defParsed.key = dir
+        defParsed.isLoaded = false
+        WIKI.storage.defs.push(defParsed)
+        WIKI.logger.debug(`Loaded storage module definition ${dir}: [ OK ]`)
       }
+      WIKI.logger.info(`Loaded ${WIKI.storage.defs.length} storage module definitions: [ OK ]`)
     } catch (err) {
-      WIKI.logger.error(`Failed to scan or load new storage providers: [ FAILED ]`)
+      WIKI.logger.error('Failed to scan or load new storage providers: [ FAILED ]')
       WIKI.logger.error(err)
       if (trx) {
         trx.rollback()
@@ -111,66 +63,91 @@ module.exports = class Storage extends Model {
     }
   }
 
+  /**
+   * Ensure a storage module is loaded
+   */
+  static async ensureModule (moduleName) {
+    if (!_.has(WIKI.storage.modules, moduleName)) {
+      try {
+        WIKI.storage.modules[moduleName] = require(`../modules/storage/${moduleName}/storage`)
+        WIKI.logger.debug(`Activated storage module ${moduleName}: [ OK ]`)
+        return true
+      } catch (err) {
+        WIKI.logger.warn(`Failed to load storage module ${moduleName}: [ FAILED ]`)
+        WIKI.logger.warn(err)
+        return false
+      }
+    } else {
+      return true
+    }
+  }
+
   /**
    * Initialize active storage targets
    */
-  static async initTargets() {
-    this.targets = await WIKI.models.storage.query().where('isEnabled', true).orderBy('key')
+  static async initTargets () {
+    const dbTargets = await WIKI.models.storage.query().where('isEnabled', true)
+    const activeModules = _.uniq(dbTargets.map(t => t.module))
     try {
       // -> Stop and delete existing jobs
-      const prevjobs = _.remove(WIKI.scheduler.jobs, job => job.name === `sync-storage`)
-      if (prevjobs.length > 0) {
-        prevjobs.forEach(job => job.stop())
+      // const prevjobs = _.remove(WIKI.scheduler.jobs, job => job.name === 'sync-storage')
+      // if (prevjobs.length > 0) {
+      //   prevjobs.forEach(job => job.stop())
+      // }
+
+      // -> Load active modules
+      for (const md of activeModules) {
+        await this.ensureModule(md)
       }
 
       // -> Initialize targets
-      for (let target of this.targets) {
-        const targetDef = _.find(WIKI.data.storage, ['key', target.key])
-        target.fn = require(`../modules/storage/${target.key}/storage`)
-        target.fn.config = target.config
-        target.fn.mode = target.mode
-        try {
-          await target.fn.init()
-
-          // -> Save succeeded init state
-          await WIKI.models.storage.query().patch({
-            state: {
-              status: 'operational',
-              message: '',
-              lastAttempt: new Date().toISOString()
-            }
-          }).where('key', target.key)
-
-          // -> Set recurring sync job
-          if (targetDef.schedule && target.syncInterval !== `P0D`) {
-            WIKI.scheduler.registerJob({
-              name: `sync-storage`,
-              immediate: false,
-              schedule: target.syncInterval,
-              repeat: true
-            }, target.key)
-          }
-
-          // -> Set internal recurring sync job
-          if (targetDef.internalSchedule && targetDef.internalSchedule !== `P0D`) {
-            WIKI.scheduler.registerJob({
-              name: `sync-storage`,
-              immediate: false,
-              schedule: target.internalSchedule,
-              repeat: true
-            }, target.key)
-          }
-        } catch (err) {
-          // -> Save initialization error
-          await WIKI.models.storage.query().patch({
-            state: {
-              status: 'error',
-              message: err.message,
-              lastAttempt: new Date().toISOString()
-            }
-          }).where('key', target.key)
-        }
-      }
+      // for (const target of this.targets) {
+      //   const targetDef = _.find(WIKI.data.storage, ['key', target.key])
+      //   target.fn = require(`../modules/storage/${target.key}/storage`)
+      //   target.fn.config = target.config
+      //   target.fn.mode = target.mode
+      //   try {
+      //     await target.fn.init()
+
+      //     // -> Save succeeded init state
+      //     await WIKI.models.storage.query().patch({
+      //       state: {
+      //         status: 'operational',
+      //         message: '',
+      //         lastAttempt: new Date().toISOString()
+      //       }
+      //     }).where('key', target.key)
+
+      //     // -> Set recurring sync job
+      //     if (targetDef.schedule && target.syncInterval !== 'P0D') {
+      //       WIKI.scheduler.registerJob({
+      //         name: 'sync-storage',
+      //         immediate: false,
+      //         schedule: target.syncInterval,
+      //         repeat: true
+      //       }, target.key)
+      //     }
+
+      //     // -> Set internal recurring sync job
+      //     if (targetDef.internalSchedule && targetDef.internalSchedule !== 'P0D') {
+      //       WIKI.scheduler.registerJob({
+      //         name: 'sync-storage',
+      //         immediate: false,
+      //         schedule: target.internalSchedule,
+      //         repeat: true
+      //       }, target.key)
+      //     }
+      //   } catch (err) {
+      //     // -> Save initialization error
+      //     await WIKI.models.storage.query().patch({
+      //       state: {
+      //         status: 'error',
+      //         message: err.message,
+      //         lastAttempt: new Date().toISOString()
+      //       }
+      //     }).where('key', target.key)
+      //   }
+      // }
     } catch (err) {
       WIKI.logger.warn(err)
       throw err

+ 1 - 5
server/models/tags.js

@@ -17,7 +17,6 @@ module.exports = class Tag extends Model {
       properties: {
         id: {type: 'integer'},
         tag: {type: 'string'},
-        title: {type: 'string'},
 
         createdAt: {type: 'string'},
         updatedAt: {type: 'string'}
@@ -59,10 +58,7 @@ module.exports = class Tag extends Model {
 
     // Create missing tags
 
-    const newTags = _.filter(tags, t => !_.some(existingTags, ['tag', t])).map(t => ({
-      tag: t,
-      title: t
-    }))
+    const newTags = _.filter(tags, t => !_.some(existingTags, ['tag', t])).map(t => ({ tag: t }))
     if (newTags.length > 0) {
       if (WIKI.config.db.type === 'postgres') {
         const createdTags = await WIKI.models.tags.query().insert(newTags)

+ 3 - 3
server/models/users.js

@@ -861,7 +861,7 @@ module.exports = class User extends Model {
    * Logout the current user
    */
   static async logout (context) {
-    if (!context.req.user || context.req.user.id === 2) {
+    if (!context.req.user || context.req.user.id === WIKI.config.auth.guestUserId) {
       return '/'
     }
     const usr = await WIKI.models.users.query().findById(context.req.user.id).select('providerKey')
@@ -870,7 +870,7 @@ module.exports = class User extends Model {
   }
 
   static async getGuestUser () {
-    const user = await WIKI.models.users.query().findById(2).withGraphJoined('groups').modifyGraph('groups', builder => {
+    const user = await WIKI.models.users.query().findById(WIKI.config.auth.guestUserId).withGraphJoined('groups').modifyGraph('groups', builder => {
       builder.select('groups.id', 'permissions')
     })
     if (!user) {
@@ -882,7 +882,7 @@ module.exports = class User extends Model {
   }
 
   static async getRootUser () {
-    let user = await WIKI.models.users.query().findById(1)
+    let user = await WIKI.models.users.query().findById(WIKI.config.auth.rootAdminUserId)
     if (!user) {
       WIKI.logger.error('CRITICAL ERROR: Root Administrator user is missing!')
       process.exit(1)

+ 0 - 6
server/modules/editor/api/definition.yml

@@ -1,6 +0,0 @@
-key: api
-title: API Docs
-description: REST / GraphQL Editor
-contentType: yml
-author: requarks.io
-props: {}

+ 0 - 6
server/modules/editor/ckeditor/definition.yml

@@ -1,6 +0,0 @@
-key: ckeditor
-title: Visual Editor
-description: Rich-text WYSIWYG Editor
-contentType: html
-author: requarks.io
-props: {}

+ 0 - 6
server/modules/editor/code/definition.yml

@@ -1,6 +0,0 @@
-key: code
-title: Code
-description: Raw HTML editor
-contentType: html
-author: requarks.io
-props: {}

+ 0 - 6
server/modules/editor/markdown/definition.yml

@@ -1,6 +0,0 @@
-key: markdown
-title: Markdown
-description: Basic Markdown editor
-contentType: markdown
-author: requarks.io
-props: {}

+ 0 - 6
server/modules/editor/redirect/definition.yml

@@ -1,6 +0,0 @@
-key: redirect
-title: Redirection
-description: Redirect the user
-contentType: redirect
-author: requarks.io
-props: {}

+ 0 - 6
server/modules/editor/wysiwyg/definition.yml

@@ -1,6 +0,0 @@
-key: wysiwyg
-title: WYSIWYG
-description: Advanced Visual HTML Builder
-contentType: html
-author: requarks.io
-props: {}

+ 27 - 15
server/modules/storage/azure/definition.yml

@@ -1,44 +1,56 @@
-key: azure
 title: Azure Blob Storage
-description: Azure Blob Storage by Microsoft provides massively scalable object storage for unstructured data.
-author: requarks.io
-logo: https://static.requarks.io/logo/azure.svg
-website: https://azure.microsoft.com/services/storage/blobs/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+icon: '/_assets/icons/ultraviolet-azure.svg'
+banner: '/_assets/storage/azure.jpg'
+description: Azure Blob Storage is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data.
+vendor: Microsoft Corporation
+website: 'https://azure.microsoft.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
   accountName:
     type: String
     title: Account Name
     default: ''
     hint: Your unique account name.
+    icon: 3d-touch
     order: 1
   accountKey:
     type: String
     title: Account Access Key
     default: ''
     hint: Either key 1 or key 2.
+    icon: key
     sensitive: true
     order: 2
   containerName:
     type: String
     title: Container Name
-    default: 'wiki'
+    default: wiki
     hint: Will automatically be created if it doesn't exist yet.
+    icon: shipping-container
     order: 3
   storageTier:
     type: String
     title: Storage Tier
     hint: Represents the access tier on a blob. Use Cool for lower storage costs but at higher retrieval costs.
+    icon: scan-stock
     order: 4
-    default: 'Cool'
+    default: cool
     enum:
-        - 'Hot'
-        - 'Cool'
+        - hot|Hot
+        - cool|Cool
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to Azure
     hint: Output all content from the DB to Azure Blob Storage, overwriting any existing data. If you enabled Azure Blob Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up

+ 0 - 10
server/modules/storage/box/definition.yml

@@ -1,10 +0,0 @@
-key: box
-title: Box
-description: Box is a cloud content management and file sharing service for businesses.
-author: requarks.io
-logo: https://static.requarks.io/logo/box.svg
-website: https://www.box.com/platform
-props:
-  clientId: String
-  clientSecret: String
-  rootFolder: String

+ 0 - 26
server/modules/storage/box/storage.js

@@ -1,26 +0,0 @@
-module.exports = {
-  async activated() {
-
-  },
-  async deactivated() {
-
-  },
-  async init() {
-
-  },
-  async created() {
-
-  },
-  async updated() {
-
-  },
-  async deleted() {
-
-  },
-  async renamed() {
-
-  },
-  async getLocalLocation () {
-
-  }
-}

+ 25 - 0
server/modules/storage/db/definition.yml

@@ -0,0 +1,25 @@
+title: 'Database'
+icon: '/_assets/icons/ultraviolet-database.svg'
+banner: '/_assets/storage/database.jpg'
+description: 'The local PostgreSQL database can store any assets. It is however not recommended to store large files directly in the database as this can cause performance issues.'
+vendor: 'Wiki.js'
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: true
+  defaultEnabled: false
+sync: false
+props: {}
+actions:
+  - handler: purge
+    label: Purge All Assets
+    hint: Delete all asset data from the database (not the metadata). Useful if you moved assets to another storage target and want to reduce the size of the database.
+    warn: This is a destructive action! Make sure all asset files are properly stored on another storage module! This action cannot be undone!
+    icon: explosion

+ 14 - 0
server/modules/storage/db/storage.js

@@ -0,0 +1,14 @@
+module.exports = {
+  async activated () { },
+  async deactivated () { },
+  async init () { },
+  async created (page) { },
+  async updated (page) { },
+  async deleted (page) { },
+  async renamed (page) { },
+  async assetUploaded (asset) { },
+  async assetDeleted (asset) { },
+  async assetRenamed (asset) { },
+  async getLocalLocation () { },
+  async exportAll () { }
+}

+ 0 - 45
server/modules/storage/digitalocean/definition.yml

@@ -1,45 +0,0 @@
-key: digitalocean
-title: DigitalOcean Spaces
-description: DigitalOcean provides developers and businesses a reliable, easy-to-use cloud computing platform of virtual servers (Droplets), object storage (Spaces) and more.
-author: andrewsim
-logo: https://static.requarks.io/logo/digitalocean.svg
-website: https://www.digitalocean.com/products/spaces/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
-props:
-  endpoint:
-    type: String
-    title: Endpoint
-    hint: The DigitalOcean spaces endpoint that has the form ${REGION}.digitaloceanspaces.com
-    default: nyc3.digitaloceanspaces.com
-    enum:
-      - ams3.digitaloceanspaces.com
-      - fra1.digitaloceanspaces.com
-      - nyc3.digitaloceanspaces.com
-      - sfo2.digitaloceanspaces.com
-      - sgp1.digitaloceanspaces.com
-    order: 1
-  bucket:
-    type: String
-    title: Space Unique Name
-    hint: The unique space name to create (e.g. wiki-johndoe)
-    order: 2
-  accessKeyId:
-    type: String
-    title: Access Key ID
-    hint: The Access Key (Generated in API > Tokens/Keys > Spaces access keys).
-    order: 3
-  secretAccessKey :
-    type: String
-    title: Access Key Secret
-    hint: The Access Key Secret for the Access Key ID you created above.
-    sensitive: true
-    order: 4
-actions:
-  - handler: exportAll
-    label: Export All
-    hint: Output all content from the DB to DigitalOcean Spaces, overwriting any existing data. If you enabled DigitalOcean Spaces after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
-

+ 0 - 3
server/modules/storage/digitalocean/storage.js

@@ -1,3 +0,0 @@
-const S3CompatibleStorage = require('../s3/common')
-
-module.exports = new S3CompatibleStorage('Digitalocean')

+ 22 - 10
server/modules/storage/disk/definition.yml

@@ -1,34 +1,46 @@
-key: disk
 title: Local File System
-description: Local storage on disk or network shares.
-author: requarks.io
-logo: https://static.requarks.io/logo/local-fs.svg
-website: https://wiki.js.org
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+icon: '/_assets/icons/ultraviolet-hdd.svg'
+banner: '/_assets/storage/disk.jpg'
+description: Store files on the local file system or over network attached storage. Note that you must use replicated storage if using high-availability instances.
+vendor: Wiki.js
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 internalSchedule: P1D
 props:
   path:
     type: String
     title: Path
     hint: Absolute path without a trailing slash (e.g. /home/wiki/backup, C:\wiki\backup)
+    icon: symlink-directory
     order: 1
   createDailyBackups:
     type: Boolean
     default: false
     title: Create Daily Backups
     hint: A tar.gz archive containing all content will be created daily in subfolder named _daily. Archives are kept for a month.
+    icon: archive-folder
     order: 2
 actions:
   - handler: dump
     label: Dump all content to disk
     hint: Output all content from the DB to the local disk. If you enabled this module after content was created or you temporarily disabled this module, you'll want to execute this action to add the missing files.
+    icon: downloads
   - handler: backup
     label: Create Backup
     hint: Will create a manual backup archive at this point in time, in a subfolder named _manual, from the contents currently on disk.
+    icon: archive-folder
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local disk folder.
+    icon: database-daily-import

+ 1 - 4
server/modules/storage/disk/storage.js

@@ -127,15 +127,12 @@ module.exports = {
 
     // -> Pages
     await pipeline(
-      WIKI.models.knex.column('id', 'path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt', 'editorKey').select().from('pages').where({
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
         isPrivate: false
       }).stream(),
       new stream.Transform({
         objectMode: true,
         transform: async (page, enc, cb) => {
-          const pageObject = await WIKI.models.pages.query().findById(page.id)
-          page.tags = await pageObject.$relatedQuery('tags')
-          
           let fileName = `${page.path}.${pageHelper.getFileExtension(page.contentType)}`
           if (WIKI.config.lang.code !== page.localeCode) {
             fileName = `${page.localeCode}/${fileName}`

+ 0 - 9
server/modules/storage/dropbox/definition.yml

@@ -1,9 +0,0 @@
-key: dropbox
-title: Dropbox
-description: Dropbox is a file hosting service that offers cloud storage, file synchronization, personal cloud, and client software.
-author: requarks.io
-logo: https://static.requarks.io/logo/dropbox.svg
-website: https://dropbox.com
-props:
-  appKey: String
-  appSecret: String

+ 0 - 26
server/modules/storage/dropbox/storage.js

@@ -1,26 +0,0 @@
-module.exports = {
-  async activated() {
-
-  },
-  async deactivated() {
-
-  },
-  async init() {
-
-  },
-  async created() {
-
-  },
-  async updated() {
-
-  },
-  async deleted() {
-
-  },
-  async renamed() {
-
-  },
-  async getLocalLocation () {
-
-  }
-}

+ 65 - 0
server/modules/storage/gcs/definition.yml

@@ -0,0 +1,65 @@
+title: Google Cloud Storage
+icon: '/_assets/icons/ultraviolet-google.svg'
+banner: '/_assets/storage/gcs.jpg'
+description: Google Cloud Storage is an online file storage web service for storing and accessing data on Google Cloud Platform infrastructure.
+vendor: Alphabet Inc.
+website: 'https://cloud.google.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
+props:
+  accountName:
+    type: String
+    title: Project ID
+    hint: The project ID from the Google Developer's Console (e.g. grape-spaceship-123).
+    icon: 3d-touch
+    default: ''
+    order: 1
+  credentialsJSON:
+    type: String
+    title: JSON Credentials
+    hint: Contents of the JSON credentials file for the service account having Cloud Storage permissions.
+    icon: key
+    default: ''
+    multiline: true
+    sensitive: true
+    order: 2
+  bucket:
+    type: String
+    title: Unique bucket name
+    hint: The unique bucket name to create (e.g. wiki-johndoe).
+    icon: open-box
+    order: 3
+  storageTier:
+    type: String
+    title: Storage Tier
+    hint: Select the storage class to use when uploading new assets.
+    icon: scan-stock
+    order: 4
+    default: STANDARD
+    enum:
+      - STANDARD|Standard
+      - NEARLINE|Nearline
+      - COLDLINE|Coldline
+      - ARCHIVE|Archive
+  apiEndpoint:
+    type: String
+    title: API Endpoint
+    hint: The API endpoint of the service used to make requests.
+    icon: api
+    default: storage.google.com
+    order: 5
+actions:
+  - handler: exportAll
+    label: Export All DB Assets to GCS
+    hint: Output all content from the DB to Google Cloud Storage, overwriting any existing data. If you enabled Google Cloud Storage after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up

+ 164 - 0
server/modules/storage/gcs/storage.js

@@ -0,0 +1,164 @@
+const { BlobServiceClient, StorageSharedKeyCredential } = require('@azure/storage-blob')
+const stream = require('stream')
+const Promise = require('bluebird')
+const pipeline = Promise.promisify(stream.pipeline)
+const pageHelper = require('../../../helpers/page.js')
+const _ = require('lodash')
+
+/* global WIKI */
+
+const getFilePath = (page, pathKey) => {
+  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
+  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
+  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
+}
+
+module.exports = {
+  async activated() {
+
+  },
+  async deactivated() {
+
+  },
+  async init() {
+    WIKI.logger.info(`(STORAGE/AZURE) Initializing...`)
+    const { accountName, accountKey, containerName } = this.config
+    this.client = new BlobServiceClient(
+      `https://${accountName}.blob.core.windows.net`,
+      new StorageSharedKeyCredential(accountName, accountKey)
+    )
+    this.container = this.client.getContainerClient(containerName)
+    try {
+      await this.container.create()
+    } catch (err) {
+      if (err.statusCode !== 409) {
+        WIKI.logger.warn(err)
+        throw err
+      }
+    }
+    WIKI.logger.info(`(STORAGE/AZURE) Initialization completed.`)
+  },
+  async created (page) {
+    WIKI.logger.info(`(STORAGE/AZURE) Creating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    const pageContent = page.injectMetadata()
+    const blockBlobClient = this.container.getBlockBlobClient(filePath)
+    await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
+  },
+  async updated (page) {
+    WIKI.logger.info(`(STORAGE/AZURE) Updating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    const pageContent = page.injectMetadata()
+    const blockBlobClient = this.container.getBlockBlobClient(filePath)
+    await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
+  },
+  async deleted (page) {
+    WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    const blockBlobClient = this.container.getBlockBlobClient(filePath)
+    await blockBlobClient.delete({
+      deleteSnapshots: 'include'
+    })
+  },
+  async renamed(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
+    let sourceFilePath = getFilePath(page, 'path')
+    let destinationFilePath = getFilePath(page, 'destinationPath')
+    if (WIKI.config.lang.namespacing) {
+      if (WIKI.config.lang.code !== page.localeCode) {
+        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
+      }
+      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
+        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
+      }
+    }
+    const sourceBlockBlobClient = this.container.getBlockBlobClient(sourceFilePath)
+    const destBlockBlobClient = this.container.getBlockBlobClient(destinationFilePath)
+    await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
+    await sourceBlockBlobClient.delete({
+      deleteSnapshots: 'include'
+    })
+  },
+  /**
+   * ASSET UPLOAD
+   *
+   * @param {Object} asset Asset to upload
+   */
+  async assetUploaded (asset) {
+    WIKI.logger.info(`(STORAGE/AZURE) Creating new file ${asset.path}...`)
+    const blockBlobClient = this.container.getBlockBlobClient(asset.path)
+    await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
+  },
+  /**
+   * ASSET DELETE
+   *
+   * @param {Object} asset Asset to delete
+   */
+  async assetDeleted (asset) {
+    WIKI.logger.info(`(STORAGE/AZURE) Deleting file ${asset.path}...`)
+    const blockBlobClient = this.container.getBlockBlobClient(asset.path)
+    await blockBlobClient.delete({
+      deleteSnapshots: 'include'
+    })
+  },
+  /**
+   * ASSET RENAME
+   *
+   * @param {Object} asset Asset to rename
+   */
+  async assetRenamed (asset) {
+    WIKI.logger.info(`(STORAGE/AZURE) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
+    const sourceBlockBlobClient = this.container.getBlockBlobClient(asset.path)
+    const destBlockBlobClient = this.container.getBlockBlobClient(asset.destinationPath)
+    await destBlockBlobClient.syncCopyFromURL(sourceBlockBlobClient.url)
+    await sourceBlockBlobClient.delete({
+      deleteSnapshots: 'include'
+    })
+  },
+  async getLocalLocation () {
+
+  },
+  /**
+   * HANDLERS
+   */
+  async exportAll() {
+    WIKI.logger.info(`(STORAGE/AZURE) Exporting all content to Azure Blob Storage...`)
+
+    // -> Pages
+    await pipeline(
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
+        isPrivate: false
+      }).stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (page, enc, cb) => {
+          const filePath = getFilePath(page, 'path')
+          WIKI.logger.info(`(STORAGE/AZURE) Adding page ${filePath}...`)
+          const pageContent = pageHelper.injectPageMetadata(page)
+          const blockBlobClient = this.container.getBlockBlobClient(filePath)
+          await blockBlobClient.upload(pageContent, pageContent.length, { tier: this.config.storageTier })
+          cb()
+        }
+      })
+    )
+
+    // -> Assets
+    const assetFolders = await WIKI.models.assetFolders.getAllPaths()
+
+    await pipeline(
+      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (asset, enc, cb) => {
+          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
+          WIKI.logger.info(`(STORAGE/AZURE) Adding asset ${filename}...`)
+          const blockBlobClient = this.container.getBlockBlobClient(filename)
+          await blockBlobClient.upload(asset.data, asset.data.length, { tier: this.config.storageTier })
+          cb()
+        }
+      })
+    )
+
+    WIKI.logger.info('(STORAGE/AZURE) All content has been pushed to Azure Blob Storage.')
+  }
+}

+ 0 - 9
server/modules/storage/gdrive/definition.yml

@@ -1,9 +0,0 @@
-key: gdrive
-title: Google Drive
-description: Google Drive is a file storage and synchronization service developed by Google.
-author: requarks.io
-logo: https://static.requarks.io/logo/google-drive.svg
-website: https://www.google.com/drive/
-props:
-  clientId: String
-  clientSecret: String

+ 0 - 26
server/modules/storage/gdrive/storage.js

@@ -1,26 +0,0 @@
-module.exports = {
-  async activated() {
-
-  },
-  async deactivated() {
-
-  },
-  async init() {
-
-  },
-  async created() {
-
-  },
-  async updated() {
-
-  },
-  async deleted() {
-
-  },
-  async renamed() {
-
-  },
-  async getLocalLocation () {
-
-  }
-}

+ 70 - 27
server/modules/storage/git/definition.yml

@@ -1,108 +1,151 @@
-key: git
 title: Git
-description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people.
-author: requarks.io
-logo: https://static.requarks.io/logo/git-alt.svg
-website: https://git-scm.com/
-isAvailable: true
-supportedModes:
-  - sync
-  - push
-  - pull
-defaultMode: sync
-schedule: PT5M
+icon: '/_assets/icons/ultraviolet-git.svg'
+banner: '/_assets/storage/git.jpg'
+description: Git is a version control system for tracking changes in computer files and coordinating work on those files among multiple people. If using GitHub, use the GitHub module instead!
+vendor: Software Freedom Conservancy, Inc.
+website: 'https://git-scm.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: true
+  defaultEnabled: true
+  isForceEnabled: true
+sync:
+  supportedModes:
+    - sync
+    - push
+    - pull
+  defaultMode: sync
+  schedule: PT5M
 props:
   authType:
     type: String
     default: 'ssh'
     title: Authentication Type
     hint: Use SSH for maximum security.
+    icon: security-configuration
     enum:
-      - 'basic'
-      - 'ssh'
+      - basic|Basic
+      - ssh|SSH
+    enumDisplay: buttons
     order: 1
   repoUrl:
     type: String
     title: Repository URI
-    hint: Git-compliant URI (e.g. git@github.com:org/repo.git for ssh, https://github.com/org/repo.git for basic)
+    hint: Git-compliant URI (e.g. git@server.com:org/repo.git for ssh, https://server.com/org/repo.git for basic)
+    icon: dns
     order: 2
   branch:
     type: String
-    default: 'master'
+    default: 'main'
+    title: Branch
     hint: The branch to use during pull / push
+    icon: code-fork
     order: 3
   sshPrivateKeyMode:
     type: String
     title: SSH Private Key Mode
-    hint: SSH Authentication Only - The mode to use to load the private key. Fill in the corresponding field below.
+    hint: The mode to use to load the private key. Fill in the corresponding field below.
+    icon: grand-master-key
     order: 11
-    default: 'path'
+    default: inline
     enum:
-        - 'path'
-        - 'contents'
+        - path|File Path
+        - inline|Inline Contents
+    enumDisplay: buttons
+    if:
+      - { key: 'authType', eq: 'ssh' }
   sshPrivateKeyPath:
     type: String
-    title: A - SSH Private Key Path
-    hint: SSH Authentication Only - Absolute path to the key. The key must NOT be passphrase-protected. Mode must be set to path to use this option.
+    title: SSH Private Key Path
+    hint: Absolute path to the key. The key must NOT be passphrase-protected.
+    icon: key
     order: 12
+    if:
+      - { key: 'authType', eq: 'ssh' }
+      - { key: 'sshPrivateKeyMode', eq: 'path' }
   sshPrivateKeyContent:
     type: String
-    title: B - SSH Private Key Contents
-    hint: SSH Authentication Only - Paste the contents of the private key. The key must NOT be passphrase-protected. Mode must be set to contents to use this option.
+    title: SSH Private Key Contents
+    hint: Paste the contents of the private key. The key must NOT be passphrase-protected.
+    icon: key
     multiline: true
     sensitive: true
     order: 13
+    if:
+      - { key: 'sshPrivateKeyMode', eq: 'inline' }
   verifySSL:
     type: Boolean
     default: true
     title: Verify SSL Certificate
     hint: Some hosts requires SSL certificate checking to be disabled. Leave enabled for proper security.
+    icon: security-ssl
     order: 14
   basicUsername:
     type: String
     title: Username
     hint: Basic Authentication Only
+    icon: test-account
     order: 20
+    if:
+      - { key: 'authType', eq: 'basic' }
   basicPassword:
     type: String
     title: Password / PAT
     hint: Basic Authentication Only
+    icon: password
     sensitive: true
     order: 21
+    if:
+      - { key: 'authType', eq: 'basic' }
   defaultEmail:
     type: String
     title: Default Author Email
     default: 'name@company.com'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 22
+    icon: email
+    order: 30
   defaultName:
     type: String
     title: Default Author Name
     default: 'John Smith'
     hint: 'Used as fallback in case the author of the change is not present.'
-    order: 23
+    icon: customer
+    order: 31
   localRepoPath:
     type: String
     title: Local Repository Path
     default: './data/repo'
     hint: 'Path where the local git repository will be created.'
-    order: 30
+    icon: symlink-directory
+    order: 32
   gitBinaryPath:
     type: String
     title: Git Binary Path
     default: ''
     hint: Optional - Absolute path to the Git binary, when not available in PATH. Leave empty to use the default PATH location (recommended).
+    icon: run-command
     order: 50
 actions:
   - handler: syncUntracked
     label: Add Untracked Changes
     hint: Output all content from the DB to the local Git repository to ensure all untracked content is saved. If you enabled Git after content was created or you temporarily disabled Git, you'll want to execute this action to add the missing untracked changes.
+    icon: database-daily-export
   - handler: sync
     label: Force Sync
     hint: Will trigger an immediate sync operation, regardless of the current sync schedule. The sync direction is respected.
+    icon: synchronize
   - handler: importAll
     label: Import Everything
     hint: Will import all content currently in the local Git repository, regardless of the latest commit state. Useful for importing content from the remote repository created before git was enabled.
+    icon: database-daily-import
   - handler: purge
     label: Purge Local Repository
     hint: If you have unrelated merge histories, clearing the local repository can resolve this issue. This will not affect the remote repository or perform any commit.
+    icon: trash

+ 8 - 45
server/modules/storage/git/storage.js

@@ -73,7 +73,7 @@ module.exports = {
               mode: 0o600
             })
           } catch (err) {
-            WIKI.logger.error(err)
+            console.error(err)
             throw err
           }
         }
@@ -142,9 +142,7 @@ module.exports = {
       if (_.get(diff, 'files', []).length > 0) {
         let filesToProcess = []
         for (const f of diff.files) {
-          const fMoved = f.file.split(' => ')
-          const fName = fMoved.length === 2 ? fMoved[1] : fMoved[0]
-          const fPath = path.join(this.repoPath, fName)
+          const fPath = path.join(this.repoPath, f.file)
           let fStats = { size: 0 }
           try {
             fStats = await fs.stat(fPath)
@@ -161,8 +159,7 @@ module.exports = {
               path: fPath,
               stats: fStats
             },
-            oldPath: fMoved[0],
-            relPath: fName
+            relPath: f.file
           })
         }
         await this.processFiles(filesToProcess, rootUser)
@@ -177,25 +174,11 @@ module.exports = {
   async processFiles(files, user) {
     for (const item of files) {
       const contentType = pageHelper.getContentType(item.relPath)
-      const fileExists = await fs.pathExists(item.file.path)
+      const fileExists = await fs.pathExists(item.file)
       if (!item.binary && contentType) {
         // -> Page
 
-        if (fileExists && !item.importAll && item.relPath !== item.oldPath) {
-          // Page was renamed by git, so rename in DB
-          WIKI.logger.info(`(STORAGE/GIT) Page marked as renamed: from ${item.oldPath} to ${item.relPath}`)
-
-          const contentPath = pageHelper.getPagePath(item.oldPath)
-          const contentDestinationPath = pageHelper.getPagePath(item.relPath)
-          await WIKI.models.pages.movePage({
-            user: user,
-            path: contentPath.path,
-            destinationPath: contentDestinationPath.path,
-            locale: contentPath.locale,
-            destinationLocale: contentPath.locale,
-            skipStorage: true
-          })
-        } else if (!fileExists && !item.importAll && item.deletions > 0 && item.insertions === 0) {
+        if (!fileExists && item.deletions > 0 && item.insertions === 0) {
           // Page was deleted by git, can safely mark as deleted in DB
           WIKI.logger.info(`(STORAGE/GIT) Page marked as deleted: ${item.relPath}`)
 
@@ -224,23 +207,7 @@ module.exports = {
       } else {
         // -> Asset
 
-        if (fileExists && !item.importAll && ((item.before === item.after) || (item.deletions === 0 && item.insertions === 0))) {
-          // Asset was renamed by git, so rename in DB
-          WIKI.logger.info(`(STORAGE/GIT) Asset marked as renamed: from ${item.oldPath} to ${item.relPath}`)
-
-          const fileHash = assetHelper.generateHash(item.relPath)
-          const assetToRename = await WIKI.models.assets.query().findOne({ hash: fileHash })
-          if (assetToRename) {
-            await WIKI.models.assets.query().patch({
-              filename: item.relPath,
-              hash: fileHash
-            }).findById(assetToRename.id)
-            await assetToRename.deleteAssetCache()
-          } else {
-            WIKI.logger.info(`(STORAGE/GIT) Asset was not found in the DB, nothing to rename: ${item.relPath}`)
-          }
-          continue
-        } else if (!fileExists && !item.importAll && ((item.before > 0 && item.after === 0) || (item.deletions > 0 && item.insertions === 0))) {
+        if (!fileExists && ((item.before > 0 && item.after === 0) || (item.deletions > 0 && item.insertions === 0))) {
           // Asset was deleted by git, can safely mark as deleted in DB
           WIKI.logger.info(`(STORAGE/GIT) Asset marked as deleted: ${item.relPath}`)
 
@@ -427,8 +394,7 @@ module.exports = {
               relPath,
               file,
               deletions: 0,
-              insertions: 0,
-              importAll: true
+              insertions: 0
             }], rootUser)
           }
           cb()
@@ -445,15 +411,12 @@ module.exports = {
 
     // -> Pages
     await pipeline(
-      WIKI.models.knex.column('id', 'path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt', 'editorKey').select().from('pages').where({
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
         isPrivate: false
       }).stream(),
       new stream.Transform({
         objectMode: true,
         transform: async (page, enc, cb) => {
-          const pageObject = await WIKI.models.pages.query().findById(page.id)
-          page.tags = await pageObject.$relatedQuery('tags')
-
           let fileName = `${page.path}.${pageHelper.getFileExtension(page.contentType)}`
           if (WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode) {
             fileName = `${page.localeCode}/${fileName}`

+ 49 - 0
server/modules/storage/github/definition.yml

@@ -0,0 +1,49 @@
+title: GitHub
+icon: '/_assets/icons/ultraviolet-github.svg'
+banner: '/_assets/storage/github.jpg'
+description: Millions of developers and companies build, ship, and maintain their software on GitHub - the largest and most advanced development platform in the world.
+vendor: GitHub, Inc.
+website: 'https://github.com'
+assetDelivery:
+  isStreamingSupported: false
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: false
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: true
+  defaultEnabled: true
+  isForceEnabled: true
+sync: false
+setup:
+  handler: github
+  defaultValues:
+    accountType: org
+    org: ''
+    publicUrl: https://
+props:
+  appName:
+    readOnly: true
+    type: String
+    title: App Name
+    hint: Name of the generated app in GitHub.
+    icon: 3d-touch
+  repoFullName:
+    readOnly: true
+    type: String
+    title: GitHub Repository
+    hint: The GitHub repository used for content synchronization.
+    icon: github
+  repoDefaultBranch:
+    readOnly: true
+    type: String
+    title: Default Branch
+    hint: The repository default branch.
+    icon: code-fork
+actions:
+  - handler: exportAll
+    label: Export All DB Assets to GitHub
+    hint: Output all content from the DB to GitHub, overwriting any existing data. If you enabled GitHub after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up

+ 211 - 0
server/modules/storage/github/storage.js

@@ -0,0 +1,211 @@
+const { Octokit, App } = require('octokit')
+
+/* global WIKI */
+
+module.exports = {
+  async activated () { },
+  async deactivated () { },
+  async init () { },
+
+  /**
+   * SETUP FUNCTIONS
+   */
+  async setup (id, state) {
+    try {
+      switch (state.step) {
+        // --------------------------------------------
+        // -> VALIDATE CALLBACK CODE AFTER APP CREATION
+        // --------------------------------------------
+        case 'connect': {
+          const gh = new Octokit({
+            userAgent: 'wikijs'
+          })
+          const resp = await gh.request('POST /app-manifests/{code}/conversions', {
+            code: state.code
+          })
+          if (resp.status > 200 && resp.status < 300) {
+            await WIKI.models.storage.query().patch({
+              config: {
+                appId: resp.data.id,
+                appName: resp.data.name,
+                appSlug: resp.data.slug,
+                appClientId: resp.data.client_id,
+                appClientSecret: resp.data.client_secret,
+                appWebhookSecret: resp.data.webhook_secret,
+                appPem: resp.data.pem,
+                appPermissions: resp.data.permissions,
+                appEvents: resp.data.events,
+                ownerLogin: resp.data.owner?.login,
+                ownerId: resp.data.owner?.id
+              },
+              state: {
+                current: 'ok',
+                setup: 'pendinginstall'
+              }
+            }).where('id', id)
+            return {
+              nextStep: 'installApp',
+              url: `https://github.com/apps/${resp.data.slug}/installations/new/permissions?target_id=${resp.data.owner?.id}`
+            }
+          } else {
+            throw new Error('GitHub refused the code or could not be reached.')
+          }
+        }
+        // -----------------------
+        // VERIFY APP INSTALLATION
+        // -----------------------
+        case 'verify': {
+          const tgt = await WIKI.models.storage.query().findById(id)
+          if (!tgt) {
+            throw new Error('Invalid Target ID')
+          }
+
+          const ghApp = new App({
+            appId: tgt.config.appId,
+            privateKey: tgt.config.appPem,
+            Octokit: Octokit.defaults({
+              userAgent: 'wikijs'
+            }),
+            oauth: {
+              clientId: tgt.config.appClientId,
+              clientSecret: tgt.config.appClientSecret
+            },
+            webhooks: {
+              secret: tgt.config.appWebhookSecret
+            }
+          })
+
+          // -> Find Installation ID
+
+          let installId = null
+          let installTotal = 0
+          for await (const { installation } of ghApp.eachInstallation.iterator()) {
+            if (installTotal < 1) {
+              installId = installation.id
+              WIKI.logger.debug(`Using GitHub App installation ID ${installId}`)
+            }
+            installTotal++
+          }
+          if (installTotal < 1) {
+            throw new Error('App is not installed on any GitHub account!')
+          } else if (installTotal > 1) {
+            WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 account. Only the first one ${installId} will be used.`)
+          }
+
+          // -> Fetch Repository Info
+
+          let repo = null
+          let repoTotal = 0
+          for await (const { repository } of ghApp.eachRepository.iterator({ installationId: installId })) {
+            if (repository.archived || repository.disabled) {
+              WIKI.logger.debug(`Skipping GitHub Repository ${repo.id} because of it is archived or disabled.`)
+              continue
+            }
+            if (repoTotal < 1) {
+              repo = repository
+              WIKI.logger.debug(`Using GitHub Repository ${repo.id}`)
+            }
+            repoTotal++
+          }
+          if (repoTotal < 1) {
+            throw new Error('App is not installed on any GitHub repository!')
+          } else if (repoTotal > 1) {
+            WIKI.logger.warn(`GitHub App ${tgt.config.appName} is installed on more than 1 repository. Only the first one (${repo.full_name}) will be used.`)
+          }
+
+          // -> Save install/repo info
+
+          await WIKI.models.storage.query().patch({
+            isEnabled: true,
+            config: {
+              ...tgt.config,
+              installId,
+              repoId: repo.id,
+              repoName: repo.name,
+              repoOwner: repo.owner?.login,
+              repoDefaultBranch: repo.default_branch,
+              repoFullName: repo.full_name
+            },
+            state: {
+              current: 'ok',
+              setup: 'configured'
+            }
+          }).where('id', id)
+
+          return {
+            nextStep: 'completed'
+          }
+        }
+        default: {
+          throw new Error('Invalid Setup Step')
+        }
+      }
+    } catch (err) {
+      WIKI.logger.warn('GitHub Storage Module Setup Failed:')
+      WIKI.logger.warn(err)
+      throw err
+    }
+  },
+  async setupDestroy (id) {
+    try {
+      const tgt = await WIKI.models.storage.query().findById(id)
+      if (!tgt) {
+        throw new Error('Invalid Target ID')
+      }
+
+      WIKI.logger.info('Resetting GitHub storage configuration...')
+
+      const ghApp = new App({
+        appId: tgt.config.appId,
+        privateKey: tgt.config.appPem,
+        Octokit: Octokit.defaults({
+          userAgent: 'wikijs'
+        }),
+        oauth: {
+          clientId: tgt.config.appClientId,
+          clientSecret: tgt.config.appClientSecret
+        },
+        webhooks: {
+          secret: tgt.config.appWebhookSecret
+        }
+      })
+
+      // -> Reset storage module config
+
+      await WIKI.models.storage.query().patch({
+        isEnabled: false,
+        config: {},
+        state: {
+          current: 'ok',
+          setup: 'notconfigured'
+        }
+      }).where('id', id)
+
+      // -> Try to delete installation on GitHub
+
+      if (tgt.config.installId) {
+        try {
+          await ghApp.octokit.request('DELETE /app/installations/{installation_id}', {
+            installation_id: tgt.config.installId
+          })
+          WIKI.logger.info('Deleted GitHub installation successfully.')
+        } catch (err) {
+          WIKI.logger.warn('Could not delete GitHub installation automatically. Please remove the installation on GitHub.')
+        }
+      }
+    } catch (err) {
+      WIKI.logger.warn('GitHub Storage Module Destroy Failed:')
+      WIKI.logger.warn(err)
+      throw err
+    }
+  },
+  async created (page) { },
+  async updated (page) { },
+  async deleted (page) { },
+  async renamed (page) { },
+  async assetUploaded (asset) { },
+  async assetDeleted (asset) { },
+  async assetRenamed (asset) { },
+  async getLocalLocation () { },
+  async exportAll () { }
+}

+ 0 - 9
server/modules/storage/onedrive/definition.yml

@@ -1,9 +0,0 @@
-key: onedrive
-title: OneDrive
-description: OneDrive is a file hosting service operated by Microsoft as part of its suite of Office Online services.
-author: requarks.io
-logo: https://static.requarks.io/logo/onedrive.svg
-website: https://onedrive.live.com/about/
-props:
-  clientId: String
-  clientSecret: String

+ 0 - 26
server/modules/storage/onedrive/storage.js

@@ -1,26 +0,0 @@
-module.exports = {
-  async activated() {
-
-  },
-  async deactivated() {
-
-  },
-  async init() {
-
-  },
-  async created() {
-
-  },
-  async updated() {
-
-  },
-  async deleted() {
-
-  },
-  async renamed() {
-
-  },
-  async getLocalLocation () {
-
-  }
-}

+ 0 - 168
server/modules/storage/s3/common.js

@@ -1,168 +0,0 @@
-const S3 = require('aws-sdk/clients/s3')
-const stream = require('stream')
-const Promise = require('bluebird')
-const pipeline = Promise.promisify(stream.pipeline)
-const _ = require('lodash')
-const pageHelper = require('../../../helpers/page.js')
-
-/* global WIKI */
-
-/**
- * Deduce the file path given the `page` object and the object's key to the page's path.
- */
-const getFilePath = (page, pathKey) => {
-  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
-  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
-  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
-}
-
-/**
- * Can be used with S3 compatible storage.
- */
-module.exports = class S3CompatibleStorage {
-  constructor(storageName) {
-    this.storageName = storageName
-    this.bucketName = ""
-  }
-  async activated() {
-    // not used
-  }
-  async deactivated() {
-    // not used
-  }
-  async init() {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
-    const { accessKeyId, secretAccessKey, bucket } = this.config
-    const s3Config = {
-      accessKeyId,
-      secretAccessKey,
-      params: { Bucket: bucket },
-      apiVersions: '2006-03-01'
-    }
-
-    if (!_.isNil(this.config.region)) {
-      s3Config.region = this.config.region
-    }
-    if (!_.isNil(this.config.endpoint)) {
-      s3Config.endpoint = this.config.endpoint
-    }
-    if (!_.isNil(this.config.sslEnabled)) {
-      s3Config.sslEnabled = this.config.sslEnabled
-    }
-    if (!_.isNil(this.config.s3ForcePathStyle)) {
-      s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
-    }
-    if (!_.isNil(this.config.s3BucketEndpoint)) {
-      s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
-    }
-
-    this.s3 = new S3(s3Config)
-    this.bucketName = bucket
-
-    // determine if a bucket exists and you have permission to access it
-    await this.s3.headBucket().promise()
-
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
-  }
-  async created(page) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
-    const filePath = getFilePath(page, 'path')
-    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
-  }
-  async updated(page) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
-    const filePath = getFilePath(page, 'path')
-    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
-  }
-  async deleted(page) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
-    const filePath = getFilePath(page, 'path')
-    await this.s3.deleteObject({ Key: filePath }).promise()
-  }
-  async renamed(page) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
-    let sourceFilePath = getFilePath(page, 'path')
-    let destinationFilePath = getFilePath(page, 'destinationPath')
-    if (WIKI.config.lang.namespacing) {
-      if (WIKI.config.lang.code !== page.localeCode) {
-        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
-      }
-      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
-        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
-      }
-    }
-    await this.s3.copyObject({ CopySource: `${this.bucketName}/${sourceFilePath}`, Key: destinationFilePath }).promise()
-    await this.s3.deleteObject({ Key: sourceFilePath }).promise()
-  }
-  /**
-   * ASSET UPLOAD
-   *
-   * @param {Object} asset Asset to upload
-   */
-  async assetUploaded (asset) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
-    await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
-  }
-  /**
-   * ASSET DELETE
-   *
-   * @param {Object} asset Asset to delete
-   */
-  async assetDeleted (asset) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
-    await this.s3.deleteObject({ Key: asset.path }).promise()
-  }
-  /**
-   * ASSET RENAME
-   *
-   * @param {Object} asset Asset to rename
-   */
-  async assetRenamed (asset) {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
-    await this.s3.copyObject({ CopySource: `${this.bucketName}/${asset.path}`, Key: asset.destinationPath }).promise()
-    await this.s3.deleteObject({ Key: asset.path }).promise()
-  }
-  async getLocalLocation () {
-
-  }
-  /**
-   * HANDLERS
-   */
-  async exportAll() {
-    WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)
-
-    // -> Pages
-    await pipeline(
-      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
-        isPrivate: false
-      }).stream(),
-      new stream.Transform({
-        objectMode: true,
-        transform: async (page, enc, cb) => {
-          const filePath = getFilePath(page, 'path')
-          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
-          await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
-          cb()
-        }
-      })
-    )
-
-    // -> Assets
-    const assetFolders = await WIKI.models.assetFolders.getAllPaths()
-
-    await pipeline(
-      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
-      new stream.Transform({
-        objectMode: true,
-        transform: async (asset, enc, cb) => {
-          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
-          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
-          await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
-          cb()
-        }
-      })
-    )
-
-    WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
-  }
-}

+ 139 - 17
server/modules/storage/s3/definition.yml

@@ -1,37 +1,159 @@
-key: s3
-title: Amazon S3
-description: Amazon S3 is a cloud computing web service offered by Amazon Web Services which provides object storage.
-author: andrewsim
-logo: https://static.requarks.io/logo/aws-s3.svg
-website: https://aws.amazon.com/s3/
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: AWS S3 / DigitalOcean Spaces
+icon: '/_assets/icons/ultraviolet-amazon-web-services.svg'
+banner: '/_assets/storage/s3.jpg'
+description: Amazon Simple Storage Service (Amazon S3) is an object storage service offering industry-leading scalability, data availability, security, and performance.
+vendor: Amazon.com, Inc.
+website: 'https://aws.amazon.com'
+assetDelivery:
+  isStreamingSupported: true
+  isDirectAccessSupported: true
+  defaultStreamingEnabled: true
+  defaultDirectAccessEnabled: true
+contentTypes:
+  defaultTypesEnabled: ['images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
-  region:
+  mode:
+    type: String
+    title: Mode
+    hint: Select a preset configuration mode or define a custom one.
+    icon: tune
+    default: aws
+    order: 1
+    enum:
+      - aws|AWS S3
+      - do|DigitalOcean Spaces
+      - custom|Custom
+  awsRegion:
     type: String
     title: Region
     hint: The AWS datacenter region where the bucket will be created.
-    order: 1
+    icon: geography
+    default: us-east-1
+    enum:
+      - af-south-1|af-south-1 - Africa (Cape Town)
+      - ap-east-1|ap-east-1 - Asia Pacific (Hong Kong)
+      - ap-southeast-3|ap-southeast-3 - Asia Pacific (Jakarta)
+      - ap-south-1|ap-south-1 - Asia Pacific (Mumbai)
+      - ap-northeast-3|ap-northeast-3 - Asia Pacific (Osaka)
+      - ap-northeast-2|ap-northeast-2 - Asia Pacific (Seoul)
+      - ap-southeast-1|ap-southeast-1 - Asia Pacific (Singapore)
+      - ap-southeast-2|ap-southeast-2 - Asia Pacific (Sydney)
+      - ap-northeast-1|ap-northeast-1 - Asia Pacific (Tokyo)
+      - ca-central-1|ca-central-1 - Canada (Central)
+      - cn-north-1|cn-north-1 - China (Beijing)
+      - cn-northwest-1|cn-northwest-1 - China (Ningxia)
+      - eu-central-1|eu-central-1 - Europe (Frankfurt)
+      - eu-west-1|eu-west-1 - Europe (Ireland)
+      - eu-west-2|eu-west-2 - Europe (London)
+      - eu-south-1|eu-south-1 - Europe (Milan)
+      - eu-west-3|eu-west-3 - Europe (Paris)
+      - eu-north-1|eu-north-1 - Europe (Stockholm)
+      - me-south-1|me-south-1 - Middle East (Bahrain)
+      - sa-east-1|sa-east-1 - South America (São Paulo)
+      - us-east-1|us-east-1 - US East (N. Virginia)
+      - us-east-2|us-east-2 - US East (Ohio)
+      - us-west-1|us-west-1 - US West (N. California)
+      - us-west-2|us-west-2 - US West (Oregon)
+    order: 2
+    if:
+      - { key: 'mode', eq: 'aws' }
+  doRegion:
+    type: String
+    title: Region
+    hint: The DigitalOcean Spaces region
+    icon: geography
+    default: nyc3
+    enum:
+      - ams3|Amsterdam
+      - fra1|Frankfurt
+      - nyc3|New York
+      - sfo2|San Francisco 2
+      - sfo3|San Francisco 3
+      - sgp1|Singapore
+    order: 2
+    if:
+      - { key: 'mode', eq: 'do' }
+  endpoint:
+    type: String
+    title: Endpoint URI
+    hint: The full S3-compliant endpoint URI.
+    icon: dns
+    default: https://service.region.example.com
+    order: 2
+    if:
+      - { key: 'mode', eq: 'custom' }
   bucket:
     type: String
     title: Unique bucket name
     hint: The unique bucket name to create (e.g. wiki-johndoe).
-    order: 2
+    icon: open-box
+    order: 3
   accessKeyId:
     type: String
     title: Access Key ID
     hint: The Access Key.
-    order: 3
+    icon: 3d-touch
+    order: 4
   secretAccessKey:
     type: String
     title: Secret Access Key
     hint: The Secret Access Key for the Access Key ID you created above.
+    icon: key
     sensitive: true
-    order: 4
+    order: 5
+  storageTier:
+    type: String
+    title: Storage Tier
+    hint: The storage tier to use when adding files.
+    icon: scan-stock
+    order: 6
+    default: STANDARD
+    enum:
+      - STANDARD|Standard
+      - STANDARD_IA|Standard Infrequent Access
+      - INTELLIGENT_TIERING|Intelligent Tiering
+      - ONEZONE_IA|One Zone Infrequent Access
+      - REDUCED_REDUNDANCY|Reduced Redundancy
+      - GLACIER_IR|Glacier Instant Retrieval
+      - GLACIER|Glacier Flexible Retrieval
+      - DEEP_ARCHIVE|Glacier Deep Archive
+      - OUTPOSTS|Outposts
+    if:
+      - { key: 'mode', eq: 'aws' }
+  sslEnabled:
+    type: Boolean
+    title: Use SSL
+    hint: Whether to enable SSL for requests
+    icon: secure
+    default: true
+    order: 10
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3ForcePathStyle:
+    type: Boolean
+    title: Force Path Style for S3 objects
+    hint: Whether to force path style URLs for S3 objects.
+    icon: filtration
+    default: false
+    order: 11
+    if:
+      - { key: 'mode', eq: 'custom' }
+  s3BucketEndpoint:
+    type: Boolean
+    title: Single Bucket Endpoint
+    hint: Whether the provided endpoint addresses an individual bucket.
+    icon: swipe-right
+    default: false
+    order: 12
+    if:
+      - { key: 'mode', eq: 'custom' }
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to S3
     hint: Output all content from the DB to S3, overwriting any existing data. If you enabled S3 after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up

+ 165 - 2
server/modules/storage/s3/storage.js

@@ -1,3 +1,166 @@
-const S3CompatibleStorage = require('./common')
+const S3 = require('aws-sdk/clients/s3')
+const stream = require('stream')
+const Promise = require('bluebird')
+const pipeline = Promise.promisify(stream.pipeline)
+const _ = require('lodash')
+const pageHelper = require('../../../helpers/page.js')
 
-module.exports = new S3CompatibleStorage('S3')
+/* global WIKI */
+
+/**
+ * Deduce the file path given the `page` object and the object's key to the page's path.
+ */
+const getFilePath = (page, pathKey) => {
+  const fileName = `${page[pathKey]}.${pageHelper.getFileExtension(page.contentType)}`
+  const withLocaleCode = WIKI.config.lang.namespacing && WIKI.config.lang.code !== page.localeCode
+  return withLocaleCode ? `${page.localeCode}/${fileName}` : fileName
+}
+
+/**
+ * Can be used with S3 compatible storage.
+ */
+module.exports = class S3CompatibleStorage {
+  constructor(storageName) {
+    this.storageName = storageName
+  }
+  async activated() {
+    // not used
+  }
+  async deactivated() {
+    // not used
+  }
+  async init() {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Initializing...`)
+    const { accessKeyId, secretAccessKey, bucket } = this.config
+    const s3Config = {
+      accessKeyId,
+      secretAccessKey,
+      params: { Bucket: bucket },
+      apiVersions: '2006-03-01'
+    }
+
+    if (!_.isNil(this.config.region)) {
+      s3Config.region = this.config.region
+    }
+    if (!_.isNil(this.config.endpoint)) {
+      s3Config.endpoint = this.config.endpoint
+    }
+    if (!_.isNil(this.config.sslEnabled)) {
+      s3Config.sslEnabled = this.config.sslEnabled
+    }
+    if (!_.isNil(this.config.s3ForcePathStyle)) {
+      s3Config.s3ForcePathStyle = this.config.s3ForcePathStyle
+    }
+    if (!_.isNil(this.config.s3BucketEndpoint)) {
+      s3Config.s3BucketEndpoint = this.config.s3BucketEndpoint
+    }
+
+    this.s3 = new S3(s3Config)
+
+    // determine if a bucket exists and you have permission to access it
+    await this.s3.headBucket().promise()
+
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Initialization completed.`)
+  }
+  async created(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
+  }
+  async updated(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Updating file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.putObject({ Key: filePath, Body: page.injectMetadata() }).promise()
+  }
+  async deleted(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${page.path}...`)
+    const filePath = getFilePath(page, 'path')
+    await this.s3.deleteObject({ Key: filePath }).promise()
+  }
+  async renamed(page) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file ${page.path} to ${page.destinationPath}...`)
+    let sourceFilePath = getFilePath(page, 'path')
+    let destinationFilePath = getFilePath(page, 'destinationPath')
+    if (WIKI.config.lang.namespacing) {
+      if (WIKI.config.lang.code !== page.localeCode) {
+        sourceFilePath = `${page.localeCode}/${sourceFilePath}`
+      }
+      if (WIKI.config.lang.code !== page.destinationLocaleCode) {
+        destinationFilePath = `${page.destinationLocaleCode}/${destinationFilePath}`
+      }
+    }
+    await this.s3.copyObject({ CopySource: sourceFilePath, Key: destinationFilePath }).promise()
+    await this.s3.deleteObject({ Key: sourceFilePath }).promise()
+  }
+  /**
+   * ASSET UPLOAD
+   *
+   * @param {Object} asset Asset to upload
+   */
+  async assetUploaded (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Creating new file ${asset.path}...`)
+    await this.s3.putObject({ Key: asset.path, Body: asset.data }).promise()
+  }
+  /**
+   * ASSET DELETE
+   *
+   * @param {Object} asset Asset to delete
+   */
+  async assetDeleted (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Deleting file ${asset.path}...`)
+    await this.s3.deleteObject({ Key: asset.path }).promise()
+  }
+  /**
+   * ASSET RENAME
+   *
+   * @param {Object} asset Asset to rename
+   */
+  async assetRenamed (asset) {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Renaming file from ${asset.path} to ${asset.destinationPath}...`)
+    await this.s3.copyObject({ CopySource: asset.path, Key: asset.destinationPath }).promise()
+    await this.s3.deleteObject({ Key: asset.path }).promise()
+  }
+  async getLocalLocation () {
+
+  }
+  /**
+   * HANDLERS
+   */
+  async exportAll() {
+    WIKI.logger.info(`(STORAGE/${this.storageName}) Exporting all content to the cloud provider...`)
+
+    // -> Pages
+    await pipeline(
+      WIKI.models.knex.column('path', 'localeCode', 'title', 'description', 'contentType', 'content', 'isPublished', 'updatedAt', 'createdAt').select().from('pages').where({
+        isPrivate: false
+      }).stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (page, enc, cb) => {
+          const filePath = getFilePath(page, 'path')
+          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding page ${filePath}...`)
+          await this.s3.putObject({ Key: filePath, Body: pageHelper.injectPageMetadata(page) }).promise()
+          cb()
+        }
+      })
+    )
+
+    // -> Assets
+    const assetFolders = await WIKI.models.assetFolders.getAllPaths()
+
+    await pipeline(
+      WIKI.models.knex.column('filename', 'folderId', 'data').select().from('assets').join('assetData', 'assets.id', '=', 'assetData.id').stream(),
+      new stream.Transform({
+        objectMode: true,
+        transform: async (asset, enc, cb) => {
+          const filename = (asset.folderId && asset.folderId > 0) ? `${_.get(assetFolders, asset.folderId)}/${asset.filename}` : asset.filename
+          WIKI.logger.info(`(STORAGE/${this.storageName}) Adding asset ${filename}...`)
+          await this.s3.putObject({ Key: filename, Body: asset.data }).promise()
+          cb()
+        }
+      })
+    )
+
+    WIKI.logger.info(`(STORAGE/${this.storageName}) All content has been pushed to the cloud provider.`)
+  }
+}

+ 0 - 57
server/modules/storage/s3generic/definition.yml

@@ -1,57 +0,0 @@
-key: s3generic
-title: S3 Generic
-description: Generic storage module for S3-compatible services.
-author: requarks.io
-logo: https://static.requarks.io/logo/aws-s3-alt.svg
-website: https://wiki.js.org
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
-props:
-  endpoint:
-    type: String
-    title: Endpoint URI
-    hint: The full S3-compliant endpoint URI.
-    default: https://service.region.example.com
-    order: 1
-  bucket:
-    type: String
-    title: Unique bucket name
-    hint: The unique bucket name to create (e.g. wiki-johndoe)
-    order: 2
-  accessKeyId:
-    type: String
-    title: Access Key ID
-    hint: The Access Key ID.
-    order: 3
-  secretAccessKey:
-    type: String
-    title: Access Key Secret
-    hint: The Access Key Secret for the Access Key ID above.
-    sensitive: true
-    order: 4
-  sslEnabled:
-    type: Boolean
-    title: Use SSL
-    hint: Whether to enable SSL for requests
-    default: true
-    order: 5
-  s3ForcePathStyle:
-    type: Boolean
-    title: Force Path Style for S3 objects
-    hint: Whether to force path style URLs for S3 objects.
-    default: false
-    order: 6
-  s3BucketEndpoint:
-    type: Boolean
-    title: Single Bucket Endpoint
-    hint: Whether the provided endpoint addresses an individual bucket.
-    default: false
-    order: 7
-actions:
-  - handler: exportAll
-    label: Export All
-    hint: Output all content from the DB to the external service, overwriting any existing data. If you enabled this module after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
-

+ 0 - 3
server/modules/storage/s3generic/storage.js

@@ -1,3 +0,0 @@
-const S3CompatibleStorage = require('../s3/common')
-
-module.exports = new S3CompatibleStorage('S3Generic')

+ 40 - 17
server/modules/storage/sftp/definition.yml

@@ -1,71 +1,94 @@
-key: sftp
-title: SFTP
-description: SFTP (SSH File Transfer Protocol) is a secure file transfer protocol. It runs over the SSH protocol. It supports the full security and authentication functionality of SSH.
-author: requarks.io
-logo: https://static.requarks.io/logo/ssh.svg
-website: https://www.ssh.com/ssh/sftp
-isAvailable: true
-supportedModes:
-  - push
-defaultMode: push
-schedule: false
+title: 'SFTP'
+icon: '/_assets/icons/ultraviolet-nas.svg'
+banner: '/_assets/storage/ssh.jpg'
+description: 'Store files over a remote connection using the SSH File Transfer Protocol.'
+vendor: 'Wiki.js'
+website: 'https://js.wiki'
+assetDelivery:
+  isStreamingSupported: false
+  isDirectAccessSupported: false
+  defaultStreamingEnabled: false
+  defaultDirectAccessEnabled: false
+contentTypes:
+  defaultTypesEnabled: ['pages', 'images', 'documents', 'others', 'large']
+  defaultLargeThreshold: '5MB'
+versioning:
+  isSupported: false
+  defaultEnabled: false
+sync: false
 props:
   host:
     type: String
     title: Host
     default: ''
     hint: Hostname or IP of the remote SSH server.
+    icon: dns
     order: 1
   port:
     type: Number
     title: Port
     default: 22
     hint: SSH port of the remote server.
+    icon: ethernet-off
     order: 2
   authMode:
     type: String
     title: Authentication Method
     default: 'privateKey'
     hint: Whether to use Private Key or Password-based authentication. A private key is highly recommended for best security.
+    icon: grand-master-key
     enum:
-      - privateKey
-      - password
+      - privateKey|Private Key
+      - password|Password
+    enumDisplay: buttons
     order: 3
   username:
     type: String
     title: Username
     default: ''
     hint: Username for authentication.
+    icon: test-account
     order: 4
   privateKey:
     type: String
     title: Private Key Contents
     default: ''
-    hint: (Private Key Authentication Only) - Contents of the private key
+    hint: Contents of the private key
+    icon: key
     multiline: true
     sensitive: true
     order: 5
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   passphrase:
     type: String
     title: Private Key Passphrase
     default: ''
-    hint: (Private Key Authentication Only) - Passphrase if the private key is encrypted, leave empty otherwise
+    hint: Passphrase if the private key is encrypted, leave empty otherwise
+    icon: password
     sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'privateKey' }
   password:
     type: String
     title: Password
     default: ''
-    hint: (Password-based Authentication Only) - Password for authentication
+    hint: Password for authentication
+    icon: password
     sensitive: true
     order: 6
+    if:
+      - { key: 'authMode', eq: 'password' }
   basePath:
     type: String
     title: Base Directory Path
     default: '/root/wiki'
     hint: Base directory where files will be transferred to. The path must already exists and be writable by the user.
+    icon: symlink-directory
 actions:
   - handler: exportAll
-    label: Export All
+    label: Export All DB Assets to Remote
     hint: Output all content from the DB to the remote SSH server, overwriting any existing data. If you enabled SFTP after content was created or you temporarily disabled it, you'll want to execute this action to add the missing content.
+    icon: this-way-up
 

+ 1 - 6
server/modules/storage/sftp/storage.js

@@ -155,12 +155,7 @@ module.exports = {
         const folderPaths = _.dropRight(filePath.split('/'))
         for (let i = 1; i <= folderPaths.length; i++) {
           const folderSection = _.take(folderPaths, i).join('/')
-          const folderDir = path.posix.join(this.config.basePath, folderSection)
-          try {
-            await this.sftp.readdir(folderDir)
-          } catch (err) {
-            await this.sftp.mkdir(folderDir)
-          }
+          await this.sftp.mkdir(path.posix.join(this.config.basePath, folderSection))
         }
       } catch (err) {}
     }

Разница между файлами не показана из-за своего большого размера
+ 396 - 122
yarn.lock


Некоторые файлы не были показаны из-за большого количества измененных файлов