From c0711967d38cb1cf99446fd9607daf0ff1c368aa Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Thu, 25 Nov 2021 11:14:56 +0100
Subject: [PATCH 1/8] No magic strings

---
 server/script/migrateAssetsLocation.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js
index c3b83bd41..bb4ff052f 100644
--- a/server/script/migrateAssetsLocation.js
+++ b/server/script/migrateAssetsLocation.js
@@ -18,6 +18,7 @@ const toPairs = require('lodash/toPairs');
 
 const ASSET_PATH_REGEX = /(?<directory>repository\/assets\/(?<fileName>[^?]*))/;
 const CHUNK_SIZE = 2000;
+const IMAGE_ELEMENT_TYPE = 'IMAGE';
 
 const ENTITIES = {
   REPOSITORY: 'REPOSITORY',
@@ -154,7 +155,7 @@ class RepositoryMigration {
 
   async migrateContentElementData(element) {
     const { type, data } = element;
-    if (type === 'IMAGE') return this.imageMigrationHandler(element);
+    if (type === IMAGE_ELEMENT_TYPE) return this.imageMigrationHandler(element);
     const embeds = data.embeds && (await this.embedsMigrationHandler(element));
     const assets = data.assets && (await this.defaultMigrationHandler(element));
     return { ...data, ...embeds, ...assets };

From 9fc5e66cdc18461f66c05695cec4c440d47044dd Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Mon, 29 Nov 2021 19:50:40 +0100
Subject: [PATCH 2/8] Move getFileMetas to separate file under util directory

---
 server/script/migrateAssetsLocation.js | 40 +++++-----------------
 server/shared/util/getFileMetas.js     | 36 +++++++++++++++++++++++
 2 files changed, 40 insertions(+), 36 deletions(-)
 create mode 100644 server/shared/util/getFileMetas.js

diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js
index bb4ff052f..5eb8b86cc 100644
--- a/server/script/migrateAssetsLocation.js
+++ b/server/script/migrateAssetsLocation.js
@@ -8,6 +8,7 @@ const {
   sequelize
 } = require('../shared/database');
 const get = require('lodash/get');
+const getFileMetas = require('../shared/util/getFileMetas');
 const Listr = require('listr');
 const path = require('path');
 const Promise = require('bluebird');
@@ -44,13 +45,13 @@ migrate()
 
 async function migrate() {
   const transaction = await sequelize.transaction();
-  const metaBySchemaType = getFileMetas(SCHEMAS);
-  const tasks = await getTasks(metaBySchemaType, transaction);
+  const tasks = await getTasks(transaction);
   return tasks.run().then(() => transaction.commit());
 }
 
-async function getTasks(metaBySchemaType, transaction) {
+async function getTasks(transaction) {
   const repositories = await Repository.findAll({ transaction });
+  const metaBySchemaType = getFileMetas(SCHEMAS);
   const tasks = repositories.map(repository => ({
     title: `Migrate repository "${repository.name}"`,
     task: () => {
@@ -253,39 +254,6 @@ class RepositoryMigration {
   }
 }
 
-function getFileMetas(schemas) {
-  return schemas.reduce((acc, { id, meta, structure, elementMeta }) => {
-    return {
-      ...acc,
-      [id]: {
-        repository: getFileMetaKeys(meta),
-        activity: getMetaByActivityType(structure),
-        element: getMetaByElementType(elementMeta)
-      }
-    };
-  }, {});
-}
-
-function getMetaByActivityType(structure = []) {
-  return structure.reduce((acc, { type, meta }) => {
-    const fileMetaKeys = getFileMetaKeys(meta);
-    if (!fileMetaKeys.length) return acc;
-    return { ...acc, [type]: fileMetaKeys };
-  }, {});
-}
-
-function getMetaByElementType(elementMeta = []) {
-  return elementMeta.reduce((acc, { type, inputs }) => {
-    const fileMetaKeys = getFileMetaKeys(inputs);
-    if (!fileMetaKeys.length) return acc;
-    return { ...acc, [type]: fileMetaKeys };
-  }, {});
-}
-
-function getFileMetaKeys(meta = []) {
-  return meta.filter(it => it.type === 'FILE').map(it => it.key);
-}
-
 function resolveNewURL(assetUrl, targetDir) {
   if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.substr(protocol.length);
   const result = assetUrl.match(ASSET_PATH_REGEX);
   if (!result) return;
   const { groups: { directory, fileName } } = result;
   const newKey = path.join(targetDir, fileName);
   return { key: directory, newKey };
 }
diff --git a/server/shared/util/getFileMetas.js b/server/shared/util/getFileMetas.js
new file mode 100644
index 000000000..c38d2c3ac
--- /dev/null
+++ b/server/shared/util/getFileMetas.js
@@ -0,0 +1,36 @@
+'use strict';
+
+const FILE_ELEMENT_TYPE = 'FILE';
+
+function getMetaByActivityType(structure = []) {
+  return structure.reduce((acc, { type, meta }) => {
+    const fileMetaKeys = getFileMetaKeys(meta);
+    if (!fileMetaKeys.length) return acc;
+    return { ...acc, [type]: fileMetaKeys };
+  }, {});
+}
+
+function getMetaByElementType(elementMeta = []) {
+  return elementMeta.reduce((acc, { type, inputs }) => {
+    const fileMetaKeys = getFileMetaKeys(inputs);
+    if (!fileMetaKeys.length) return acc;
+    return { ...acc, [type]: fileMetaKeys };
+  }, {});
+}
+
+function getFileMetaKeys(meta = []) {
+  return meta.filter(it => it.type === FILE_ELEMENT_TYPE).map(it => it.key);
+}
+
+module.exports = function (schemas) {
+  return schemas.reduce((acc, { id, meta, structure, elementMeta }) => {
+    return {
+      ...acc,
+      [id]: {
+        repository: getFileMetaKeys(meta),
+        activity: getMetaByActivityType(structure),
+        element: getMetaByElementType(elementMeta)
+      }
+    };
+  }, {});
+};

From 25744b7bc3b134748629caa9e3564626655c33f7 Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Wed, 1 Dec 2021 14:48:54 +0100
Subject: [PATCH 3/8] Extract resolveNewURL into separate util function

---
 server/script/migrateAssetsLocation.js | 12 +-------
 server/shared/util/resolveNewURL.js    | 40 ++++++++++++++++++++++++
 2 files changed, 41 insertions(+), 11 deletions(-)
 create mode 100644 server/shared/util/resolveNewURL.js

diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js
index 5eb8b86cc..5a674a1bd 100644
--- a/server/script/migrateAssetsLocation.js
+++ b/server/script/migrateAssetsLocation.js
@@ -10,14 +10,13 @@ const {
 const get = require('lodash/get');
 const getFileMetas = require('../shared/util/getFileMetas');
 const Listr = require('listr');
-const path = require('path');
 const Promise = require('bluebird');
 const { protocol } = require('../../config/server/storage');
+const resolveNewURL = require('../shared/util/resolveNewURL');
 const { SCHEMAS } = require('../../config/shared/activities');
 const storage = require('../repository/storage');
 const toPairs = require('lodash/toPairs');
 
-const ASSET_PATH_REGEX = /(?<directory>repository\/assets\/(?<fileName>[^?]*))/;
 const CHUNK_SIZE = 2000;
 const IMAGE_ELEMENT_TYPE = 'IMAGE';
 
@@ -253,12 +252,3 @@ class RepositoryMigration {
     return { ...metaInputs, ...newMeta };
   }
 }
-
-function resolveNewURL(assetUrl, targetDir) {
-  if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.substr(protocol.length);
-  const result = assetUrl.match(ASSET_PATH_REGEX);
-  if (!result) return;
-  const { groups: { directory, fileName } } = result;
-  const newKey = path.join(targetDir, fileName);
-  return { key: directory, newKey };
-}
diff --git a/server/shared/util/resolveNewURL.js b/server/shared/util/resolveNewURL.js
new file mode 100644
index 000000000..75a315b28
--- /dev/null
+++ b/server/shared/util/resolveNewURL.js
@@ -0,0 +1,40 @@
+'use strict';
+
+const path = require('path');
+const { protocol } = require('../../../config/server/storage');
+
+/**
+ * The regular expression matching the old assets directory structure.
+ * @type {RegExp}
+ * @const
+ * @private
+ */
+const OLD_ASSET_PATH_REGEX = /(?<directory>repository\/assets\/(?<fileName>[^?]*))/;
+
+/**
+ * The regular expression matching the new assets directory structure.
+ * @type {RegExp}
+ * @const
+ * @private
+ */
+const NEW_ASSET_PATH_REGEX = /(?<directory>repository\/\d+\/assets\/(?<fileName>[^?]*))/;
+
+/**
+ * Resolves a new asset URL if `assetUrl` matches the regular expression for either the old or the new assets
+ * directory structure. `undefined` is returned when neither regular expression matches or when `assetUrl`
+ * contains `targetDir`, meaning they already have the same path.
+ * @param {string} assetUrl The current URL of an asset.
+ * @param {string} targetDir The target directory in which an asset should be stored.
+ * @return {Object} An object containing old and new directory path, or an `undefined` value if `assetUrl` includes
+ * `targetDir` or if `assetUrl` can't be matched with either regular expression.
+ * @public
+ */
+module.exports = (assetUrl, targetDir) => {
+  if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.slice(protocol.length);
+  if (assetUrl.includes(targetDir)) return;
+  const result = assetUrl.match(OLD_ASSET_PATH_REGEX) || assetUrl.match(NEW_ASSET_PATH_REGEX);
+  if (!result) return;
+  const { groups: { directory, fileName } } = result;
+  const newKey = path.join(targetDir, fileName);
+  return { key: directory, newKey };
+};

From 97da14abb54fd276493fde2b8d0e02e918f3adcb Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Thu, 2 Dec 2021 12:47:08 +0100
Subject: [PATCH 4/8] Rename resolveNewURL to resolveAssetURL

---
 server/shared/util/getFileMetas.js                          | 2 +-
 server/shared/util/{resolveNewURL.js => resolveAssetURL.js} | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
 rename server/shared/util/{resolveNewURL.js => resolveAssetURL.js} (93%)

diff --git a/server/shared/util/getFileMetas.js b/server/shared/util/getFileMetas.js
index c38d2c3ac..5685cb5b9 100644
--- a/server/shared/util/getFileMetas.js
+++ b/server/shared/util/getFileMetas.js
@@ -22,7 +22,7 @@ function getFileMetaKeys(meta = []) {
   return meta.filter(it => it.type === FILE_ELEMENT_TYPE).map(it => it.key);
 }
 
-module.exports = function (schemas) {
+module.exports = schemas => {
   return schemas.reduce((acc, { id, meta, structure, elementMeta }) => {
     return {
       ...acc,
diff --git a/server/shared/util/resolveNewURL.js b/server/shared/util/resolveAssetURL.js
similarity index 93%
rename from server/shared/util/resolveNewURL.js
rename to server/shared/util/resolveAssetURL.js
index 75a315b28..05b10358d 100644
--- a/server/shared/util/resolveNewURL.js
+++ b/server/shared/util/resolveAssetURL.js
@@ -25,13 +25,13 @@ const NEW_ASSET_PATH_REGEX = /(?<directory>repository\/\d+\/assets\/(?<fileName>[^?]*))/;
  * contains `targetDir`, meaning they already have the same path.
  * @param {string} assetUrl The current URL of an asset.
  * @param {string} targetDir The target directory in which an asset should be stored.
- * @return {Object} An object containing old and new directory path, or an `undefined` value if `assetUrl` includes
+ * @return {Object} An object containing old and new directory path, or an `undefined` value if `assetUrl` starts with
  * `targetDir` or if `assetUrl` can't be matched with either regular expression.
  * @public
  */
 module.exports = (assetUrl, targetDir) => {
   if (assetUrl.startsWith(protocol)) assetUrl = assetUrl.slice(protocol.length);
-  if (assetUrl.includes(targetDir)) return;
+  if (assetUrl.startsWith(targetDir)) return;
   const result = assetUrl.match(OLD_ASSET_PATH_REGEX) || assetUrl.match(NEW_ASSET_PATH_REGEX);
   if (!result) return;
   const { groups: { directory, fileName } } = result;
   const newKey = path.join(targetDir, fileName);
   return { key: directory, newKey };
 };

From 19c7090b49043b01942eecfc4956f7a0f233f67f Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Thu, 2 Dec 2021 14:06:49 +0100
Subject: [PATCH 5/8] Extract migrateFileMeta into separate file called cloneFileMeta

---
 server/script/migrateAssetsLocation.js | 26 +++----------------------
 server/shared/util/cloneFileMeta.js    | 28 ++++++++++++++++++++++++++
 2 files changed, 31 insertions(+), 23 deletions(-)
 create mode 100644 server/shared/util/cloneFileMeta.js

diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js
index 5a674a1bd..aaddbf538 100644
--- a/server/script/migrateAssetsLocation.js
+++ b/server/script/migrateAssetsLocation.js
@@ -7,6 +7,7 @@ const {
   Revision,
   sequelize
 } = require('../shared/database');
+const cloneFileMeta = require('../shared/util/cloneFileMeta');
 const get = require('lodash/get');
 const getFileMetas = require('../shared/util/getFileMetas');
 const Listr = require('listr');
@@ -108,7 +109,7 @@ class RepositoryMigration {
 
   async migrateRepository() {
     const { repository, repositoryMeta: metaConfigs } = this;
-    const data = await this.migrateFileMeta(repository.data, metaConfigs);
+    const data = await cloneFileMeta(repository.data, metaConfigs, this.repositoryAssetsPath);
     return { data };
   }
 
@@ -130,7 +131,7 @@ class RepositoryMigration {
   async migrateActivity(activity) {
     const { type, data: metaInputs } = activity;
     const metaConfigs = get(this.metaByActivityType, type, []);
-    const data = await this.migrateFileMeta(metaInputs, metaConfigs);
+    const data = await cloneFileMeta(metaInputs, metaConfigs, this.repositoryAssetsPath);
     return { data };
   }
 
@@ -230,25 +231,4 @@ class RepositoryMigration {
     }, {});
     return { assets: { ...data.assets, ...updatedAssets } };
   }
-
-  async migrateFileMeta(metaInputs, metaConfigs) {
-    const newMeta = await Promise.reduce(metaConfigs, async (acc, metaKey) => {
-      const meta = get(metaInputs, metaKey);
-      if (!meta) return acc;
-      const url = get(meta, 'url');
-      if (!url) return acc;
-      const { key, newKey } = resolveNewURL(url, this.repositoryAssetsPath) || {};
-      if (!key || !newKey) return acc;
-      await storage.copyFile(key, newKey);
-      return {
-        ...acc,
-        [metaKey]: {
-          ...meta,
-          key: newKey,
-          url: `${protocol}${newKey}`
-        }
-      };
-    }, {});
-    return { ...metaInputs, ...newMeta };
-  }
 }
diff --git a/server/shared/util/cloneFileMeta.js b/server/shared/util/cloneFileMeta.js
new file mode 100644
index 000000000..8605484a6
--- /dev/null
+++ b/server/shared/util/cloneFileMeta.js
@@ -0,0 +1,28 @@
+'use strict';
+
+const get = require('lodash/get');
+const Promise = require('bluebird');
+const { protocol } = require('../../../config/server/storage');
+const resolveAssetURL = require('./resolveAssetURL');
+const storage = require('../../repository/storage');
+
+module.exports = async (metaInputs, metaConfigs, repositoryAssetsPath) => {
+  const newMeta = await Promise.reduce(metaConfigs, async (acc, metaKey) => {
+    const meta = get(metaInputs, metaKey);
+    if (!meta) return acc;
+    const url = get(meta, 'url');
+    if (!url) return acc;
+    const { key, newKey } = resolveAssetURL(url, repositoryAssetsPath) || {};
+    if (!key || !newKey) return acc;
+    await storage.copyFile(key, newKey);
+    return {
+      ...acc,
+      [metaKey]: {
+        ...meta,
+        key: newKey,
+        url: `${protocol}${newKey}`
+      }
+    };
+  }, {});
+  return { ...metaInputs, ...newMeta };
+};

From 7734e52e64f0c14b6cb7862243d2c4b831db30d7 Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Thu, 2 Dec 2021 16:47:22 +0100
Subject: [PATCH 6/8] Extract migrateContentElement into separate file called cloneContentElement

---
 server/script/migrateAssetsLocation.js    | 59 ++--------------------
 server/shared/util/cloneContentElement.js | 63 +++++++++++++++++++++++
 2 files changed, 66 insertions(+), 56 deletions(-)
 create mode 100644 server/shared/util/cloneContentElement.js

diff --git a/server/script/migrateAssetsLocation.js b/server/script/migrateAssetsLocation.js
index aaddbf538..dd87cebc9 100644
--- a/server/script/migrateAssetsLocation.js
+++ b/server/script/migrateAssetsLocation.js
@@ -7,19 +7,16 @@ const {
   Revision,
   sequelize
 } = require('../shared/database');
+const cloneContentElement = require('../shared/util/cloneContentElement');
 const cloneFileMeta = require('../shared/util/cloneFileMeta');
 const get = require('lodash/get');
 const getFileMetas = require('../shared/util/getFileMetas');
 const Listr = require('listr');
 const Promise = require('bluebird');
-const { protocol } = require('../../config/server/storage');
-const resolveNewURL = require('../shared/util/resolveNewURL');
 const { SCHEMAS } = require('../../config/shared/activities');
 const storage = require('../repository/storage');
-const toPairs = require('lodash/toPairs');
 
 const CHUNK_SIZE = 2000;
-const IMAGE_ELEMENT_TYPE = 'IMAGE';
 
 const ENTITIES = {
   REPOSITORY: 'REPOSITORY',
@@ -30,7 +27,7 @@ const ENTITIES = {
 const mapEntityToAction = {
   [ENTITIES.REPOSITORY]: 'migrateRepository',
   [ENTITIES.ACTIVITY]: 'migrateActivity',
-  [ENTITIES.CONTENT_ELEMENT]: 'migrateContentElement'
+  [ENTITIES.CONTENT_ELEMENT]: 'cloneContentElement'
 };
 
 migrate()
@@ -143,31 +140,11 @@ class RepositoryMigration {
       { transaction }
     );
     return Promise.each(contentElements, async it => {
-      const payload = await this.migrateContentElement(it);
+      const payload = await cloneContentElement(it, this.repositoryAssetsPath, this.metaByElementType);
       return it.update(payload, { transaction });
     });
   }
 
-  async migrateContentElement(element) {
-    const data = await this.migrateContentElementData(element);
-    const meta = await this.migrateContentElementMeta(element);
-    return { data, meta };
-  }
-
-  async migrateContentElementData(element) {
-    const { type, data } = element;
-    if (type === IMAGE_ELEMENT_TYPE) return this.imageMigrationHandler(element);
-    const embeds = data.embeds && (await this.embedsMigrationHandler(element));
-    const assets = data.assets && (await this.defaultMigrationHandler(element));
-    return { ...data, ...embeds, ...assets };
-  }
-
-  async migrateContentElementMeta(element) {
-    const { type, meta: metaInputs } = element;
-    const metaConfigs = get(this.metaByElementType, type, []);
-    return this.migrateFileMeta(metaInputs, metaConfigs);
-  }
-
   async migrateRevisions(entity) {
     const { repositoryId, transaction } = this;
     const options = {
@@ -201,34 +178,4 @@ class RepositoryMigration {
     const payload = await (this[handler] && this[handler](state));
     return { state: { ...state, ...payload } };
   }
-
-  async imageMigrationHandler({ data }) {
-    const url = get(data, 'url');
-    if (!url) return data;
-    const { key, newKey } = resolveNewURL(url, this.repositoryAssetsPath) || {};
-    if (!key || !newKey) return data;
-    await storage.copyFile(key, newKey);
-    return { ...data, url: newKey };
-  }
-
-  async embedsMigrationHandler(element) {
-    const { repositoryId, data } = element;
-    const embeds = await Promise.reduce(Object.entries(data.embeds), async (acc, [id, embed]) => {
-      const payload = await this.migrateContentElement({ repositoryId, ...embed });
-      return { ...acc, [id]: { ...embed, ...payload } };
-    }, {});
-    return { embeds };
-  }
-
-  async defaultMigrationHandler({ data }) {
-    const updatedAssets = await Promise
-      .filter(toPairs(data.assets), ([_, value]) => value.startsWith(protocol))
-      .reduce(async (acc, [key, value]) => {
-        const { key: oldKey, newKey } = resolveNewURL(value, this.repositoryAssetsPath) || {};
-        if (!oldKey || !newKey) return { ...acc, [key]: value };
-        await storage.copyFile(oldKey, newKey);
-        return { ...acc, [key]: `${protocol}${newKey}` };
-      }, {});
-    return { assets: { ...data.assets, ...updatedAssets } };
-  }
 }
diff --git a/server/shared/util/cloneContentElement.js b/server/shared/util/cloneContentElement.js
new file mode 100644
index 000000000..db47cd40e
--- /dev/null
+++ b/server/shared/util/cloneContentElement.js
@@ -0,0 +1,63 @@
+'use strict';
+
+const cloneFileMeta = require('./cloneFileMeta');
+const get = require('lodash/get');
+const Promise = require('bluebird');
+const { protocol } = require('../../../config/server/storage');
+const resolveAssetURL = require('./resolveAssetURL');
+const storage = require('../../repository/storage');
+const toPairs = require('lodash/toPairs');
+
+const IMAGE_ELEMENT_TYPE = 'IMAGE';
+
+async function embedsMigrationHandler(element, repositoryAssetsPath) {
+  const { repositoryId, data } = element;
+  const embeds = await Promise.reduce(Object.entries(data.embeds), async (acc, [id, embed]) => {
+    const payload = await cloneContentElement({ repositoryId, ...embed }, repositoryAssetsPath);
+    return { ...acc, [id]: { ...embed, ...payload } };
+  }, {});
+  return { embeds };
+}
+
+async function defaultMigrationHandler({ data }, repositoryAssetsPath) {
+  const updatedAssets = await Promise
+    .filter(toPairs(data.assets), ([_, value]) => value.startsWith(protocol))
+    .reduce(async (acc, [key, value]) => {
+      const { key: oldKey, newKey } = resolveAssetURL(value, repositoryAssetsPath) || {};
+      if (!oldKey || !newKey) return { ...acc, [key]: value };
+      await storage.copyFile(oldKey, newKey);
+      return { ...acc, [key]: `${protocol}${newKey}` };
+    }, {});
+  return { assets: { ...data.assets, ...updatedAssets } };
+}
+
+async function imageMigrationHandler({ data }, repositoryAssetsPath) {
+  const url = get(data, 'url');
+  if (!url) return data;
+  const { key, newKey } = resolveAssetURL(url, repositoryAssetsPath) || {};
+  if (!key || !newKey) return data;
+  await storage.copyFile(key, newKey);
+  return { ...data, url: newKey };
+}
+
+async function migrateData(element, repositoryAssetsPath) {
+  const { type, data } = element;
+  if (type === IMAGE_ELEMENT_TYPE) return imageMigrationHandler(element, repositoryAssetsPath);
+  const embeds = data.embeds && (await embedsMigrationHandler(element, repositoryAssetsPath));
+  const assets = data.assets && (await defaultMigrationHandler(element, repositoryAssetsPath));
+  return { ...data, ...embeds, ...assets };
+}
+
+async function migrateMeta(element, repositoryAssetsPath, metaByElementType) {
+  const { type, meta: metaInputs } = element;
+  const metaConfigs = get(metaByElementType, type, []);
+  return cloneFileMeta(metaInputs, metaConfigs, repositoryAssetsPath);
+}
+
+async function cloneContentElement(element, repositoryAssetsPath, metaByElementType) {
+  const data = await migrateData(element, repositoryAssetsPath);
+  const meta = await migrateMeta(element, repositoryAssetsPath, metaByElementType);
+  return { data, meta };
+}
+
+module.exports = cloneContentElement;

From 41edd1453416426b879408168f40d43c890a5ec9 Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Fri, 3 Dec 2021 12:30:48 +0100
Subject: [PATCH 7/8] Refactor activity and content element cloning

---
 server/activity/activity.controller.js        | 14 +++++-----
 server/activity/activity.model.js             | 12 ++++-----
 .../content-element/content-element.model.js  | 27 ++++++++++---------
 server/repository/repository.model.js         |  2 +-
 4 files changed, 30 insertions(+), 25 deletions(-)

diff --git a/server/activity/activity.controller.js b/server/activity/activity.controller.js
index 0e974e762..c3be0fe7b 100644
--- a/server/activity/activity.controller.js
+++ b/server/activity/activity.controller.js
@@ -1,10 +1,10 @@
 'use strict';
 
+const { Activity, Repository } = require('../shared/database');
 const {
   getOutlineLevels,
   isOutlineActivity
 } = require('../../config/shared/activities');
-const { Activity } = require('../shared/database');
 const { fetchActivityContent } = require('../shared/publishing/helpers');
 const find = require('lodash/find');
 const get = require('lodash/get');
@@ -66,13 +66,15 @@ function publish({ activity }, res) {
     .then(data => res.json({ data }));
 }
 
-function clone({ activity, body, user }, res) {
+async function clone({ activity, body, user }, res) {
   const { repositoryId, parentId, position } = body;
+  // req.repository cannot be used as it is the origin repository
+  const repository = await Repository.findByPk(repositoryId);
   const context = { userId: user.id };
-  return activity.clone(repositoryId, parentId, position, context).then(mappings => {
-    const opts = { where: { id: Object.values(mappings) } };
-    return Activity.findAll(opts).then(data => res.json({ data }));
-  });
+  const mappings = await activity.clone(repository, parentId, position, context);
+  const opts = { where: { id: Object.values(mappings) } };
+  const activities = await Activity.findAll(opts);
+  return res.json({ data: activities });
 }
 
 function getPreviewUrl({ activity }, res) {
diff --git a/server/activity/activity.model.js b/server/activity/activity.model.js
index 0cfa5fb79..a6d198ffe 100644
--- a/server/activity/activity.model.js
+++ b/server/activity/activity.model.js
@@ -154,11 +154,11 @@ class Activity extends Model {
     return Events;
   }
 
-  static async cloneActivities(src, dstRepositoryId, dstParentId, opts) {
+  static async cloneActivities(src, dstRepository, dstParentId, opts) {
     if (!opts.idMappings) opts.idMappings = {};
     const { idMappings, context, transaction } = opts;
     const dstActivities = await Activity.bulkCreate(map(src, it => ({
-      repositoryId: dstRepositoryId,
+      repositoryId: dstRepository.id,
       parentId: dstParentId,
       ...pick(it, ['type', 'position', 'data', 'refs', 'modifiedAt'])
     })), { returning: true, context, transaction });
@@ -168,18 +168,18 @@ class Activity extends Model {
       acc[it.id] = parent.id;
       const where = { activityId: it.id, detached: false };
       const elements = await ContentElement.findAll({ where, transaction });
-      await ContentElement.cloneElements(elements, parent, { context, transaction });
+      await ContentElement.cloneElements(elements, parent, dstRepository, { context, transaction });
       const children = await it.getChildren({ where: { detached: false } });
       if (!children.length) return acc;
-      return Activity.cloneActivities(children, dstRepositoryId, parent.id, opts);
+      return Activity.cloneActivities(children, dstRepository, parent.id, opts);
     }, idMappings);
   }
 
-  clone(repositoryId, parentId, position, context) {
+  async clone(repository, parentId, position, context) {
     return this.sequelize.transaction(transaction => {
       if (position) this.position = position;
       return Activity.cloneActivities(
-        [this], repositoryId, parentId, { context, transaction }
+        [this], repository, parentId, { context, transaction }
       );
     });
   }
diff --git a/server/content-element/content-element.model.js b/server/content-element/content-element.model.js
index 00d7865e5..4de56d6b4 100644
--- a/server/content-element/content-element.model.js
+++ b/server/content-element/content-element.model.js
@@ -2,10 +2,15 @@
 
 const { Model, Op } = require('sequelize');
 const calculatePosition = require('../shared/util/calculatePosition');
+const cloneContentElement = require('../shared/util/cloneContentElement');
 const { ContentElement: Events } = require('../../common/sse');
+const get = require('lodash/get');
+const getFileMetas = require('../shared/util/getFileMetas');
 const hooks = require('./hooks');
 const isNumber = require('lodash/isNumber');
 const pick = require('lodash/pick');
+const { SCHEMAS } = require('../../config/shared/activities');
+const storage = require('../repository/storage');
 
 class ContentElement extends Model {
   static fields(DataTypes) {
@@ -115,20 +120,18 @@ class ContentElement extends Model {
       : ContentElement.findAll(opt).map(hooks.applyFetchHooks);
   }
 
-  static cloneElements(src, container, options) {
+  static async cloneElements(src, container, repository, options) {
     const { id: activityId, repositoryId } = container;
     const { context, transaction } = options;
-    return this.bulkCreate(src.map(it => {
-      return Object.assign(pick(it, [
-        'type',
-        'position',
-        'data',
-        'contentId',
-        'contentSignature',
-        'refs',
-        'meta'
-      ]), { activityId, repositoryId });
-    }), { returning: true, context, transaction });
+    const repositoryAssetsPath = storage.getPath(repository.id);
+    const metaBySchemaType = getFileMetas(SCHEMAS);
+    const metaByElementType = get(metaBySchemaType, [repository.schema, 'element']);
+    const elements = await Promise.all(src.map(async it => {
+      const element = pick(it, ['type', 'position', 'data', 'contentId', 'contentSignature', 'refs', 'meta']);
+      const { data, meta } = await cloneContentElement(element, repositoryAssetsPath, metaByElementType);
+      return Object.assign(element, { activityId, data, meta, repositoryId });
+    }));
+    return this.bulkCreate(elements, { returning: true, context, transaction });
   }
 
   /**
diff --git a/server/repository/repository.model.js b/server/repository/repository.model.js
index 04305fb6c..214207097 100644
--- a/server/repository/repository.model.js
+++ b/server/repository/repository.model.js
@@ -131,7 +131,7 @@ class Repository extends Model {
     const src = await Activity.findAll({
       where: { repositoryId: this.id, parentId: null },
       transaction });
-    const idMap = await Activity.cloneActivities(src, dst.id, null, { context, transaction });
+    const idMap = await Activity.cloneActivities(src, dst, null, { context, transaction });
     await dst.mapClonedReferences(idMap, transaction);
     return dst;
   });

From a5e3a9f0d7a72080c893059107403a4ca766a8c1 Mon Sep 17 00:00:00 2001
From: Hrvoje Vucic
Date: Mon, 20 Dec 2021 13:42:42 +0100
Subject: [PATCH 8/8] Implement content element clone on repo import

---
 server/shared/transfer/default/processors.js | 20 ++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/server/shared/transfer/default/processors.js b/server/shared/transfer/default/processors.js
index 6b12633f6..777adb3ad 100644
--- a/server/shared/transfer/default/processors.js
+++ b/server/shared/transfer/default/processors.js
@@ -6,8 +6,11 @@ const {
   Repository,
   RepositoryUser
 } = require('../../database');
+const cloneContentElement = require('../../util/cloneContentElement');
 const filter = require('lodash/filter');
 const forEach = require('lodash/forEach');
+const get = require('lodash/get');
+const getFileMetas = require('../../util/getFileMetas');
 const isEmpty = require('lodash/isEmpty');
 const last = require('lodash/last');
 const map = require('lodash/map');
@@ -70,10 +73,11 @@ async function processRepository(repository, _enc, { context, transaction }) {
   Object.assign(repository, { description, name });
   const options = { context: { userId }, transaction };
   const repositoryRecord = omit(repository, IGNORE_ATTRS);
-  const { id } = await Repository.create(repositoryRecord, options);
+  const { id, schema } = await Repository.create(repositoryRecord, options);
   const userRecord = { userId, repositoryId: id, role: ADMIN };
   await RepositoryUser.create(userRecord, { transaction });
   context.repositoryId = id;
+  context.repoSchema = schema;
 }
 
 async function processActivities(activities, _enc, options) {
@@ -129,15 +133,19 @@ function remapActivityRefs(activity, { context, transaction }) {
   return activity.save({ transaction });
 }
 
-function insertElements(elements, { context, transaction }) {
-  const { activityIdMap, repositoryId, userId } = context;
+async function insertElements(elements, { context, storage, transaction }) {
+  const { activityIdMap, repositoryId, repoSchema, userId } = context;
   if (!repositoryId) throw new Error('Invalid repository id');
-  const elementRecords = map(elements, it => {
+  const metaBySchemaType = getFileMetas(SCHEMAS);
+  const metaByElementType = get(metaBySchemaType, [repoSchema, 'element']);
+  const repositoryAssetsPath = storage.getPath(repositoryId);
+  const elementRecords = await Promise.all(map(elements, async it => {
    const activityId = activityIdMap[it.activityId];
    if (!activityId) throw new Error('Invalid activity id');
-    Object.assign(it, { activityId, repositoryId });
+    const { data, meta } = await cloneContentElement(it, repositoryAssetsPath, metaByElementType);
+    Object.assign(it, { activityId, data, meta, repositoryId });
     return omit(it, IGNORE_ATTRS);
-  });
+  }));
   const options = { context: { userId }, returning: true, transaction };
   return ContentElement.bulkCreate(elementRecords, options);
 }
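
A minimal usage sketch of the resolveAssetURL helper introduced in patches 3 and 4; it is not part of the patch series, and the storage protocol ('storage://'), repository id, and file names below are assumed values used only for illustration:

    'use strict';

    // Illustrative only: the require path is relative to the project root and the
    // protocol value is assumed; resolveAssetURL reads the real one from
    // config/server/storage.
    const resolveAssetURL = require('./server/shared/util/resolveAssetURL');

    const targetDir = 'repository/42/assets';

    // An asset still stored under the old flat layout gets remapped onto targetDir:
    // -> { key: 'repository/assets/image.png', newKey: 'repository/42/assets/image.png' }
    console.log(resolveAssetURL('storage://repository/assets/image.png?v=1', targetDir));

    // A URL that already points inside targetDir resolves to undefined, which is why
    // callers guard the result with `|| {}` before checking key/newKey.
    console.log(resolveAssetURL('storage://repository/42/assets/image.png', targetDir));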