diff --git a/client/modules/IDE/actions/project.js b/client/modules/IDE/actions/project.js
index 7787f879bb..cb7a4ee760 100644
--- a/client/modules/IDE/actions/project.js
+++ b/client/modules/IDE/actions/project.js
@@ -306,6 +307,7 @@ export function cloneProject(project) {
     generateNewIdsForChildren(rootFile, newFiles);
 
     // duplicate all files hosted on S3
+    const copiedAssetKeys = [];
     each(
       newFiles,
       (file, callback) => {
@@ -319,10 +320,16 @@ export function cloneProject(project) {
           const formParams = {
             url: file.url
           };
-          apiClient.post('/S3/copy', formParams).then((response) => {
-            file.url = response.data.url;
-            callback(null);
-          });
+          apiClient
+            .post('/S3/copy', formParams)
+            .then((response) => {
+              file.url = response.data.url;
+
+              // remember the copied object's key so it can be rolled back
+              copiedAssetKeys.push(file.url.split('/').pop());
+              callback(null);
+            })
+            .catch(callback);
         } else {
           callback(null);
         }
@@ -343,6 +350,12 @@ export function cloneProject(project) {
           dispatch(setNewProject(response.data));
         })
         .catch((error) => {
+          // roll back assets copied before the save failed; best-effort,
+          // so swallow rejections instead of leaving them unhandled
+          copiedAssetKeys.forEach((assetKey) => {
+            apiClient
+              .delete(`/S3/delete?objectKey=${encodeURIComponent(assetKey)}`)
+              .catch(() => {});
+          });
           dispatch({
             type: ActionTypes.PROJECT_SAVE_FAIL,
             error: error?.response?.data
diff --git a/server/controllers/aws.controller.js b/server/controllers/aws.controller.js
index b6e03db13c..88807b2aa6 100644
--- a/server/controllers/aws.controller.js
+++ b/server/controllers/aws.controller.js
@@ -156,7 +156,7 @@ export async function signS3(req, res) {
   const acl = 'public-read';
   const policy = S3Policy.generate({
     acl,
-    key: `${req.body.userId}/${filename}`,
+    key: `pending/${req.user.id}/${filename}`,
     bucket: process.env.S3_BUCKET,
     contentType: req.body.type,
     region: process.env.AWS_REGION,
diff --git a/server/controllers/project.controller.js b/server/controllers/project.controller.js
index 57eda81381..1a34006381 100644
--- a/server/controllers/project.controller.js
+++ b/server/controllers/project.controller.js
@@ -11,6 +11,7 @@ import Project from '../models/project';
 import { User } from '../models/user';
 import { resolvePathToFile } from '../utils/filePath';
 import { generateFileSystemSafeName } from '../utils/generateFileSystemSafeName';
+import { commitPendingFiles } from '../utils/pendingAssets';
 
 const s3Client = new S3Client({
   credentials: {
@@ -60,6 +61,14 @@ export async function updateProject(req, res) {
   // only allow whitelisted fields so ownership/slug etc can't be overwritten
-  const allowedFields = ['name', 'files', 'updatedAt', 'visibility'];
+  const allowedFields = ['name', 'updatedAt', 'visibility'];
   const updateData = {};
+
+  if (req.body.files !== undefined) {
+    // Commit pending S3 assets before saving. 'files' is handled here, not
+    // in the whitelist loop, so the committed URLs are not clobbered by the
+    // raw request body.
+    updateData.files = await commitPendingFiles(req.body.files, req.user.id);
+  }
+
   allowedFields.forEach((field) => {
     if (req.body[field] !== undefined) {
       updateData[field] = req.body[field];
@@ -77,21 +86,7 @@ export async function updateProject(req, res) {
     )
       .populate('user', 'username')
       .exec();
-    if (
-      req.body.files &&
-      updatedProject.files.length !== req.body.files.length
-    ) {
-      const oldFileIds = updatedProject.files.map((file) => file.id);
-      const newFileIds = req.body.files.map((file) => file.id);
-      const staleIds = oldFileIds.filter((id) => newFileIds.indexOf(id) === -1);
-      staleIds.forEach((staleId) => {
-        updatedProject.files.id(staleId).deleteOne();
-      });
-      const savedProject = await updatedProject.save();
-      res.json(savedProject);
-    } else {
-      res.json(updatedProject);
-    }
+    res.json(updatedProject);
   } catch (error) {
     console.error(error);
     res.status(500).json({ success: false });
diff --git a/server/utils/pendingAssets.js b/server/utils/pendingAssets.js
new file mode 100644
index 0000000000..348d592dbb
--- /dev/null
+++ b/server/utils/pendingAssets.js
@@ -0,0 +1,68 @@
+import {
+  S3Client,
+  CopyObjectCommand,
+  DeleteObjectsCommand
+} from '@aws-sdk/client-s3';
+
+// '@aws-sdk/client-s3' exports the S3Client class, not an instance, so a
+// client is constructed here (credentials via the default provider chain).
+const s3Client = new S3Client({ region: process.env.AWS_REGION });
+
+/**
+ * Moves assets uploaded under the temporary `pending/<userId>/` S3 prefix
+ * into the user's permanent prefix and rewrites each file's URL. Files that
+ * are not pending S3 assets are returned unchanged.
+ *
+ * @param {Array<Object>} files - project file records (each may carry a `url`)
+ * @param {string} userId - owner of the pending assets
+ * @returns {Promise<Array<Object>>} the files with committed asset URLs
+ */
+export async function commitPendingFiles(files, userId) {
+  if (!files) {
+    return [];
+  }
+
+  const s3Base = process.env.S3_BUCKET_URL_BASE;
+  const s3Bucket = process.env.S3_BUCKET;
+
+  return Promise.all(
+    files.map(async (file) => {
+      // Leave non-asset files (and already-committed assets) untouched.
+      if (!file.url || !file.url.startsWith(s3Base)) {
+        return file;
+      }
+
+      const assetKey = decodeURIComponent(file.url.slice(s3Base.length));
+
+      if (!assetKey.startsWith(`pending/${userId}/`)) {
+        return file;
+      }
+
+      const fileName = assetKey.split('/').pop();
+      const newKey = `${userId}/${fileName}`;
+
+      await s3Client.send(
+        new CopyObjectCommand({
+          Bucket: s3Bucket,
+          CopySource: `${s3Bucket}/${assetKey}`,
+          Key: newKey,
+          ACL: 'public-read'
+        })
+      );
+
+      await s3Client.send(
+        new DeleteObjectsCommand({
+          Bucket: s3Bucket,
+          Delete: { Objects: [{ Key: assetKey }] }
+        })
+      );
+
+      return {
+        ...file,
+        url: `${s3Base}${newKey}`
+      };
+    })
+  );
+}
+
+export default commitPendingFiles;