diff --git a/client/modules/IDE/actions/uploader.js b/client/modules/IDE/actions/uploader.js
index b861b58928..10b48d906f 100644
--- a/client/modules/IDE/actions/uploader.js
+++ b/client/modules/IDE/actions/uploader.js
@@ -3,6 +3,7 @@ import { apiClient } from '../../../utils/apiClient';
import { getConfig } from '../../../utils/getConfig';
import { isTestEnvironment } from '../../../utils/checkTestEnv';
import { handleCreateFile } from './files';
+import { showErrorModal } from './ide';
const s3BucketUrlBase = getConfig('S3_BUCKET_URL_BASE');
const awsRegion = getConfig('AWS_REGION');
@@ -22,7 +23,7 @@ function isS3Upload(file) {
return !TEXT_FILE_REGEX.test(file.name) || file.size >= MAX_LOCAL_FILE_SIZE;
}
-export async function dropzoneAcceptCallback(userId, file, done) {
+export async function dropzoneAcceptCallback(userId, file, done, dispatch) {
// if a user would want to edit this file as text, local interceptor
if (!isS3Upload(file)) {
try {
@@ -51,6 +52,13 @@ export async function dropzoneAcceptCallback(userId, file, done) {
file.postData = response.data;
done();
} catch (error) {
+ if (error?.response?.status === 403) {
+ if (dispatch) {
+ dispatch(showErrorModal('uploadLimit'));
+ }
+ done('Upload limit reached.');
+ return;
+ }
done(
error?.response?.data?.responseText?.message ||
error?.message ||
diff --git a/client/modules/IDE/components/ErrorModal.jsx b/client/modules/IDE/components/ErrorModal.jsx
index 66dd374c52..fa8aa3ca41 100644
--- a/client/modules/IDE/components/ErrorModal.jsx
+++ b/client/modules/IDE/components/ErrorModal.jsx
@@ -2,6 +2,12 @@ import PropTypes from 'prop-types';
import React from 'react';
import { Link } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
+import prettyBytes from 'pretty-bytes';
+import { getConfig } from '../../../utils/getConfig';
+import { parseNumber } from '../../../utils/parseStringToType';
+
+const uploadLimit = parseNumber(getConfig('UPLOAD_LIMIT')) || 250000000;
+const uploadLimitText = prettyBytes(uploadLimit);
const ErrorModal = ({ type, service, closeModal }) => {
const { t } = useTranslation();
@@ -51,6 +57,18 @@ const ErrorModal = ({ type, service, closeModal }) => {
    return (
      <p>{t('ErrorModal.SavedDifferentWindow')}</p>
    );
}
+  function uploadLimitReached() {
+    return (
+      <p>
+        {t('UploadFileModal.SizeLimitError', { sizeLimit: uploadLimitText })}{' '}
+        <Link to="/assets" onClick={closeModal}>
+          assets
+        </Link>
+        .
+      </p>
+    );
+  }
+
return (
{(() => { // eslint-disable-line
@@ -60,6 +78,8 @@ const ErrorModal = ({ type, service, closeModal }) => {
return staleSession();
} else if (type === 'staleProject') {
return staleProject();
+ } else if (type === 'uploadLimit') {
+ return uploadLimitReached();
} else if (type === 'oauthError') {
return oauthError();
}
@@ -73,7 +93,8 @@ ErrorModal.propTypes = {
'forceAuthentication',
'staleSession',
'staleProject',
- 'oauthError'
+ 'oauthError',
+ 'uploadLimit'
]).isRequired,
closeModal: PropTypes.func.isRequired,
service: PropTypes.oneOf(['google', 'github'])
diff --git a/client/modules/IDE/components/FileUploader.jsx b/client/modules/IDE/components/FileUploader.jsx
index fcb754c9bd..46f88647e1 100644
--- a/client/modules/IDE/components/FileUploader.jsx
+++ b/client/modules/IDE/components/FileUploader.jsx
@@ -69,7 +69,7 @@ function FileUploader() {
acceptedFiles: fileExtensionsAndMimeTypes,
dictDefaultMessage: t('FileUploader.DictDefaultMessage'),
accept: (file, done) => {
- dropzoneAcceptCallback(userId, file, done);
+ dropzoneAcceptCallback(userId, file, done, dispatch);
},
sending: dropzoneSendingCallback
});
diff --git a/client/modules/IDE/selectors/users.js b/client/modules/IDE/selectors/users.js
index 14f81fd114..f7bfb27b23 100644
--- a/client/modules/IDE/selectors/users.js
+++ b/client/modules/IDE/selectors/users.js
@@ -13,10 +13,11 @@ export const getCanUploadMedia = createSelector(
getAuthenticated,
getTotalSize,
(authenticated, totalSize) => {
+ const currentSize = totalSize || 0;
if (!authenticated) return false;
// eventually do the same thing for verified when
// email verification actually works
- if (totalSize > limit) return false;
+ if (currentSize >= limit) return false;
return true;
}
);
@@ -25,8 +26,8 @@ export const getreachedTotalSizeLimit = createSelector(
getTotalSize,
getAssetsTotalSize,
(totalSize, assetsTotalSize) => {
- const currentSize = totalSize || assetsTotalSize;
- if (currentSize && currentSize > limit) return true;
+ const currentSize = totalSize || assetsTotalSize || 0;
+ if (currentSize >= limit) return true;
// if (totalSize > 1000) return true;
return false;
}
diff --git a/server/controllers/aws.controller.js b/server/controllers/aws.controller.js
index 41bc627a51..b6e03db13c 100644
--- a/server/controllers/aws.controller.js
+++ b/server/controllers/aws.controller.js
@@ -78,28 +78,97 @@ export async function deleteObjectFromS3(req, res) {
}
}
-export function signS3(req, res) {
- const limit = process.env.UPLOAD_LIMIT || 250000000;
- if (req.user.totalSize > limit) {
- res
- .status(403)
- .send({ message: 'user has uploaded the maximum size of assets.' });
- return;
+export async function listObjectsInS3ForUser(userId) {
+ try {
+ let assets = [];
+ const params = {
+ Bucket: process.env.S3_BUCKET,
+ Prefix: `${userId}/`
+ };
+
+ const data = await s3Client.send(new ListObjectsCommand(params));
+
+ assets = data.Contents?.map((object) => ({
+ key: object.Key,
+ size: object.Size
+ }));
+
+ const projects = await Project.getProjectsForUserId(userId);
+ const projectAssets = [];
+ let totalSize = 0;
+
+ assets?.forEach((asset) => {
+ const name = asset.key.split('/').pop();
+ const foundAsset = {
+ key: asset.key,
+ name,
+ size: asset.size,
+ url: `${process.env.S3_BUCKET_URL_BASE}${asset.key}`
+ };
+ totalSize += asset.size;
+
+ const wasMatched = projects.some((project) =>
+ project.files.some((file) => {
+ if (!file.url) return false;
+ if (file.url.includes(asset.key)) {
+ foundAsset.name = file.name;
+ foundAsset.sketchName = project.name;
+ foundAsset.sketchId = project.id;
+ foundAsset.url = file.url;
+ return true;
+ }
+ return false;
+ })
+ );
+
+ if (wasMatched) {
+ projectAssets.push(foundAsset);
+ }
+ });
+
+ return { assets: projectAssets, totalSize };
+ } catch (error) {
+ if (error instanceof TypeError) {
+ return null;
+ }
+ console.error('Got an error: ', error);
+ throw error;
+ }
+}
+
+export async function signS3(req, res) {
+ const limit = Number(process.env.UPLOAD_LIMIT) || 250000000;
+
+ try {
+ const objects = await listObjectsInS3ForUser(req.user.id);
+ const currentSize = Number(objects?.totalSize ?? req.user.totalSize) || 0;
+ const incomingSize = Number(req.body.size) || 0;
+
+ if (currentSize >= limit || currentSize + incomingSize > limit) {
+ res
+ .status(403)
+ .send({ message: 'user has uploaded the maximum size of assets.' });
+ return;
+ }
+
+ const fileExtension = getExtension(req.body.name);
+ const filename = uuidv4() + fileExtension;
+ const acl = 'public-read';
+ const policy = S3Policy.generate({
+ acl,
+      key: `${req.body.userId}/${filename}`,
+ bucket: process.env.S3_BUCKET,
+ contentType: req.body.type,
+ region: process.env.AWS_REGION,
+ accessKey: process.env.AWS_ACCESS_KEY,
+ secretKey: process.env.AWS_SECRET_KEY,
+ metadata: []
+ });
+ res.json(policy);
+ } catch (error) {
+ console.error('Error signing upload policy:', error);
+ res.status(500).json({ error: 'Failed to sign upload policy' });
}
- const fileExtension = getExtension(req.body.name);
- const filename = uuidv4() + fileExtension;
- const acl = 'public-read';
- const policy = S3Policy.generate({
- acl,
- key: `${req.body.userId}/${filename}`,
- bucket: process.env.S3_BUCKET,
- contentType: req.body.type,
- region: process.env.AWS_REGION,
- accessKey: process.env.AWS_ACCESS_KEY,
- secretKey: process.env.AWS_SECRET_KEY,
- metadata: []
- });
- res.json(policy);
}
export async function copyObjectInS3(url, userId) {
@@ -182,64 +251,6 @@ export async function moveObjectToUserInS3(url, userId) {
return `${s3Bucket}${userId}/${newFilename}`;
}
-export async function listObjectsInS3ForUser(userId) {
- try {
- let assets = [];
- const params = {
- Bucket: process.env.S3_BUCKET,
- Prefix: `${userId}/`
- };
-
- const data = await s3Client.send(new ListObjectsCommand(params));
-
- assets = data.Contents?.map((object) => ({
- key: object.Key,
- size: object.Size
- }));
-
- const projects = await Project.getProjectsForUserId(userId);
- const projectAssets = [];
- let totalSize = 0;
-
- assets?.forEach((asset) => {
- const name = asset.key.split('/').pop();
- const foundAsset = {
- key: asset.key,
- name,
- size: asset.size,
- url: `${process.env.S3_BUCKET_URL_BASE}${asset.key}`
- };
- totalSize += asset.size;
-
- const wasMatched = projects.some((project) =>
- project.files.some((file) => {
- if (!file.url) return false;
- if (file.url.includes(asset.key)) {
- foundAsset.name = file.name;
- foundAsset.sketchName = project.name;
- foundAsset.sketchId = project.id;
- foundAsset.url = file.url;
- return true;
- }
- return false;
- })
- );
-
- if (wasMatched) {
- projectAssets.push(foundAsset);
- }
- });
-
- return { assets: projectAssets, totalSize };
- } catch (error) {
- if (error instanceof TypeError) {
- return null;
- }
- console.error('Got an error: ', error);
- throw error;
- }
-}
-
export async function listObjectsInS3ForUserRequestHandler(req, res) {
const { username } = req.user;