Cannot Upload Large Files - DigitalOcean Spaces (S3 protocol)

I cannot upload large files (e.g., 50 MB) through the Parse Dashboard. I currently get a 502 error after a delay.

// Core dependencies: web framework, HTTP server, request-body parsing.
const express = require('express');
const http = require('http');
const bodyParser = require('body-parser');

// Parse Server backend API.
const ParseServer = require('parse-server').ParseServer;

const app = express();

// Connection string for MongoDB and the Parse cloud-code location are
// configured below on the ParseServer instance.
// Plain-HTTP access is disallowed (TLS is terminated by nginx in front).
const allowInsecureHTTP = false;

// S3-compatible file-storage adapter, used here with DigitalOcean Spaces.
const S3Adapter = require("@parse/s3-files-adapter");
const AWS = require("aws-sdk");

//Configure Digital Ocean Spaces EndPoint
// Configure the DigitalOcean Spaces endpoint (Spaces speaks the S3 protocol,
// so the AWS SDK endpoint type is used directly).
const spacesEndpoint = new AWS.Endpoint(process.env.SPACES_ENDPOINT);

// Adapter options for @parse/s3-files-adapter.
// NOTE: in the original paste the object literals were never closed, which is
// a syntax error — the closing braces below restore a valid literal.
var s3Options = {
  bucket: process.env.SPACES_BUCKET_NAME,
  baseUrl: process.env.SPACES_BASE_URL,
  region: process.env.SPACES_REGION,
  directAccess: true,                            // clients fetch files straight from Spaces
  globalCacheControl: "public, max-age=31536000", // cache files for one year
  // Raw overrides handed to the underlying AWS.S3 client.
  s3overrides: {
    accessKeyId: process.env.SPACES_ACCESS_KEY,
    secretAccessKey: process.env.SPACES_SECRET_KEY,
    endpoint: spacesEndpoint,
  },
};

var s3Adapter = new S3Adapter(s3Options);
// Parse Server configuration.
// FIXME(review): nginx proxies /parse-server/ to this process, and serverURL
// below says /parse-server/, but the API is mounted at /parse-medmelo — these
// three paths must agree or requests will 404/502. Confirm the intended path.
var api = new ParseServer({
    appName: 'Medmelo',
    // Must be at least as large as the biggest file clients will upload.
    maxUploadSize: "200mb",
    // SECURITY: credentials are embedded in the URI — move the whole string
    // into an environment variable (e.g. process.env.DATABASE_URI).
    databaseURI: 'mongodb://adminmed:[email protected]:27017/meddb',
    cloud: __dirname + '/cloud/main.js',
    appId: process.env.APPID,
    masterKey: process.env.MASTERKEY,
    // File-upload policy: authenticated users only.
    // (In the original paste this object was never closed, so serverURL and
    // filesAdapter were accidentally swallowed inside fileUpload — a bug.)
    fileUpload: {
        enableForPublic: false,
        enableForAnonymousUser: false,
        enableForAuthenticatedUser: true,
    },
    serverURL: 'http://localhost:1337/parse-server/',
    filesAdapter: s3Adapter,
});

// Serve the Parse API on the /parse URL prefix
app.use('/parse-medmelo', api);
// Global body-size limits, kept disabled: Parse Server mounts its own body
// parsing, so these would only matter for non-Parse routes.
//app.use(express.json({limit: '200mb'}));
//app.use(express.urlencoded({limit: '200mb', extended: true,parameterLimit:200000}));
//app.use(bodyParser.json({limit: '200mb'}));
//app.use(bodyParser.urlencoded({limit: '200mb', extended: true, parameterLimit:200000}));

var port = 1337;

// BUG FIX: timeouts must be set on the http.Server *instance*. The original
// assigned them to the `http` module object, which has no effect at all.
var server = http.createServer(app);

// The original intended 25s here; an effective 25s socket timeout aborts a
// 50 MB upload mid-transfer on slow links and surfaces as a 502 at nginx.
// 600s matches the proxy_read_timeout configured in nginx below.
server.timeout = 600 * 1000;
// keepAliveTimeout should not be shorter than the proxy's idle timeout for
// upstream connections, or nginx sees "connection reset by peer" / 502.
server.keepAliveTimeout = 70 * 1000;
// headersTimeout must exceed keepAliveTimeout (Node requirement).
server.headersTimeout = 120 * 1000;

server.listen(port, function() {
  console.log('parse-server running on port ' + port);
});

user www-data;
worker_processes auto;
# "pid /run/;" is invalid — the directive needs a file path (likely lost in paste).
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;

events {
        worker_connections 768;
}

http {
        sendfile on;
        tcp_nopush on;
        tcp_nodelay on;
        keepalive_timeout 650;
        types_hash_max_size 2048;
        # Accept request bodies up to 500 MB (large file uploads).
        client_max_body_size 500M;
        client_body_temp_path /data/temp;
        # Generous proxy timeouts so slow, large uploads are not cut off mid-transfer.
        proxy_connect_timeout 600;
        proxy_send_timeout 600;
        proxy_read_timeout 600;
        send_timeout 600;
        client_body_buffer_size 5M;
        include /etc/nginx/mime.types;
        default_type application/octet-stream;
        # TLSv1 and TLSv1.1 are formally deprecated (RFC 8996); allow 1.2+ only.
        ssl_protocols TLSv1.2 TLSv1.3;
        ssl_prefer_server_ciphers on;
        access_log /var/log/nginx/access.log;
        error_log /var/log/nginx/error.log;
        gzip on;
        include /etc/nginx/conf.d/*.conf;
        include /etc/nginx/sites-enabled/*;
}


Website nginx configuration:

# HTTP - redirect all requests to HTTPS
server {
    listen 80;
    listen [::]:80 default_server ipv6only=on;
    return 301 https://$host$request_uri;
}

# HTTPS - proxy requests for /parse-server/ through to Parse Server
server {
   listen 443 ssl http2;
   listen [::]:443 ssl http2;
   ssl_certificate         /etc/ssl/cert.pem;
   ssl_certificate_key     /etc/ssl/key.pem;
   ssl_client_certificate /etc/ssl/cloudflare.crt;
   ssl_verify_client on;
   root /usr/share/nginx/html;
   index index.html index.htm index.nginx-debian.html;

   # Pass requests for /parse-server/ to the Parse Server instance at localhost:1337.
   # NOTE(review): the Node app mounts its API at /parse-medmelo, not
   # /parse-server/ — this mismatch alone yields 404/502; the paths must agree.
   location /parse-server/ {
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header X-NginX-Proxy true;
       proxy_pass http://localhost:1337/parse-server/;
       client_max_body_size 500M;
       # Spools the request body to disk; presumably for debugging large
       # uploads — confirm it is still wanted in production.
       client_body_in_file_only   on;
       client_body_buffer_size    1M;
       proxy_ssl_session_reuse off;
       proxy_set_header Host $http_host;
       proxy_redirect off;
   }

   # Parse Dashboard instance at localhost:4040.
   location /dashboard/ {
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header X-NginX-Proxy true;
       proxy_pass http://localhost:4040/dashboard/;
       proxy_ssl_session_reuse off;
       client_max_body_size 500M;
       proxy_set_header Host $http_host;
       proxy_redirect off;
   }
}
Extra info: the domain is connected through Cloudflare, smaller files upload without any issues, the server is hosted on a DigitalOcean VPS, and file storage is DigitalOcean Spaces (same protocol as AWS S3).

I’ve been trying to solve this issue for more than a week.

Have you tried changing client_max_body_size in your nginx config?

http {
    client_max_body_size 200M;

Just remember that you’ll need to restart/reload nginx once you’ve made any changes there.

Also, I see you’ve commented out the bodyParser.json limit. If you’re sending your images as a base64-encoded value, you will likely need to add that back in and increase it (depending on your use case) — e.g.:

server.use(bodyParser.json({ limit: '20mb' })) // I'm not sure of the platform limitations here

@woutercouvaras Thanks for replying. I have already set client_max_body_size in nginx.conf and reloaded and restarted the server. Adding `server.use(bodyParser.json({ limit: '20mb' }))` makes no difference. I’m also getting a

connection reset by peer

error in the Flutter app sometimes. Could that be a hint related to my problem?

Sorry, I’m not sure what else to suggest.

The above definitely sounds like they are closing the connection. It might be worth looking into their restrictions/config requirements.

Sorry I couldn’t be of more help.

Tools like Gs Richcopy 360 or Sharegate can handle this issue fast and easily.
Both are excellent to upload large files to digital ocean from a local machine or another cloud/server